Merge with master.kernel.org:/pub/scm/git/git.git

Peter Anvin 2005-10-03 12:04:44 -07:00
commit 79a9d8ea0d
44 changed files with 1366 additions and 457 deletions

.gitignore

@ -82,6 +82,7 @@ git-ssh-push
 git-ssh-upload
 git-status
 git-stripspace
+git-symbolic-ref
 git-tag
 git-tar-tree
 git-unpack-file


@ -9,7 +9,7 @@ git-clone - Clones a repository.
 
 SYNOPSIS
 --------
-'git clone' [-l] [-u <upload-pack>] [-q] <repository> <directory>
+'git clone' [-l [-s]] [-q] [-n] [-u <upload-pack>] <repository> <directory>
 
 DESCRIPTION
 -----------
@ -17,6 +17,7 @@ Clones a repository into a newly created directory.
 
 OPTIONS
 -------
+--local::
 -l::
 	When the repository to clone from is on a local machine,
 	this flag bypasses normal "git aware" transport
@ -25,10 +26,22 @@ OPTIONS
 	The files under .git/objects/ directory are hardlinked
 	to save space when possible.
 
+--shared::
+-s::
+	When the repository to clone is on the local machine,
+	instead of using hard links, automatically setup
+	.git/objects/info/alternates to share the objects
+	with the source repository.
+
+--quiet::
 -q::
 	Operate quietly. This flag is passed to "rsync" and
 	"git-clone-pack" commands when given.
 
+-n::
+	No checkout of HEAD is performed after the clone is complete.
+
+--upload-pack <upload-pack>::
 -u <upload-pack>::
 	When given, and the repository to clone from is handled
 	by 'git-clone-pack', '--exec=<upload-pack>' is passed to
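
As an illustration (not part of the commit), the new options can be exercised like this; the source path /src/repo.git is an assumption:

    # Plain local clone: objects are hard-linked where possible.
    git clone -l /src/repo.git linked-copy

    # Shared clone: no hard links; objects are borrowed through
    # .git/objects/info/alternates pointing back at the source.
    git clone -l -s /src/repo.git shared-copy
    cat shared-copy/.git/objects/info/alternates

    # -n: clone but skip the checkout of HEAD.
    git clone -n /src/repo.git no-checkout-copy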


@ -13,7 +13,7 @@ SYNOPSIS
 	(-[c|d|o|i|s|u|k|m])\*
 	[-x <pattern>|--exclude=<pattern>]
 	[-X <file>|--exclude-from=<file>]
-	[--exclude-per-directory=<file>]
+	[--exclude-per-directory=<file>] [--] [<file>]\*
 
 DESCRIPTION
 -----------
@ -77,6 +77,13 @@ OPTIONS
 	K to be killed
 	? other
+
+--::
+	Do not interpret any more arguments as options.
+
+<file>::
+	Files to show. If no files are given all files which match the other
+	specified criteria are shown.
 
 Output
 ------
 show files just outputs the filename unless '--stage' is specified in
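
A quick sketch of the new argument handling (the pattern is only an example, not taken from the commit):

    # "--" ends option parsing; the remaining arguments select which
    # files to show out of those matching the other criteria.
    git ls-files --others -- 'Documentation/*.txt'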


@ -50,7 +50,7 @@
# DEFINES += -DUSE_STDEV # DEFINES += -DUSE_STDEV
GIT_VERSION = 0.99.7.GIT GIT_VERSION = 0.99.8.GIT
CFLAGS = -g -O2 -Wall CFLAGS = -g -O2 -Wall
ALL_CFLAGS = $(CFLAGS) $(PLATFORM_DEFINES) $(DEFINES) ALL_CFLAGS = $(CFLAGS) $(PLATFORM_DEFINES) $(DEFINES)
@ -103,25 +103,29 @@ SIMPLE_PROGRAMS = \
# ... and all the rest # ... and all the rest
PROGRAMS = \ PROGRAMS = \
git-apply$X git-cat-file$X git-checkout-index$X \ git-apply$X git-cat-file$X \
git-clone-pack$X git-commit-tree$X git-convert-objects$X \ git-checkout-index$X git-clone-pack$X git-commit-tree$X \
git-diff-files$X git-diff-index$X git-diff-stages$X \ git-convert-objects$X git-diff-files$X \
git-diff-tree$X git-fetch-pack$X git-fsck-objects$X \ git-diff-index$X git-diff-stages$X \
git-hash-object$X git-init-db$X git-local-fetch$X \ git-diff-tree$X git-fetch-pack$X git-fsck-objects$X \
git-ls-files$X git-ls-tree$X git-merge-base$X \ git-hash-object$X git-init-db$X \
git-merge-index$X git-mktag$X git-pack-objects$X \ git-local-fetch$X git-ls-files$X git-ls-tree$X git-merge-base$X \
git-patch-id$X git-peek-remote$X git-prune-packed$X \ git-merge-index$X git-mktag$X git-pack-objects$X git-patch-id$X \
git-read-tree$X git-receive-pack$X git-rev-list$X \ git-peek-remote$X git-prune-packed$X git-read-tree$X \
git-rev-parse$X git-send-pack$X git-show-branch$X \ git-receive-pack$X git-rev-list$X git-rev-parse$X \
git-show-index$X git-ssh-fetch$X git-ssh-upload$X \ git-send-pack$X git-show-branch$X \
git-tar-tree$X git-unpack-file$X git-unpack-objects$X \ git-show-index$X git-ssh-fetch$X \
git-update-index$X git-update-server-info$X \ git-ssh-upload$X git-tar-tree$X git-unpack-file$X \
git-upload-pack$X git-verify-pack$X git-write-tree$X \ git-unpack-objects$X git-update-index$X git-update-server-info$X \
git-update-ref$X $(SIMPLE_PROGRAMS) git-upload-pack$X git-verify-pack$X git-write-tree$X \
git-update-ref$X git-symbolic-ref$X \
$(SIMPLE_PROGRAMS)
# Backward compatibility -- to be removed after 1.0 # Backward compatibility -- to be removed after 1.0
PROGRAMS += git-ssh-pull$X git-ssh-push$X PROGRAMS += git-ssh-pull$X git-ssh-push$X
GIT_LIST_TWEAK =
PYMODULES = \ PYMODULES = \
gitMergeCommon.py gitMergeCommon.py
@ -131,6 +135,8 @@ endif
ifdef WITH_SEND_EMAIL ifdef WITH_SEND_EMAIL
SCRIPT_PERL += git-send-email.perl SCRIPT_PERL += git-send-email.perl
else
GIT_LIST_TWEAK += -e '/^send-email$$/d'
endif endif
LIB_FILE=libgit.a LIB_FILE=libgit.a
@ -181,6 +187,10 @@ endif
ifneq (,$(findstring arm,$(shell uname -m))) ifneq (,$(findstring arm,$(shell uname -m)))
ARM_SHA1 = YesPlease ARM_SHA1 = YesPlease
endif endif
ifeq ($(shell uname -s),OpenBSD)
NEEDS_LIBICONV = YesPlease
PLATFORM_DEFINES += -I/usr/local/include -L/usr/local/lib
endif
ifndef NO_CURL ifndef NO_CURL
ifdef CURLDIR ifdef CURLDIR
@ -206,18 +216,32 @@ endif
ifndef NO_OPENSSL ifndef NO_OPENSSL
LIB_OBJS += epoch.o LIB_OBJS += epoch.o
OPENSSL_LIBSSL = -lssl OPENSSL_LIBSSL = -lssl
ifdef OPENSSLDIR
# Again this may be problematic -- gcc does not always want -R.
CFLAGS += -I$(OPENSSLDIR)/include
OPENSSL_LINK = -L$(OPENSSLDIR)/lib -R$(OPENSSLDIR)/lib
else
OPENSSL_LINK =
endif
else else
DEFINES += '-DNO_OPENSSL' DEFINES += '-DNO_OPENSSL'
MOZILLA_SHA1 = 1 MOZILLA_SHA1 = 1
OPENSSL_LIBSSL = OPENSSL_LIBSSL =
endif endif
ifdef NEEDS_SSL_WITH_CRYPTO ifdef NEEDS_SSL_WITH_CRYPTO
LIB_4_CRYPTO = -lcrypto -lssl LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto -lssl
else else
LIB_4_CRYPTO = -lcrypto LIB_4_CRYPTO = $(OPENSSL_LINK) -lcrypto
endif endif
ifdef NEEDS_LIBICONV ifdef NEEDS_LIBICONV
LIB_4_ICONV = -liconv ifdef ICONVDIR
# Again this may be problematic -- gcc does not always want -R.
CFLAGS += -I$(ICONVDIR)/include
ICONV_LINK = -L$(ICONVDIR)/lib -R$(ICONVDIR)/lib
else
ICONV_LINK =
endif
LIB_4_ICONV = $(ICONV_LINK) -liconv
else else
LIB_4_ICONV = LIB_4_ICONV =
endif endif
@ -273,8 +297,13 @@ all:
git: git.sh Makefile git: git.sh Makefile
rm -f $@+ $@ rm -f $@+ $@
sed -e '1s|#!.*/sh|#!$(SHELL_PATH)|' \ sed -e '1s|#!.*/sh|#!$(SHELL_PATH)|' \
<<<<<<< Makefile
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
-e 's/@@X@@/$(X)/g' <$@.sh >$@+ -e 's/@@X@@/$(X)/g' <$@.sh >$@+
=======
-e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \
$(GIT_LIST_TWEAK) <$@.sh >$@+
>>>>>>> .merge_file_3QHyD4
chmod +x $@+ chmod +x $@+
mv $@+ $@ mv $@+ $@
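
For illustration only, the build knobs handled in this hunk would be used on a system with OpenSSL and libiconv under non-standard prefixes roughly like this; the paths are assumptions, not part of the commit:

    # OPENSSLDIR/ICONVDIR add -I/-L/-R flags; the NEEDS_* switches pull in
    # the extra link libraries as the Makefile above describes.
    make NEEDS_SSL_WITH_CRYPTO=YesPlease OPENSSLDIR=/usr/local/ssl \
         NEEDS_LIBICONV=YesPlease ICONVDIR=/usr/local \
         WITH_SEND_EMAIL=YesPlease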

apply.c

@ -723,6 +723,16 @@ static int parse_single_patch(char *line, unsigned long size, struct patch *patc
return offset; return offset;
} }
static inline int metadata_changes(struct patch *patch)
{
return patch->is_rename > 0 ||
patch->is_copy > 0 ||
patch->is_new > 0 ||
patch->is_delete ||
(patch->old_mode && patch->new_mode &&
patch->old_mode != patch->new_mode);
}
static int parse_chunk(char *buffer, unsigned long size, struct patch *patch) static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
{ {
int hdrsize, patchsize; int hdrsize, patchsize;
@ -733,6 +743,9 @@ static int parse_chunk(char *buffer, unsigned long size, struct patch *patch)
patchsize = parse_single_patch(buffer + offset + hdrsize, size - offset - hdrsize, patch); patchsize = parse_single_patch(buffer + offset + hdrsize, size - offset - hdrsize, patch);
if (!patchsize && !metadata_changes(patch))
die("patch with only garbage at line %d", linenr);
return offset + hdrsize + patchsize; return offset + hdrsize + patchsize;
} }
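
The new metadata_changes() check means a patch with no text hunks but real metadata (for example a pure mode change) is still accepted instead of dying as "only garbage". A hedged sketch, assuming a tracked file run.sh exists in the working tree:

    # Build a mode-only patch: no hunks, but metadata_changes() is true.
    printf 'diff --git a/run.sh b/run.sh\nold mode 100644\nnew mode 100755\n' >mode-only.patch
    git apply mode-only.patch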


@ -189,6 +189,7 @@ extern char *git_path(const char *fmt, ...) __attribute__((format (printf, 1, 2)
extern char *sha1_file_name(const unsigned char *sha1); extern char *sha1_file_name(const unsigned char *sha1);
extern char *sha1_pack_name(const unsigned char *sha1); extern char *sha1_pack_name(const unsigned char *sha1);
extern char *sha1_pack_index_name(const unsigned char *sha1); extern char *sha1_pack_index_name(const unsigned char *sha1);
extern const unsigned char null_sha1[20];
int git_mkstemp(char *path, size_t n, const char *template); int git_mkstemp(char *path, size_t n, const char *template);
@ -228,6 +229,10 @@ extern int has_pack_index(const unsigned char *sha1);
extern int get_sha1(const char *str, unsigned char *sha1); extern int get_sha1(const char *str, unsigned char *sha1);
extern int get_sha1_hex(const char *hex, unsigned char *sha1); extern int get_sha1_hex(const char *hex, unsigned char *sha1);
extern char *sha1_to_hex(const unsigned char *sha1); /* static buffer result! */ extern char *sha1_to_hex(const unsigned char *sha1); /* static buffer result! */
extern int read_ref(const char *filename, unsigned char *sha1);
extern const char *resolve_ref(const char *path, unsigned char *sha1, int);
extern int create_symref(const char *git_HEAD, const char *refs_heads_master);
extern int validate_symref(const char *git_HEAD);
/* General helper functions */ /* General helper functions */
extern void usage(const char *err) NORETURN; extern void usage(const char *err) NORETURN;

debian/changelog

@ -1,3 +1,9 @@
+git-core (0.99.8-0) unstable; urgency=low
+
+  * GIT 0.99.8
+
+ -- Junio C Hamano <junkio@cox.net>  Sun,  2 Oct 2005 12:54:26 -0700
+
 git-core (0.99.7-0) unstable; urgency=low
 
   * GIT 0.99.7


@ -34,7 +34,6 @@ static void show_modified(int oldmode, int mode,
 int main(int argc, const char **argv)
 {
-	static const unsigned char null_sha1[20] = { 0, };
 	const char **pathspec;
 	const char *prefix = setup_git_directory();
 	int entries, i;

diff.c

@ -10,7 +10,6 @@
 #include "diffcore.h"
 
 static const char *diff_opts = "-pu";
-static unsigned char null_sha1[20] = { 0, };
 
 static int use_size_cache;
 
@ -414,7 +413,7 @@ void diff_free_filespec_data(struct diff_filespec *s)
 static void prep_temp_blob(struct diff_tempfile *temp,
 			   void *blob,
 			   unsigned long size,
-			   unsigned char *sha1,
+			   const unsigned char *sha1,
 			   int mode)
 {
 	int fd;


@ -402,25 +402,17 @@ static void fsck_object_dir(const char *path)
static int fsck_head_link(void) static int fsck_head_link(void)
{ {
int fd, count;
char hex[40];
unsigned char sha1[20]; unsigned char sha1[20];
static char path[PATH_MAX], link[PATH_MAX]; const char *git_HEAD = strdup(git_path("HEAD"));
const char *git_dir = get_git_dir(); const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 1);
int pfxlen = strlen(git_HEAD) - 4; /* strip .../.git/ part */
snprintf(path, sizeof(path), "%s/HEAD", git_dir); if (!git_refs_heads_master)
if (readlink(path, link, sizeof(link)) < 0) return error("HEAD is not a symbolic ref");
return error("HEAD is not a symlink"); if (strncmp(git_refs_heads_master + pfxlen, "refs/heads/", 11))
if (strncmp("refs/heads/", link, 11)) return error("HEAD points to something strange (%s)",
return error("HEAD points to something strange (%s)", link); git_refs_heads_master + pfxlen);
fd = open(path, O_RDONLY); if (!memcmp(null_sha1, sha1, 20))
if (fd < 0)
return error("HEAD: %s", strerror(errno));
count = read(fd, hex, sizeof(hex));
close(fd);
if (count < 0)
return error("HEAD: %s", strerror(errno));
if (count < 40 || get_sha1_hex(hex, sha1))
return error("HEAD: not a valid git pointer"); return error("HEAD: not a valid git pointer");
return 0; return 0;
} }
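
Outside of fsck, the same validation can be expressed with the plumbing the new code builds on; a sketch, not part of the commit:

    head=$(git symbolic-ref HEAD) || echo "HEAD is not a symbolic ref"
    case "$head" in
    refs/heads/*) ;;                                       # looks sane
    *) echo "HEAD points to something strange: $head" ;;
    esac
    git rev-parse --verify HEAD >/dev/null || echo "HEAD: not a valid git pointer"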


@ -228,10 +228,12 @@ foreach my $ps (@psets) {
# skip commits already in repo # skip commits already in repo
# #
if (ptag($ps->{id})) { if (ptag($ps->{id})) {
$opt_v && print "Skipping already imported: $ps->{id}\n"; $opt_v && print " * Skipping already imported: $ps->{id}\n";
next; next;
} }
print " * Starting to work on $ps->{id}\n";
# #
# create the branch if needed # create the branch if needed
# #
@ -675,6 +677,10 @@ sub find_parents {
# that branch. # that branch.
# #
foreach my $branch (keys %branches) { foreach my $branch (keys %branches) {
# check that we actually know about the branch
next unless -e "$git_dir/refs/heads/$branch";
my $mergebase = `git-merge-base $branch $ps->{branch}`; my $mergebase = `git-merge-base $branch $ps->{branch}`;
die "Cannot find merge base for $branch and $ps->{branch}" if $?; die "Cannot find merge base for $branch and $ps->{branch}" if $?;
chomp $mergebase; chomp $mergebase;
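
The added guard simply skips branches git does not know about yet before calling git-merge-base; in shell terms the check amounts to the following, with illustrative branch names:

    # Only ask for a merge base if the ref actually exists.
    test -e "$GIT_DIR/refs/heads/some-branch" &&
    git merge-base some-branch master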


@ -38,7 +38,8 @@ bisect_start() {
# Verify HEAD. If we were bisecting before this, reset to the # Verify HEAD. If we were bisecting before this, reset to the
# top-of-line master first! # top-of-line master first!
# #
head=$(readlink $GIT_DIR/HEAD) || die "Bad HEAD - I need a symlink" head=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD) ||
die "Bad HEAD - I need a symbolic ref"
case "$head" in case "$head" in
refs/heads/bisect*) refs/heads/bisect*)
git checkout master || exit git checkout master || exit
@ -46,7 +47,7 @@ bisect_start() {
refs/heads/*) refs/heads/*)
;; ;;
*) *)
die "Bad HEAD - strange symlink" die "Bad HEAD - strange symbolic ref"
;; ;;
esac esac
@ -135,7 +136,7 @@ bisect_next() {
echo "$rev" > "$GIT_DIR/refs/heads/new-bisect" echo "$rev" > "$GIT_DIR/refs/heads/new-bisect"
git checkout new-bisect || exit git checkout new-bisect || exit
mv "$GIT_DIR/refs/heads/new-bisect" "$GIT_DIR/refs/heads/bisect" && mv "$GIT_DIR/refs/heads/new-bisect" "$GIT_DIR/refs/heads/bisect" &&
ln -sf refs/heads/bisect "$GIT_DIR/HEAD" GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD refs/heads/bisect
git-show-branch "$rev" git-show-branch "$rev"
} }
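
The readlink-to-symbolic-ref switch matters because HEAD is no longer guaranteed to be a symlink; the plumbing reads and writes it either way. A sketch, not taken from the commit:

    readlink "$GIT_DIR/HEAD"                 # only works for symlink HEADs
    git symbolic-ref HEAD                    # works for symlink and "ref: ..." HEADs
    git symbolic-ref HEAD refs/heads/bisect  # repoint HEAD without ln -sf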


@ -14,7 +14,8 @@ If two arguments, create a new branch <branchname> based off of <start-point>.
delete_branch () { delete_branch () {
option="$1" branch_name="$2" option="$1" branch_name="$2"
headref=$(readlink "$GIT_DIR/HEAD" | sed -e 's|^refs/heads/||') headref=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD |
sed -e 's|^refs/heads/||')
case ",$headref," in case ",$headref," in
",$branch_name,") ",$branch_name,")
die "Cannot delete the branch you are on." ;; die "Cannot delete the branch you are on." ;;
@ -67,7 +68,8 @@ done
case "$#" in case "$#" in
0) 0)
headref=$(readlink "$GIT_DIR/HEAD" | sed -e 's|^refs/heads/||') headref=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD |
sed -e 's|^refs/heads/||')
git-rev-parse --symbolic --all | git-rev-parse --symbolic --all |
sed -ne 's|^refs/heads/||p' | sed -ne 's|^refs/heads/||p' |
sort | sort |


@ -71,7 +71,8 @@ if [ "$?" -eq 0 ]; then
echo $new > "$GIT_DIR/refs/heads/$newbranch" echo $new > "$GIT_DIR/refs/heads/$newbranch"
branch="$newbranch" branch="$newbranch"
fi fi
[ "$branch" ] && ln -sf "refs/heads/$branch" "$GIT_DIR/HEAD" [ "$branch" ] &&
GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD "refs/heads/$branch"
rm -f "$GIT_DIR/MERGE_HEAD" rm -f "$GIT_DIR/MERGE_HEAD"
else else
exit 1 exit 1


@ -153,15 +153,8 @@ if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
fi >>.editmsg fi >>.editmsg
PARENTS="-p HEAD" PARENTS="-p HEAD"
if [ ! -r "$GIT_DIR/HEAD" ]; then if GIT_DIR="$GIT_DIR" git-rev-parse --verify HEAD >/dev/null 2>&1
if [ -z "$(git-ls-files)" ]; then then
echo Nothing to commit 1>&2
exit 1
fi
PARENTS=""
current=
else
current=$(git-rev-parse --verify HEAD)
if [ -f "$GIT_DIR/MERGE_HEAD" ]; then if [ -f "$GIT_DIR/MERGE_HEAD" ]; then
PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"` PARENTS="-p HEAD "`sed -e 's/^/-p /' "$GIT_DIR/MERGE_HEAD"`
fi fi
@ -194,6 +187,12 @@ else
export GIT_AUTHOR_EMAIL export GIT_AUTHOR_EMAIL
export GIT_AUTHOR_DATE export GIT_AUTHOR_DATE
fi fi
else
if [ -z "$(git-ls-files)" ]; then
echo Nothing to commit 1>&2
exit 1
fi
PARENTS=""
fi fi
git-status >>.editmsg git-status >>.editmsg
if [ "$?" != "0" -a ! -f $GIT_DIR/MERGE_HEAD ] if [ "$?" != "0" -a ! -f $GIT_DIR/MERGE_HEAD ]
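
The rewritten test distinguishes an initial commit from a normal one without poking at the HEAD file directly; standalone, the check looks like this (sketch):

    if git rev-parse --verify HEAD >/dev/null 2>&1
    then
        echo "HEAD resolves: the new commit will have a parent"
    else
        echo "no HEAD yet: this will be the initial commit"
    fi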


@ -510,7 +510,7 @@ unless($pid) {
my $state = 0; my $state = 0;
my($patchset,$date,$author,$branch,$ancestor,$tag,$logmsg); my($patchset,$date,$author_name,$author_email,$branch,$ancestor,$tag,$logmsg);
my(@old,@new); my(@old,@new);
my $commit = sub { my $commit = sub {
my $pid; my $pid;
@ -591,11 +591,11 @@ my $commit = sub {
} }
exec("env", exec("env",
"GIT_AUTHOR_NAME=$author", "GIT_AUTHOR_NAME=$author_name",
"GIT_AUTHOR_EMAIL=$author", "GIT_AUTHOR_EMAIL=$author_email",
"GIT_AUTHOR_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)), "GIT_AUTHOR_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
"GIT_COMMITTER_NAME=$author", "GIT_COMMITTER_NAME=$author_name",
"GIT_COMMITTER_EMAIL=$author", "GIT_COMMITTER_EMAIL=$author_email",
"GIT_COMMITTER_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)), "GIT_COMMITTER_DATE=".strftime("+0000 %Y-%m-%d %H:%M:%S",gmtime($date)),
"git-commit-tree", $tree,@par); "git-commit-tree", $tree,@par);
die "Cannot exec git-commit-tree: $!\n"; die "Cannot exec git-commit-tree: $!\n";
@ -638,7 +638,7 @@ my $commit = sub {
print $out "object $cid\n". print $out "object $cid\n".
"type commit\n". "type commit\n".
"tag $xtag\n". "tag $xtag\n".
"tagger $author <$author>\n" "tagger $author_name <$author_email>\n"
or die "Cannot create tag object $xtag: $!\n"; or die "Cannot create tag object $xtag: $!\n";
close($out) close($out)
or die "Cannot create tag object $xtag: $!\n"; or die "Cannot create tag object $xtag: $!\n";
@ -683,7 +683,11 @@ while(<CVS>) {
$state=3; $state=3;
} elsif($state == 3 and s/^Author:\s+//) { } elsif($state == 3 and s/^Author:\s+//) {
s/\s+$//; s/\s+$//;
$author = $_; if (/^(.*?)\s+<(.*)>/) {
($author_name, $author_email) = ($1, $2);
} else {
$author_name = $author_email = $_;
}
$state = 4; $state = 4;
} elsif($state == 4 and s/^Branch:\s+//) { } elsif($state == 4 and s/^Branch:\s+//) {
s/\s+$//; s/\s+$//;


@ -5,6 +5,7 @@
_x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]' _x40='[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]'
_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40" _x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
tags=
append= append=
force= force=
update_head_ok= update_head_ok=
@ -17,6 +18,9 @@ do
-f|--f|--fo|--for|--forc|--force) -f|--f|--fo|--for|--forc|--force)
force=t force=t
;; ;;
-t|--t|--ta|--tag|--tags)
tags=t
;;
-u|--u|--up|--upd|--upda|--updat|--update|--update-|--update-h|\ -u|--u|--up|--upd|--upda|--updat|--update|--update-|--update-h|\
--update-he|--update-hea|--update-head|--update-head-|\ --update-he|--update-hea|--update-head|--update-head-|\
--update-head-o|--update-head-ok) --update-head-o|--update-head-ok)
@ -158,7 +162,26 @@ case "$update_head_ok" in
;; ;;
esac esac
for ref in $(get_remote_refs_for_fetch "$@") # If --tags (and later --heads or --all) is specified, then we are
# not talking about defaults stored in Pull: line of remotes or
# branches file, and just fetch those and refspecs explicitly given.
# Otherwise we do what we always did.
reflist=$(get_remote_refs_for_fetch "$@")
if test "$tags"
then
taglist=$(git-ls-remote --tags "$remote" | awk '{ print "."$2":"$2 }')
if test "$#" -gt 1
then
# remote URL plus explicit refspecs; we need to merge them.
reflist="$reflist $taglist"
else
# No explicit refspecs; fetch tags only.
reflist=$taglist
fi
fi
for ref in $reflist
do do
refs="$refs $ref" refs="$refs $ref"


@ -7,9 +7,6 @@ from sets import Set
sys.path.append('@@GIT_PYTHON_PATH@@') sys.path.append('@@GIT_PYTHON_PATH@@')
from gitMergeCommon import * from gitMergeCommon import *
# The actual merge code
# ---------------------
originalIndexFile = os.environ.get('GIT_INDEX_FILE', originalIndexFile = os.environ.get('GIT_INDEX_FILE',
os.environ.get('GIT_DIR', '.git') + '/index') os.environ.get('GIT_DIR', '.git') + '/index')
temporaryIndexFile = os.environ.get('GIT_DIR', '.git') + \ temporaryIndexFile = os.environ.get('GIT_DIR', '.git') + \
@ -21,11 +18,23 @@ def setupIndex(temporary):
pass pass
if temporary: if temporary:
newIndex = temporaryIndexFile newIndex = temporaryIndexFile
os.environ
else: else:
newIndex = originalIndexFile newIndex = originalIndexFile
os.environ['GIT_INDEX_FILE'] = newIndex os.environ['GIT_INDEX_FILE'] = newIndex
# This is a global variable which is used in a number of places but
# only written to in the 'merge' function.
# cacheOnly == True => Don't leave any non-stage 0 entries in the cache and
# don't update the working directory.
# False => Leave unmerged entries in the cache and update
# the working directory.
cacheOnly = False
# The entry point to the merge code
# ---------------------------------
def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0): def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
'''Merge the commits h1 and h2, return the resulting virtual '''Merge the commits h1 and h2, return the resulting virtual
commit object and a flag indicating the cleaness of the merge.''' commit object and a flag indicating the cleaness of the merge.'''
@ -35,6 +44,7 @@ def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
def infoMsg(*args): def infoMsg(*args):
sys.stdout.write(' '*callDepth) sys.stdout.write(' '*callDepth)
printList(args) printList(args)
infoMsg('Merging:') infoMsg('Merging:')
infoMsg(h1) infoMsg(h1)
infoMsg(h2) infoMsg(h2)
@ -46,27 +56,27 @@ def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0):
infoMsg(x) infoMsg(x)
sys.stdout.flush() sys.stdout.flush()
Ms = ca[0] mergedCA = ca[0]
for h in ca[1:]: for h in ca[1:]:
[Ms, ignore] = merge(Ms, h, [mergedCA, dummy] = merge(mergedCA, h,
'Temporary shared merge branch 1', 'Temporary shared merge branch 1',
'Temporary shared merge branch 2', 'Temporary shared merge branch 2',
graph, callDepth+1) graph, callDepth+1)
assert(isinstance(Ms, Commit)) assert(isinstance(mergedCA, Commit))
global cacheOnly
if callDepth == 0: if callDepth == 0:
setupIndex(False) setupIndex(False)
cleanCache = False cacheOnly = False
else: else:
setupIndex(True) setupIndex(True)
runProgram(['git-read-tree', h1.tree()]) runProgram(['git-read-tree', h1.tree()])
cleanCache = True cacheOnly = True
[shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), Ms.tree(), [shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), mergedCA.tree(),
branch1Name, branch2Name, branch1Name, branch2Name)
cleanCache)
if clean or cleanCache: if clean or cacheOnly:
res = Commit(None, [h1, h2], tree=shaRes) res = Commit(None, [h1, h2], tree=shaRes)
graph.addNode(res) graph.addNode(res)
else: else:
@ -89,49 +99,14 @@ def getFilesAndDirs(tree):
return [files, dirs] return [files, dirs]
class CacheEntry: # Those two global variables are used in a number of places but only
def __init__(self, path): # written to in 'mergeTrees' and 'uniquePath'. They keep track of
class Stage: # every file and directory in the two branches that are about to be
def __init__(self): # merged.
self.sha1 = None currentFileSet = None
self.mode = None currentDirectorySet = None
self.stages = [Stage(), Stage(), Stage()]
self.path = path
unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S) def mergeTrees(head, merge, common, branch1Name, branch2Name):
def unmergedCacheEntries():
'''Create a dictionary mapping file names to CacheEntry
objects. The dictionary contains one entry for every path with a
non-zero stage entry.'''
lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0')
lines.pop()
res = {}
for l in lines:
m = unmergedRE.match(l)
if m:
mode = int(m.group(1), 8)
sha1 = m.group(2)
stage = int(m.group(3)) - 1
path = m.group(4)
if res.has_key(path):
e = res[path]
else:
e = CacheEntry(path)
res[path] = e
e.stages[stage].mode = mode
e.stages[stage].sha1 = sha1
else:
die('Error: Merge program failed: Unexpected output from', \
'git-ls-files:', l)
return res
def mergeTrees(head, merge, common, branch1Name, branch2Name,
cleanCache):
'''Merge the trees 'head' and 'merge' with the common ancestor '''Merge the trees 'head' and 'merge' with the common ancestor
'common'. The name of the head branch is 'branch1Name' and the name of 'common'. The name of the head branch is 'branch1Name' and the name of
the merge branch is 'branch2Name'. Return a tuple (tree, cleanMerge) the merge branch is 'branch2Name'. Return a tuple (tree, cleanMerge)
@ -144,33 +119,38 @@ def mergeTrees(head, merge, common, branch1Name, branch2Name,
print 'Already uptodate!' print 'Already uptodate!'
return [head, True] return [head, True]
if cleanCache: if cacheOnly:
updateArg = '-i' updateArg = '-i'
else: else:
updateArg = '-u' updateArg = '-u'
[out, code] = runProgram(['git-read-tree', updateArg, '-m', common, head, merge], returnCode = True) [out, code] = runProgram(['git-read-tree', updateArg, '-m',
common, head, merge], returnCode = True)
if code != 0: if code != 0:
die('git-read-tree:', out) die('git-read-tree:', out)
cleanMerge = True
[tree, code] = runProgram('git-write-tree', returnCode=True) [tree, code] = runProgram('git-write-tree', returnCode=True)
tree = tree.rstrip() tree = tree.rstrip()
if code != 0: if code != 0:
[files, dirs] = getFilesAndDirs(head) global currentFileSet, currentDirectorySet
[currentFileSet, currentDirectorySet] = getFilesAndDirs(head)
[filesM, dirsM] = getFilesAndDirs(merge) [filesM, dirsM] = getFilesAndDirs(merge)
files.union_update(filesM) currentFileSet.union_update(filesM)
dirs.union_update(dirsM) currentDirectorySet.union_update(dirsM)
cleanMerge = True
entries = unmergedCacheEntries() entries = unmergedCacheEntries()
for name in entries: renamesHead = getRenames(head, common, head, merge, entries)
if not processEntry(entries[name], branch1Name, branch2Name, renamesMerge = getRenames(merge, common, head, merge, entries)
files, dirs, cleanCache):
cleanMerge = processRenames(renamesHead, renamesMerge,
branch1Name, branch2Name)
for entry in entries:
if entry.processed:
continue
if not processEntry(entry, branch1Name, branch2Name):
cleanMerge = False cleanMerge = False
if cleanMerge or cleanCache: if cleanMerge or cacheOnly:
tree = runProgram('git-write-tree').rstrip() tree = runProgram('git-write-tree').rstrip()
else: else:
tree = None tree = None
@ -179,84 +159,538 @@ def mergeTrees(head, merge, common, branch1Name, branch2Name,
return [tree, cleanMerge] return [tree, cleanMerge]
def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache): # Low level file merging, update and removal
'''Merge one cache entry. 'files' is a Set with the files in both of # ------------------------------------------
the heads that we are going to merge. 'dirs' contains the
corresponding data for directories. If 'cleanCache' is True no
non-zero stages will be left in the cache for the path
corresponding to the entry 'entry'.'''
# cleanCache == True => Don't leave any non-stage 0 entries in the cache and def mergeFile(oPath, oSha, oMode, aPath, aSha, aMode, bPath, bSha, bMode,
# don't update the working directory branch1Name, branch2Name):
# False => Leave unmerged entries and update the working directory
# clean == True => non-conflict case merge = False
# False => conflict case clean = True
# If cleanCache == False then the cache shouldn't be updated if clean == False if stat.S_IFMT(aMode) != stat.S_IFMT(bMode):
clean = False
if stat.S_ISREG(aMode):
mode = aMode
sha = aSha
else:
mode = bMode
sha = bSha
else:
if aSha != oSha and bSha != oSha:
merge = True
def updateFile(clean, sha, mode, path, onlyWd=False): if aMode == oMode:
updateCache = not onlyWd and (cleanCache or (not cleanCache and clean)) mode = bMode
updateWd = onlyWd or (not cleanCache and clean) else:
mode = aMode
if updateWd: if aSha == oSha:
prog = ['git-cat-file', 'blob', sha] sha = bSha
if stat.S_ISREG(mode): elif bSha == oSha:
sha = aSha
elif stat.S_ISREG(aMode):
assert(stat.S_ISREG(bMode))
orig = runProgram(['git-unpack-file', oSha]).rstrip()
src1 = runProgram(['git-unpack-file', aSha]).rstrip()
src2 = runProgram(['git-unpack-file', bSha]).rstrip()
[out, code] = runProgram(['merge',
'-L', branch1Name + '/' + aPath,
'-L', 'orig/' + oPath,
'-L', branch2Name + '/' + bPath,
src1, orig, src2], returnCode=True)
sha = runProgram(['git-hash-object', '-t', 'blob', '-w',
src1]).rstrip()
os.unlink(orig)
os.unlink(src1)
os.unlink(src2)
clean = (code == 0)
else:
assert(stat.S_ISLNK(aMode) and stat.S_ISLNK(bMode))
sha = aSha
if aSha != bSha:
clean = False
return [sha, mode, clean, merge]
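
mergeFile() above shells out to plumbing you can run by hand; a sketch in which $base, $ours and $theirs are blob ids taken from the three index stages (placeholders, not from the commit):

    orig=$(git unpack-file "$base")      # $base/$ours/$theirs: stage 1/2/3 blobs
    src1=$(git unpack-file "$ours")
    src2=$(git unpack-file "$theirs")
    merge -L ours -L orig -L theirs "$src1" "$orig" "$src2"   # RCS merge(1)
    git hash-object -t blob -w "$src1"                        # resulting blob
    rm -f "$orig" "$src1" "$src2"
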
def updateFile(clean, sha, mode, path):
updateCache = cacheOnly or clean
updateWd = not cacheOnly
return updateFileExt(sha, mode, path, updateCache, updateWd)
def updateFileExt(sha, mode, path, updateCache, updateWd):
if cacheOnly:
updateWd = False
if updateWd:
pathComponents = path.split('/')
for x in xrange(1, len(pathComponents)):
p = '/'.join(pathComponents[0:x])
try:
createDir = not stat.S_ISDIR(os.lstat(p).st_mode)
except:
createDir = True
if createDir:
try: try:
os.unlink(path) os.mkdir(p)
except OSError: except OSError, e:
pass die("Couldn't create directory", p, e.strerror)
if mode & 0100:
mode = 0777
else:
mode = 0666
fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode)
proc = subprocess.Popen(prog, stdout=fd)
proc.wait()
os.close(fd)
elif stat.S_ISLNK(mode):
linkTarget = runProgram(prog)
os.symlink(linkTarget, path)
else:
assert(False)
if updateWd and updateCache: prog = ['git-cat-file', 'blob', sha]
runProgram(['git-update-index', '--add', '--', path]) if stat.S_ISREG(mode):
elif updateCache:
runProgram(['git-update-index', '--add', '--cacheinfo',
'0%o' % mode, sha, path])
def removeFile(clean, path):
if cleanCache or (not cleanCache and clean):
runProgram(['git-update-index', '--force-remove', '--', path])
if not cleanCache and clean:
try: try:
os.unlink(path) os.unlink(path)
except OSError, e: except OSError:
if e.errno != errno.ENOENT and e.errno != errno.EISDIR: pass
raise if mode & 0100:
mode = 0777
else:
mode = 0666
fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode)
proc = subprocess.Popen(prog, stdout=fd)
proc.wait()
os.close(fd)
elif stat.S_ISLNK(mode):
linkTarget = runProgram(prog)
os.symlink(linkTarget, path)
else:
assert(False)
def uniquePath(path, branch): if updateWd and updateCache:
newPath = path + '_' + branch runProgram(['git-update-index', '--add', '--', path])
suffix = 0 elif updateCache:
while newPath in files or newPath in dirs: runProgram(['git-update-index', '--add', '--cacheinfo',
suffix += 1 '0%o' % mode, sha, path])
newPath = path + '_' + branch + '_' + str(suffix)
files.add(newPath)
return newPath
debug('processing', entry.path, 'clean cache:', cleanCache) def removeFile(clean, path):
updateCache = cacheOnly or clean
updateWd = not cacheOnly
if updateCache:
runProgram(['git-update-index', '--force-remove', '--', path])
if updateWd:
try:
os.unlink(path)
except OSError, e:
if e.errno != errno.ENOENT and e.errno != errno.EISDIR:
raise
def uniquePath(path, branch):
def fileExists(path):
try:
os.lstat(path)
return True
except OSError, e:
if e.errno == errno.ENOENT:
return False
else:
raise
newPath = path + '_' + branch
suffix = 0
while newPath in currentFileSet or \
newPath in currentDirectorySet or \
fileExists(newPath):
suffix += 1
newPath = path + '_' + branch + '_' + str(suffix)
currentFileSet.add(newPath)
return newPath
# Cache entry management
# ----------------------
class CacheEntry:
def __init__(self, path):
class Stage:
def __init__(self):
self.sha1 = None
self.mode = None
# Used for debugging only
def __str__(self):
if self.mode != None:
m = '0%o' % self.mode
else:
m = 'None'
if self.sha1:
sha1 = self.sha1
else:
sha1 = 'None'
return 'sha1: ' + sha1 + ' mode: ' + m
self.stages = [Stage(), Stage(), Stage(), Stage()]
self.path = path
self.processed = False
def __str__(self):
return 'path: ' + self.path + ' stages: ' + repr([str(x) for x in self.stages])
class CacheEntryContainer:
def __init__(self):
self.entries = {}
def add(self, entry):
self.entries[entry.path] = entry
def get(self, path):
return self.entries.get(path)
def __iter__(self):
return self.entries.itervalues()
unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S)
def unmergedCacheEntries():
'''Create a dictionary mapping file names to CacheEntry
objects. The dictionary contains one entry for every path with a
non-zero stage entry.'''
lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0')
lines.pop()
res = CacheEntryContainer()
for l in lines:
m = unmergedRE.match(l)
if m:
mode = int(m.group(1), 8)
sha1 = m.group(2)
stage = int(m.group(3))
path = m.group(4)
e = res.get(path)
if not e:
e = CacheEntry(path)
res.add(e)
e.stages[stage].mode = mode
e.stages[stage].sha1 = sha1
else:
die('Error: Merge program failed: Unexpected output from',
'git-ls-files:', l)
return res
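
The input parsed here is plain git-ls-files plumbing output, one record per (mode, sha1, stage, path), with stages 1/2/3 being base/ours/theirs. To inspect it during a conflicted merge (sketch):

    # Same data unmergedCacheEntries() reads, made line-oriented for viewing.
    git ls-files -z --unmerged | tr '\0' '\n'
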
lsTreeRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)\n$', re.S)
def getCacheEntry(path, origTree, aTree, bTree):
'''Returns a CacheEntry object which doesn't have to correspond to
a real cache entry in Git's index.'''
def parse(out):
if out == '':
return [None, None]
else:
m = lsTreeRE.match(out)
if not m:
die('Unexpected output from git-ls-tree:', out)
elif m.group(2) == 'blob':
return [m.group(3), int(m.group(1), 8)]
else:
return [None, None]
res = CacheEntry(path)
[oSha, oMode] = parse(runProgram(['git-ls-tree', origTree, '--', path]))
[aSha, aMode] = parse(runProgram(['git-ls-tree', aTree, '--', path]))
[bSha, bMode] = parse(runProgram(['git-ls-tree', bTree, '--', path]))
res.stages[1].sha1 = oSha
res.stages[1].mode = oMode
res.stages[2].sha1 = aSha
res.stages[2].mode = aMode
res.stages[3].sha1 = bSha
res.stages[3].mode = bMode
return res
# Rename detection and handling
# -----------------------------
class RenameEntry:
def __init__(self,
src, srcSha, srcMode, srcCacheEntry,
dst, dstSha, dstMode, dstCacheEntry,
score):
self.srcName = src
self.srcSha = srcSha
self.srcMode = srcMode
self.srcCacheEntry = srcCacheEntry
self.dstName = dst
self.dstSha = dstSha
self.dstMode = dstMode
self.dstCacheEntry = dstCacheEntry
self.score = score
self.processed = False
class RenameEntryContainer:
def __init__(self):
self.entriesSrc = {}
self.entriesDst = {}
def add(self, entry):
self.entriesSrc[entry.srcName] = entry
self.entriesDst[entry.dstName] = entry
def getSrc(self, path):
return self.entriesSrc.get(path)
def getDst(self, path):
return self.entriesDst.get(path)
def __iter__(self):
return self.entriesSrc.itervalues()
parseDiffRenamesRE = re.compile('^:([0-7]+) ([0-7]+) ([0-9a-f]{40}) ([0-9a-f]{40}) R([0-9]*)$')
def getRenames(tree, oTree, aTree, bTree, cacheEntries):
'''Get information of all renames which occured between 'oTree' and
'tree'. We need the three trees in the merge ('oTree', 'aTree' and
'bTree') to be able to associate the correct cache entries with
the rename information. 'tree' is always equal to either aTree or bTree.'''
assert(tree == aTree or tree == bTree)
inp = runProgram(['git-diff-tree', '-M', '--diff-filter=R', '-r',
'-z', oTree, tree])
ret = RenameEntryContainer()
try:
recs = inp.split("\0")
recs.pop() # remove last entry (which is '')
it = recs.__iter__()
while True:
rec = it.next()
m = parseDiffRenamesRE.match(rec)
if not m:
die('Unexpected output from git-diff-tree:', rec)
srcMode = int(m.group(1), 8)
dstMode = int(m.group(2), 8)
srcSha = m.group(3)
dstSha = m.group(4)
score = m.group(5)
src = it.next()
dst = it.next()
srcCacheEntry = cacheEntries.get(src)
if not srcCacheEntry:
srcCacheEntry = getCacheEntry(src, oTree, aTree, bTree)
cacheEntries.add(srcCacheEntry)
dstCacheEntry = cacheEntries.get(dst)
if not dstCacheEntry:
dstCacheEntry = getCacheEntry(dst, oTree, aTree, bTree)
cacheEntries.add(dstCacheEntry)
ret.add(RenameEntry(src, srcSha, srcMode, srcCacheEntry,
dst, dstSha, dstMode, dstCacheEntry,
score))
except StopIteration:
pass
return ret
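
The rename records consumed by getRenames() come from git-diff-tree with rename detection switched on; assuming the repository has at least two commits, the raw format can be inspected with:

    # ":mode mode sha sha R<score>" header, then the source and destination paths.
    git diff-tree -M --diff-filter=R -r -z HEAD^ HEAD | tr '\0' '\n'
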
def fmtRename(src, dst):
srcPath = src.split('/')
dstPath = dst.split('/')
path = []
endIndex = min(len(srcPath), len(dstPath)) - 1
for x in range(0, endIndex):
if srcPath[x] == dstPath[x]:
path.append(srcPath[x])
else:
endIndex = x
break
if len(path) > 0:
return '/'.join(path) + \
'/{' + '/'.join(srcPath[endIndex:]) + ' => ' + \
'/'.join(dstPath[endIndex:]) + '}'
else:
return src + ' => ' + dst
def processRenames(renamesA, renamesB, branchNameA, branchNameB):
srcNames = Set()
for x in renamesA:
srcNames.add(x.srcName)
for x in renamesB:
srcNames.add(x.srcName)
cleanMerge = True
for path in srcNames:
if renamesA.getSrc(path):
renames1 = renamesA
renames2 = renamesB
branchName1 = branchNameA
branchName2 = branchNameB
else:
renames1 = renamesB
renames2 = renamesA
branchName1 = branchNameB
branchName2 = branchNameA
ren1 = renames1.getSrc(path)
ren2 = renames2.getSrc(path)
ren1.dstCacheEntry.processed = True
ren1.srcCacheEntry.processed = True
if ren1.processed:
continue
ren1.processed = True
removeFile(True, ren1.srcName)
if ren2:
# Renamed in 1 and renamed in 2
assert(ren1.srcName == ren2.srcName)
ren2.dstCacheEntry.processed = True
ren2.processed = True
if ren1.dstName != ren2.dstName:
print 'CONFLICT (rename/rename): Rename', \
fmtRename(path, ren1.dstName), 'in branch', branchName1, \
'rename', fmtRename(path, ren2.dstName), 'in', branchName2
cleanMerge = False
if ren1.dstName in currentDirectorySet:
dstName1 = uniquePath(ren1.dstName, branchName1)
print ren1.dstName, 'is a directory in', branchName2, \
'adding as', dstName1, 'instead.'
removeFile(False, ren1.dstName)
else:
dstName1 = ren1.dstName
if ren2.dstName in currentDirectorySet:
dstName2 = uniquePath(ren2.dstName, branchName2)
print ren2.dstName, 'is a directory in', branchName1, \
'adding as', dstName2, 'instead.'
removeFile(False, ren2.dstName)
else:
dstName2 = ren1.dstName
updateFile(False, ren1.dstSha, ren1.dstMode, dstName1)
updateFile(False, ren2.dstSha, ren2.dstMode, dstName2)
else:
print 'Renaming', fmtRename(path, ren1.dstName)
[resSha, resMode, clean, merge] = \
mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode,
ren1.dstName, ren1.dstSha, ren1.dstMode,
ren2.dstName, ren2.dstSha, ren2.dstMode,
branchName1, branchName2)
if merge:
print 'Auto-merging', ren1.dstName
if not clean:
print 'CONFLICT (content): merge conflict in', ren1.dstName
cleanMerge = False
if not cacheOnly:
updateFileExt(ren1.dstSha, ren1.dstMode, ren1.dstName,
updateCache=True, updateWd=False)
updateFile(clean, resSha, resMode, ren1.dstName)
else:
# Renamed in 1, maybe changed in 2
if renamesA == renames1:
stage = 3
else:
stage = 2
srcShaOtherBranch = ren1.srcCacheEntry.stages[stage].sha1
srcModeOtherBranch = ren1.srcCacheEntry.stages[stage].mode
dstShaOtherBranch = ren1.dstCacheEntry.stages[stage].sha1
dstModeOtherBranch = ren1.dstCacheEntry.stages[stage].mode
tryMerge = False
if ren1.dstName in currentDirectorySet:
newPath = uniquePath(ren1.dstName, branchName1)
print 'CONFLICT (rename/directory): Rename', \
fmtRename(ren1.srcName, ren1.dstName), 'in', branchName1,\
'directory', ren1.dstName, 'added in', branchName2
print 'Renaming', ren1.srcName, 'to', newPath, 'instead'
cleanMerge = False
removeFile(False, ren1.dstName)
updateFile(False, ren1.dstSha, ren1.dstMode, newPath)
elif srcShaOtherBranch == None:
print 'CONFLICT (rename/delete): Rename', \
fmtRename(ren1.srcName, ren1.dstName), 'in', \
branchName1, 'and deleted in', branchName2
cleanMerge = False
updateFile(False, ren1.dstSha, ren1.dstMode, ren1.dstName)
elif dstShaOtherBranch:
newPath = uniquePath(ren1.dstName, branchName2)
print 'CONFLICT (rename/add): Rename', \
fmtRename(ren1.srcName, ren1.dstName), 'in', \
branchName1 + '.', ren1.dstName, 'added in', branchName2
print 'Adding as', newPath, 'instead'
updateFile(False, dstShaOtherBranch, dstModeOtherBranch, newPath)
cleanMerge = False
tryMerge = True
elif renames2.getDst(ren1.dstName):
dst2 = renames2.getDst(ren1.dstName)
newPath1 = uniquePath(ren1.dstName, branchName1)
newPath2 = uniquePath(dst2.dstName, branchName2)
print 'CONFLICT (rename/rename): Rename', \
fmtRename(ren1.srcName, ren1.dstName), 'in', \
branchName1+'. Rename', \
fmtRename(dst2.srcName, dst2.dstName), 'in', branchName2
print 'Renaming', ren1.srcName, 'to', newPath1, 'and', \
dst2.srcName, 'to', newPath2, 'instead'
removeFile(False, ren1.dstName)
updateFile(False, ren1.dstSha, ren1.dstMode, newPath1)
updateFile(False, dst2.dstSha, dst2.dstMode, newPath2)
dst2.processed = True
cleanMerge = False
else:
tryMerge = True
if tryMerge:
print 'Renaming', fmtRename(ren1.srcName, ren1.dstName)
[resSha, resMode, clean, merge] = \
mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode,
ren1.dstName, ren1.dstSha, ren1.dstMode,
ren1.srcName, srcShaOtherBranch, srcModeOtherBranch,
branchName1, branchName2)
if merge:
print 'Auto-merging', ren1.dstName
if not clean:
print 'CONFLICT (rename/modify): Merge conflict in', ren1.dstName
cleanMerge = False
if not cacheOnly:
updateFileExt(ren1.dstSha, ren1.dstMode, ren1.dstName,
updateCache=True, updateWd=False)
updateFile(clean, resSha, resMode, ren1.dstName)
return cleanMerge
# Per entry merge function
# ------------------------
def processEntry(entry, branch1Name, branch2Name):
'''Merge one cache entry.'''
debug('processing', entry.path, 'clean cache:', cacheOnly)
cleanMerge = True cleanMerge = True
path = entry.path path = entry.path
oSha = entry.stages[0].sha1 oSha = entry.stages[1].sha1
oMode = entry.stages[0].mode oMode = entry.stages[1].mode
aSha = entry.stages[1].sha1 aSha = entry.stages[2].sha1
aMode = entry.stages[1].mode aMode = entry.stages[2].mode
bSha = entry.stages[2].sha1 bSha = entry.stages[3].sha1
bMode = entry.stages[2].mode bMode = entry.stages[3].mode
assert(oSha == None or isSha(oSha)) assert(oSha == None or isSha(oSha))
assert(aSha == None or isSha(aSha)) assert(aSha == None or isSha(aSha))
@ -275,28 +709,26 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
(not aSha and bSha == oSha): (not aSha and bSha == oSha):
# Deleted in both or deleted in one and unchanged in the other # Deleted in both or deleted in one and unchanged in the other
if aSha: if aSha:
print 'Removing ' + path print 'Removing', path
removeFile(True, path) removeFile(True, path)
else: else:
# Deleted in one and changed in the other # Deleted in one and changed in the other
cleanMerge = False cleanMerge = False
if not aSha: if not aSha:
print 'CONFLICT (del/mod): "' + path + '" deleted in', \ print 'CONFLICT (delete/modify):', path, 'deleted in', \
branch1Name, 'and modified in', branch2Name, \ branch1Name, 'and modified in', branch2Name + '.', \
'. Version', branch2Name, ' of "' + path + \ 'Version', branch2Name, 'of', path, 'left in tree.'
'" left in tree'
mode = bMode mode = bMode
sha = bSha sha = bSha
else: else:
print 'CONFLICT (mod/del): "' + path + '" deleted in', \ print 'CONFLICT (modify/delete):', path, 'deleted in', \
branch2Name, 'and modified in', branch1Name + \ branch2Name, 'and modified in', branch1Name + '.', \
'. Version', branch1Name, 'of "' + path + \ 'Version', branch1Name, 'of', path, 'left in tree.'
'" left in tree'
mode = aMode mode = aMode
sha = aSha sha = aSha
updateFile(False, sha, mode, path) updateFile(False, sha, mode, path)
elif (not oSha and aSha and not bSha) or \ elif (not oSha and aSha and not bSha) or \
(not oSha and not aSha and bSha): (not oSha and not aSha and bSha):
# #
@ -307,27 +739,26 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
otherBranch = branch2Name otherBranch = branch2Name
mode = aMode mode = aMode
sha = aSha sha = aSha
conf = 'file/dir' conf = 'file/directory'
else: else:
addBranch = branch2Name addBranch = branch2Name
otherBranch = branch1Name otherBranch = branch1Name
mode = bMode mode = bMode
sha = bSha sha = bSha
conf = 'dir/file' conf = 'directory/file'
if path in dirs: if path in currentDirectorySet:
cleanMerge = False cleanMerge = False
newPath = uniquePath(path, addBranch) newPath = uniquePath(path, addBranch)
print 'CONFLICT (' + conf + \ print 'CONFLICT (' + conf + '):', \
'): There is a directory with name "' + path + '" in', \ 'There is a directory with name', path, 'in', \
otherBranch + '. Adding "' + path + '" as "' + newPath + '"' otherBranch + '. Adding', path, 'as', newPath
removeFile(False, path) removeFile(False, path)
path = newPath updateFile(False, sha, mode, newPath)
else: else:
print 'Adding "' + path + '"' print 'Adding', path
updateFile(True, sha, mode, path)
updateFile(True, sha, mode, path)
elif not oSha and aSha and bSha: elif not oSha and aSha and bSha:
# #
@ -336,10 +767,9 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
if aSha == bSha: if aSha == bSha:
if aMode != bMode: if aMode != bMode:
cleanMerge = False cleanMerge = False
print 'CONFLICT: File "' + path + \ print 'CONFLICT: File', path, \
'" added identically in both branches,', \ 'added identically in both branches, but permissions', \
'but permissions conflict', '0%o' % aMode, '->', \ 'conflict', '0%o' % aMode, '->', '0%o' % bMode
'0%o' % bMode
print 'CONFLICT: adding with permission:', '0%o' % aMode print 'CONFLICT: adding with permission:', '0%o' % aMode
updateFile(False, aSha, aMode, path) updateFile(False, aSha, aMode, path)
@ -350,8 +780,9 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
cleanMerge = False cleanMerge = False
newPath1 = uniquePath(path, branch1Name) newPath1 = uniquePath(path, branch1Name)
newPath2 = uniquePath(path, branch2Name) newPath2 = uniquePath(path, branch2Name)
print 'CONFLICT (add/add): File "' + path + \ print 'CONFLICT (add/add): File', path, \
'" added non-identically in both branches.' 'added non-identically in both branches. Adding as', \
newPath1, 'and', newPath2, 'instead.'
removeFile(False, path) removeFile(False, path)
updateFile(False, aSha, aMode, newPath1) updateFile(False, aSha, aMode, newPath1)
updateFile(False, bSha, bMode, newPath2) updateFile(False, bSha, bMode, newPath2)
@ -360,39 +791,24 @@ def processEntry(entry, branch1Name, branch2Name, files, dirs, cleanCache):
# #
# case D: Modified in both, but differently. # case D: Modified in both, but differently.
# #
print 'Auto-merging', path print 'Auto-merging', path
orig = runProgram(['git-unpack-file', oSha]).rstrip() [sha, mode, clean, dummy] = \
src1 = runProgram(['git-unpack-file', aSha]).rstrip() mergeFile(path, oSha, oMode,
src2 = runProgram(['git-unpack-file', bSha]).rstrip() path, aSha, aMode,
[out, ret] = runProgram(['merge', path, bSha, bMode,
'-L', branch1Name + '/' + path, branch1Name, branch2Name)
'-L', 'orig/' + path, if clean:
'-L', branch2Name + '/' + path, updateFile(True, sha, mode, path)
src1, orig, src2], returnCode=True)
if aMode == oMode:
mode = bMode
else: else:
mode = aMode
sha = runProgram(['git-hash-object', '-t', 'blob', '-w',
src1]).rstrip()
if ret != 0:
cleanMerge = False cleanMerge = False
print 'CONFLICT (content): Merge conflict in "' + path + '".' print 'CONFLICT (content): Merge conflict in', path
if cleanCache: if cacheOnly:
updateFile(False, sha, mode, path) updateFile(False, sha, mode, path)
else: else:
updateFile(True, aSha, aMode, path) updateFileExt(aSha, aMode, path,
updateFile(False, sha, mode, path, True) updateCache=True, updateWd=False)
else: updateFileExt(sha, mode, path, updateCache=False, updateWd=True)
updateFile(True, sha, mode, path)
os.unlink(orig)
os.unlink(src1)
os.unlink(src2)
else: else:
die("ERROR: Fatal merge failure, shouldn't happen.") die("ERROR: Fatal merge failure, shouldn't happen.")
@ -416,7 +832,7 @@ for nextArg in xrange(1, len(sys.argv)):
try: try:
h1 = firstBranch = sys.argv[nextArg + 1] h1 = firstBranch = sys.argv[nextArg + 1]
h2 = secondBranch = sys.argv[nextArg + 2] h2 = secondBranch = sys.argv[nextArg + 2]
except IndexError: except IndexError:
usage() usage()
break break
@ -428,8 +844,8 @@ try:
graph = buildGraph([h1, h2]) graph = buildGraph([h1, h2])
[res, clean] = merge(graph.shaMap[h1], graph.shaMap[h2], [dummy, clean] = merge(graph.shaMap[h1], graph.shaMap[h2],
firstBranch, secondBranch, graph) firstBranch, secondBranch, graph)
print '' print ''
except: except:


@ -123,10 +123,30 @@ case "$#,$common" in
dropsave dropsave
exit 0 exit 0
;; ;;
1,*) 1,?*"$LF"?*)
# We are not doing octopus and not fast forward. Need a # We are not doing octopus and not fast forward. Need a
# real merge. # real merge.
;; ;;
1,*)
# We are not doing octopus, not fast forward, and have only
# one common. See if it is really trivial.
echo "Trying really trivial in-index merge..."
git-update-index --refresh 2>/dev/null
if git-read-tree --trivial -m -u $common $head "$1" &&
result_tree=$(git-write-tree)
then
echo "Wonderful."
result_commit=$(
echo "$merge_msg" |
git-commit-tree $result_tree -p HEAD -p "$1"
) || exit
git-update-ref HEAD $result_commit $head
summary $result_commit
dropsave
exit 0
fi
echo "Nope."
;;
*) *)
# An octopus. If we can reach all the remote we are up to date. # An octopus. If we can reach all the remote we are up to date.
up_to_date=t up_to_date=t
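
The "really trivial" path is just read-tree's trivial merge plus a commit; stripped of the script plumbing it is roughly the following, where $common and $other name the merge base and the branch being merged, as in the script:

    git update-index --refresh
    if git read-tree --trivial -m -u "$common" HEAD "$other" &&
       result_tree=$(git write-tree)
    then
        echo "trivially merged into tree $result_tree"
    fi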


@ -13,10 +13,13 @@
unset CDPATH unset CDPATH
die() { die() {
echo "$@" >&2 echo >&2 "$@"
exit 1 exit 1
} }
[ -h "$GIT_DIR/HEAD" ] && case "$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD 2>/dev/null)" in
refs/*) : ;;
*) false ;;
esac &&
[ -d "$GIT_DIR/refs" ] && [ -d "$GIT_DIR/refs" ] &&
[ -d "$GIT_OBJECT_DIRECTORY/00" ] [ -d "$GIT_OBJECT_DIRECTORY/00" ]


@ -31,15 +31,15 @@ report () {
[ "$header" ] [ "$header" ]
} }
branch=`readlink "$GIT_DIR/HEAD"` branch=$(GIT_DIR="$GIT_DIR" git-symbolic-ref HEAD)
case "$branch" in case "$branch" in
refs/heads/master) ;; refs/heads/master) ;;
*) echo "# On branch $branch" ;; *) echo "# On branch $branch" ;;
esac esac
git-update-index --refresh >/dev/null 2>&1 git-update-index -q --unmerged --refresh || exit
if test -f "$GIT_DIR/HEAD" if GIT_DIR="$GIT_DIR" git-rev-parse --verify HEAD >/dev/null 2>&1
then then
git-diff-index -M --cached HEAD | git-diff-index -M --cached HEAD |
sed 's/^://' | sed 's/^://' |

git.sh

@ -26,17 +26,50 @@ esac
echo "Usage: git COMMAND [OPTIONS] [TARGET]" echo "Usage: git COMMAND [OPTIONS] [TARGET]"
if [ -n "$cmd" ]; then if [ -n "$cmd" ]; then
echo " git command '$cmd' not found: commands are:" echo "git command '$cmd' not found."
else
echo " git commands are:"
fi fi
echo "git commands are:"
cat <<\EOF fmt <<\EOF | sed -e 's/^/ /'
add apply archimport bisect branch checkout cherry clone add
commit count-objects cvsimport diff fetch format-patch apply
fsck-cache get-tar-commit-id init-db log ls-remote octopus archimport
pack-objects parse-remote patch-id prune pull push rebase bisect
relink rename repack request-pull reset resolve revert branch
send-email shortlog show-branch status tag verify-tag checkout
whatchanged cherry
clone
commit
count-objects
cvsimport
diff
fetch
format-patch
fsck-objects
get-tar-commit-id
init-db
log
ls-remote
octopus
pack-objects
parse-remote
patch-id
prune
pull
push
rebase
relink
rename
repack
request-pull
reset
resolve
revert
send-email
shortlog
show-branch
status
tag
verify-tag
whatchanged
EOF EOF


@ -1,6 +1,6 @@
#include "cache.h" #include "cache.h"
#include "commit.h" #include "commit.h"
#include "pack.h"
#include "fetch.h" #include "fetch.h"
#include <curl/curl.h> #include <curl/curl.h>
@ -13,8 +13,12 @@
#define curl_global_init(a) do { /* nothing */ } while(0) #define curl_global_init(a) do { /* nothing */ } while(0)
#endif #endif
#define PREV_BUF_SIZE 4096
#define RANGE_HEADER_SIZE 30
static CURL *curl; static CURL *curl;
static struct curl_slist *no_pragma_header; static struct curl_slist *no_pragma_header;
static struct curl_slist *no_range_header;
static char curl_errorstr[CURL_ERROR_SIZE]; static char curl_errorstr[CURL_ERROR_SIZE];
static char *initial_base; static char *initial_base;
@ -87,12 +91,37 @@ void prefetch(unsigned char *sha1)
{ {
} }
int relink_or_rename(char *old, char *new) {
int ret;
ret = link(old, new);
if (ret < 0) {
/* Same Coda hack as in write_sha1_file(sha1_file.c) */
ret = errno;
if (ret == EXDEV && !rename(old, new))
return 0;
}
unlink(old);
if (ret) {
if (ret != EEXIST)
return ret;
}
return 0;
}
static int got_alternates = 0; static int got_alternates = 0;
static int fetch_index(struct alt_base *repo, unsigned char *sha1) static int fetch_index(struct alt_base *repo, unsigned char *sha1)
{ {
char *filename; char *filename;
char *url; char *url;
char tmpfile[PATH_MAX];
int ret;
long prev_posn = 0;
char range[RANGE_HEADER_SIZE];
struct curl_slist *range_header = NULL;
CURLcode curl_result;
FILE *indexfile; FILE *indexfile;
@ -108,7 +137,8 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1)
repo->base, sha1_to_hex(sha1)); repo->base, sha1_to_hex(sha1));
filename = sha1_pack_index_name(sha1); filename = sha1_pack_index_name(sha1);
indexfile = fopen(filename, "w"); snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
indexfile = fopen(tmpfile, "a");
if (!indexfile) if (!indexfile)
return error("Unable to open local file %s for pack index", return error("Unable to open local file %s for pack index",
filename); filename);
@ -119,13 +149,36 @@ static int fetch_index(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header); curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header);
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr); curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
if (curl_easy_perform(curl)) { /* If there is data present from a previous transfer attempt,
resume where it left off */
prev_posn = ftell(indexfile);
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of index for pack %s at byte %ld\n",
sha1_to_hex(sha1), prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
fclose(indexfile); fclose(indexfile);
return error("Unable to get pack index %s\n%s", url, return error("Unable to get pack index %s\n%s", url,
curl_errorstr); curl_errorstr);
} }
fclose(indexfile); fclose(indexfile);
ret = relink_or_rename(tmpfile, filename);
if (ret)
return error("unable to write index filename %s: %s",
filename, strerror(ret));
return 0; return 0;
} }
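
The resume logic is ordinary HTTP range handling: append to the .temp file and ask the server only for the remaining bytes. The same effect from the command line, with an illustrative URL and offset:

    # Continue a partial download where it stopped (curl works out the offset):
    curl -C - -o pack-1234.idx.temp http://example.com/pack/pack-1234.idx

    # Equivalent explicit header, as the C code constructs it; the offset must
    # match the current size of the .temp file:
    curl -H 'Range: bytes=4096-' http://example.com/pack/pack-1234.idx \
         >> pack-1234.idx.temp
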
@ -306,6 +359,12 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
struct packed_git **lst; struct packed_git **lst;
FILE *packfile; FILE *packfile;
char *filename; char *filename;
char tmpfile[PATH_MAX];
int ret;
long prev_posn = 0;
char range[RANGE_HEADER_SIZE];
struct curl_slist *range_header = NULL;
CURLcode curl_result;
if (fetch_indices(repo)) if (fetch_indices(repo))
return -1; return -1;
@ -325,7 +384,8 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
repo->base, sha1_to_hex(target->sha1)); repo->base, sha1_to_hex(target->sha1));
filename = sha1_pack_name(target->sha1); filename = sha1_pack_name(target->sha1);
packfile = fopen(filename, "w"); snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
packfile = fopen(tmpfile, "a");
if (!packfile) if (!packfile)
return error("Unable to open local file %s for pack", return error("Unable to open local file %s for pack",
filename); filename);
@ -336,7 +396,24 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header); curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_pragma_header);
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr); curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
if (curl_easy_perform(curl)) { /* If there is data present from a previous transfer attempt,
resume where it left off */
prev_posn = ftell(packfile);
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of pack %s at byte %ld\n",
sha1_to_hex(target->sha1), prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
fclose(packfile); fclose(packfile);
return error("Unable to get pack file %s\n%s", url, return error("Unable to get pack file %s\n%s", url,
curl_errorstr); curl_errorstr);
@ -344,11 +421,18 @@ static int fetch_pack(struct alt_base *repo, unsigned char *sha1)
fclose(packfile); fclose(packfile);
ret = relink_or_rename(tmpfile, filename);
if (ret)
return error("unable to write pack filename %s: %s",
filename, strerror(ret));
lst = &repo->packs; lst = &repo->packs;
while (*lst != target) while (*lst != target)
lst = &((*lst)->next); lst = &((*lst)->next);
*lst = (*lst)->next; *lst = (*lst)->next;
if (verify_pack(target, 0))
return -1;
install_packed_git(target); install_packed_git(target);
return 0; return 0;
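In short, fetch_pack() now downloads into "<pack-name>.temp" opened in append mode so an interrupted transfer can be resumed with the same Range request used for the index above; the file is then moved into place with relink_or_rename(), and the pack must still pass verify_pack() before install_packed_git() makes it usable.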
@ -360,14 +444,33 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
char *filename = sha1_file_name(sha1); char *filename = sha1_file_name(sha1);
unsigned char real_sha1[20]; unsigned char real_sha1[20];
char tmpfile[PATH_MAX]; char tmpfile[PATH_MAX];
char prevfile[PATH_MAX];
int ret; int ret;
char *url; char *url;
char *posn; char *posn;
int prevlocal;
unsigned char prev_buf[PREV_BUF_SIZE];
ssize_t prev_read = 0;
long prev_posn = 0;
char range[RANGE_HEADER_SIZE];
struct curl_slist *range_header = NULL;
CURLcode curl_result;
snprintf(tmpfile, sizeof(tmpfile), "%s/obj_XXXXXX", snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
get_object_directory()); snprintf(prevfile, sizeof(prevfile), "%s.prev", filename);
if (unlink(prevfile) && (errno != ENOENT))
return error("Failed to unlink %s (%s)",
prevfile, strerror(errno));
if (rename(tmpfile, prevfile) && (errno != ENOENT))
return error("Failed to rename %s to %s (%s)",
tmpfile, prevfile, strerror(errno));
local = open(tmpfile, O_WRONLY | O_CREAT | O_EXCL, 0666);
/* Note: if another instance starts now, it will turn our new
tmpfile into its prevfile. */
local = mkstemp(tmpfile);
if (local < 0) if (local < 0)
return error("Couldn't create temporary file %s for %s: %s\n", return error("Couldn't create temporary file %s for %s: %s\n",
tmpfile, filename, strerror(errno)); tmpfile, filename, strerror(errno));
@ -396,8 +499,57 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
curl_easy_setopt(curl, CURLOPT_URL, url); curl_easy_setopt(curl, CURLOPT_URL, url);
if (curl_easy_perform(curl)) { /* If a previous temp file is present, process what was already
unlink(filename); fetched. */
prevlocal = open(prevfile, O_RDONLY);
if (prevlocal != -1) {
do {
prev_read = read(prevlocal, prev_buf, PREV_BUF_SIZE);
if (prev_read>0) {
if (fwrite_sha1_file(prev_buf,
1,
prev_read,
NULL) == prev_read) {
prev_posn += prev_read;
} else {
prev_read = -1;
}
}
} while (prev_read > 0);
close(prevlocal);
}
unlink(prevfile);
/* Reset inflate/SHA1 if there was an error reading the previous temp
file; also rewind to the beginning of the local file. */
if (prev_read == -1) {
memset(&stream, 0, sizeof(stream));
inflateInit(&stream);
SHA1_Init(&c);
if (prev_posn>0) {
prev_posn = 0;
lseek(local, SEEK_SET, 0);
ftruncate(local, 0);
}
}
/* If we have successfully processed data from a previous fetch
attempt, only fetch the data we don't already have. */
if (prev_posn>0) {
if (get_verbosely)
fprintf(stderr,
"Resuming fetch of object %s at byte %ld\n",
hex, prev_posn);
sprintf(range, "Range: bytes=%ld-", prev_posn);
range_header = curl_slist_append(range_header, range);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, range_header);
}
/* Clear out the Range: header after performing the request, so
other curl requests don't inherit inappropriate header data */
curl_result = curl_easy_perform(curl);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, no_range_header);
if (curl_result != 0) {
return error("%s", curl_errorstr); return error("%s", curl_errorstr);
} }
@ -413,20 +565,11 @@ static int fetch_object(struct alt_base *repo, unsigned char *sha1)
unlink(tmpfile); unlink(tmpfile);
return error("File %s has bad hash\n", hex); return error("File %s has bad hash\n", hex);
} }
ret = link(tmpfile, filename); ret = relink_or_rename(tmpfile, filename);
if (ret < 0) { if (ret)
/* Same Coda hack as in write_sha1_file(sha1_file.c) */ return error("unable to write sha1 filename %s: %s",
ret = errno; filename, strerror(ret));
if (ret == EXDEV && !rename(tmpfile, filename))
goto out;
}
unlink(tmpfile);
if (ret) {
if (ret != EEXIST)
return error("unable to write sha1 filename %s: %s",
filename, strerror(ret));
}
out:
pull_say("got %s\n", hex); pull_say("got %s\n", hex);
return 0; return 0;
} }
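relink_or_rename() itself does not appear in these hunks. As an illustration only (the name and exact behaviour of the committed helper are assumptions here), a link-then-rename fallback consistent with the inline code it replaces could look like this:

#include <errno.h>
#include <stdio.h>
#include <unistd.h>

/* hypothetical stand-in, not the committed relink_or_rename() */
int relink_or_rename_sketch(const char *tmpfile, const char *filename)
{
	int ret = 0;

	if (link(tmpfile, filename) < 0) {
		ret = errno;
		/* cross-device: fall back to rename, the old "Coda hack" */
		if (ret == EXDEV && !rename(tmpfile, filename))
			ret = 0;
	}
	unlink(tmpfile);	/* harmless if rename already moved it */
	if (ret == EEXIST)
		ret = 0;	/* someone else wrote the same object first */
	return ret;		/* 0 on success, an errno value on failure */
}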
@ -519,6 +662,7 @@ int main(int argc, char **argv)
curl = curl_easy_init(); curl = curl_easy_init();
no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:"); no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:");
no_range_header = curl_slist_append(no_range_header, "Range:");
curl_ssl_verify = getenv("GIT_SSL_NO_VERIFY") ? 0 : 1; curl_ssl_verify = getenv("GIT_SSL_NO_VERIFY") ? 0 : 1;
curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, curl_ssl_verify); curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, curl_ssl_verify);


@ -29,7 +29,7 @@ int hold_index_file_for_update(struct cache_file *cf, const char *path)
signal(SIGINT, remove_lock_file_on_signal); signal(SIGINT, remove_lock_file_on_signal);
atexit(remove_lock_file); atexit(remove_lock_file);
} }
return open(cf->lockfile, O_RDWR | O_CREAT | O_EXCL, 0600); return open(cf->lockfile, O_RDWR | O_CREAT | O_EXCL, 0666);
} }
int commit_index_file(struct cache_file *cf) int commit_index_file(struct cache_file *cf)
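Switching this lock file (and the mailsplit output file further down) from 0600 to 0666 leaves the effective permissions to the caller's umask instead of forcing owner-only files; presumably this is what makes group-shared repositories workable.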


@ -166,6 +166,7 @@ static void create_default_files(const char *git_dir,
{ {
unsigned len = strlen(git_dir); unsigned len = strlen(git_dir);
static char path[PATH_MAX]; static char path[PATH_MAX];
unsigned char sha1[20];
if (len > sizeof(path)-50) if (len > sizeof(path)-50)
die("insane git directory %s", git_dir); die("insane git directory %s", git_dir);
@ -186,15 +187,14 @@ static void create_default_files(const char *git_dir,
/* /*
* Create the default symlink from ".git/HEAD" to the "master" * Create the default symlink from ".git/HEAD" to the "master"
* branch * branch, if it does not exist yet.
*/ */
strcpy(path + len, "HEAD"); strcpy(path + len, "HEAD");
if (symlink("refs/heads/master", path) < 0) { if (read_ref(path, sha1) < 0) {
if (errno != EEXIST) { if (create_symref(path, "refs/heads/master") < 0)
perror(path);
exit(1); exit(1);
}
} }
path[len] = 0;
copy_templates(path, len, template_path); copy_templates(path, len, template_path);
} }


@ -530,7 +530,7 @@ static void verify_pathspec(void)
static const char ls_files_usage[] = static const char ls_files_usage[] =
"git-ls-files [-z] [-t] (--[cached|deleted|others|stage|unmerged|killed|modified])* " "git-ls-files [-z] [-t] (--[cached|deleted|others|stage|unmerged|killed|modified])* "
"[ --ignored ] [--exclude=<pattern>] [--exclude-from=<file>] " "[ --ignored ] [--exclude=<pattern>] [--exclude-from=<file>] "
"[ --exclude-per-directory=<filename> ]"; "[ --exclude-per-directory=<filename> ] [--] [<file>]*";
int main(int argc, const char **argv) int main(int argc, const char **argv)
{ {
@ -544,6 +544,10 @@ int main(int argc, const char **argv)
for (i = 1; i < argc; i++) { for (i = 1; i < argc; i++) {
const char *arg = argv[i]; const char *arg = argv[i];
if (!strcmp(arg, "--")) {
i++;
break;
}
if (!strcmp(arg, "-z")) { if (!strcmp(arg, "-z")) {
line_terminator = 0; line_terminator = 0;
continue; continue;


@ -128,7 +128,7 @@ int main(int argc, char **argv)
unsigned long len = parse_email(map, size); unsigned long len = parse_email(map, size);
assert(len <= size); assert(len <= size);
sprintf(name, "%04d", ++nr); sprintf(name, "%04d", ++nr);
fd = open(name, O_WRONLY | O_CREAT | O_EXCL, 0600); fd = open(name, O_WRONLY | O_CREAT | O_EXCL, 0666);
if (fd < 0) { if (fd < 0) {
perror(name); perror(name);
exit(1); exit(1);


@ -464,11 +464,15 @@ int read_cache(void)
errno = EBUSY; errno = EBUSY;
if (active_cache) if (active_cache)
return error("more than one cachefile"); return active_nr;
errno = ENOENT; errno = ENOENT;
fd = open(get_index_file(), O_RDONLY); fd = open(get_index_file(), O_RDONLY);
if (fd < 0) if (fd < 0) {
return (errno == ENOENT) ? 0 : error("open failed"); if (errno == ENOENT)
return 0;
die("index file open failed (%s)", strerror(errno));
}
size = 0; // avoid gcc warning size = 0; // avoid gcc warning
map = MAP_FAILED; map = MAP_FAILED;
@ -480,7 +484,7 @@ int read_cache(void)
} }
close(fd); close(fd);
if (map == MAP_FAILED) if (map == MAP_FAILED)
return error("mmap failed"); die("index file mmap failed (%s)", strerror(errno));
hdr = map; hdr = map;
if (verify_hdr(hdr, size) < 0) if (verify_hdr(hdr, size) < 0)
@ -501,7 +505,7 @@ int read_cache(void)
unmap: unmap:
munmap(map, size); munmap(map, size);
errno = EINVAL; errno = EINVAL;
return error("verify header failed"); die("index file corrupt");
} }
#define WRITE_BUFFER_SIZE 8192 #define WRITE_BUFFER_SIZE 8192


@ -13,6 +13,8 @@
static int merge = 0; static int merge = 0;
static int update = 0; static int update = 0;
static int index_only = 0; static int index_only = 0;
static int nontrivial_merge = 0;
static int trivial_merges_only = 0;
static int head_idx = -1; static int head_idx = -1;
static int merge_size = 0; static int merge_size = 0;
@ -275,6 +277,9 @@ static int unpack_trees(merge_fn_t fn)
if (unpack_trees_rec(posns, len, "", fn, &indpos)) if (unpack_trees_rec(posns, len, "", fn, &indpos))
return -1; return -1;
if (trivial_merges_only && nontrivial_merge)
die("Merge requires file-level merging");
check_updates(active_cache, active_nr); check_updates(active_cache, active_nr);
return 0; return 0;
} }
@ -460,6 +465,8 @@ static int threeway_merge(struct cache_entry **stages)
verify_uptodate(index); verify_uptodate(index);
} }
nontrivial_merge = 1;
/* #2, #3, #4, #6, #7, #9, #11. */ /* #2, #3, #4, #6, #7, #9, #11. */
count = 0; count = 0;
if (!head_match || !remote_match) { if (!head_match || !remote_match) {
@ -629,9 +636,9 @@ int main(int argc, char **argv)
continue; continue;
} }
if (!strcmp(arg, "--head")) { if (!strcmp(arg, "--trivial")) {
head_idx = stage - 1; trivial_merges_only = 1;
fn = threeway_merge; continue;
} }
/* "-m" stands for "merge", meaning we start in stage 1 */ /* "-m" stands for "merge", meaning we start in stage 1 */
@ -657,7 +664,8 @@ int main(int argc, char **argv)
} }
if ((update||index_only) && !merge) if ((update||index_only) && !merge)
usage(read_tree_usage); usage(read_tree_usage);
if (merge && !fn) {
if (merge) {
if (stage < 2) if (stage < 2)
die("just how do you expect me to merge %d trees?", stage-1); die("just how do you expect me to merge %d trees?", stage-1);
switch (stage - 1) { switch (stage - 1) {
@ -674,9 +682,7 @@ int main(int argc, char **argv)
fn = threeway_merge; fn = threeway_merge;
break; break;
} }
}
if (head_idx < 0) {
if (stage - 1 >= 3) if (stage - 1 >= 3)
head_idx = stage - 2; head_idx = stage - 2;
else else
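The net effect of these read-tree hunks: the old --head option is replaced by --trivial, threeway_merge() records via nontrivial_merge whenever it falls through to a case that needs file-level merging, and unpack_trees() dies with "Merge requires file-level merging" when --trivial was requested, so callers can detect merges that cannot be resolved purely in the index.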

refs.c

@ -2,19 +2,157 @@
#include "cache.h" #include "cache.h"
#include <errno.h> #include <errno.h>
#include <ctype.h>
static int read_ref(const char *refname, unsigned char *sha1) /* We allow "recursive" symbolic refs. Only within reason, though */
#define MAXDEPTH 5
#ifndef USE_SYMLINK_HEAD
#define USE_SYMLINK_HEAD 1
#endif
int validate_symref(const char *path)
{ {
int ret = -1; struct stat st;
int fd = open(git_path("%s", refname), O_RDONLY); char *buf, buffer[256];
int len, fd;
if (fd >= 0) { if (lstat(path, &st) < 0)
char buffer[60]; return -1;
if (read(fd, buffer, sizeof(buffer)) >= 40)
ret = get_sha1_hex(buffer, sha1); /* Make sure it is a "refs/.." symlink */
close(fd); if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5))
return 0;
return -1;
} }
return ret;
/*
* Anything else, just open it and try to see if it is a symbolic ref.
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return -1;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
/*
* Is it a symbolic ref?
*/
if (len < 4 || memcmp("ref:", buffer, 4))
return -1;
buf = buffer + 4;
len -= 4;
while (len && isspace(*buf))
buf++, len--;
if (len >= 5 && !memcmp("refs/", buffer, 5))
return 0;
return -1;
}
const char *resolve_ref(const char *path, unsigned char *sha1, int reading)
{
int depth = MAXDEPTH, len;
char buffer[256];
for (;;) {
struct stat st;
char *buf;
int fd;
if (--depth < 0)
return NULL;
/* Special case: non-existing file.
* Not having the refs/heads/new-branch is OK
* if we are writing into it, so is .git/HEAD
* that points at refs/heads/master still to be
* born. It is NOT OK if we are resolving for
* reading.
*/
if (lstat(path, &st) < 0) {
if (reading || errno != ENOENT)
return NULL;
memset(sha1, 0, 20);
return path;
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5)) {
path = git_path("%.*s", len, buffer);
continue;
}
}
/*
* Anything else, just open it and try to use it as
* a ref
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return NULL;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
/*
* Is it a symbolic ref?
*/
if (len < 4 || memcmp("ref:", buffer, 4))
break;
buf = buffer + 4;
len -= 4;
while (len && isspace(*buf))
buf++, len--;
while (len && isspace(buf[len-1]))
buf[--len] = 0;
path = git_path("%.*s", len, buf);
}
if (len < 40 || get_sha1_hex(buffer, sha1))
return NULL;
return path;
}
int create_symref(const char *git_HEAD, const char *refs_heads_master)
{
#if USE_SYMLINK_HEAD
unlink(git_HEAD);
return symlink(refs_heads_master, git_HEAD);
#else
const char *lockpath;
char ref[1000];
int fd, len, written;
len = snprintf(ref, sizeof(ref), "ref: %s\n", refs_heads_master);
if (sizeof(ref) <= len) {
error("refname too long: %s", refs_heads_master);
return -1;
}
lockpath = mkpath("%s.lock", git_HEAD);
fd = open(lockpath, O_CREAT | O_EXCL | O_WRONLY, 0666);
written = write(fd, ref, len);
close(fd);
if (written != len) {
unlink(lockpath);
error("Unable to write to %s", lockpath);
return -2;
}
if (rename(lockpath, git_HEAD) < 0) {
unlink(lockpath);
error("Unable to create %s", git_HEAD);
return -3;
}
return 0;
#endif
}
int read_ref(const char *filename, unsigned char *sha1)
{
if (resolve_ref(filename, sha1, 1))
return 0;
return -1;
} }
static int do_for_each_ref(const char *base, int (*fn)(const char *path, const unsigned char *sha1)) static int do_for_each_ref(const char *base, int (*fn)(const char *path, const unsigned char *sha1))
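create_symref() writes the textual form of a symbolic ref as a single line such as "ref: refs/heads/master", and resolve_ref() follows either that or an old-style symlink. As an aside, a minimal standalone reader of the textual format (hypothetical: fixed path, one level of indirection only, unlike resolve_ref() with its MAXDEPTH of 5) might look like this:

#include <ctype.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

int main(void)
{
	char buffer[256];
	int fd = open(".git/HEAD", O_RDONLY);
	int len;

	if (fd < 0)
		return 1;
	len = read(fd, buffer, sizeof(buffer) - 1);
	close(fd);
	if (len < 0)
		return 1;
	buffer[len] = '\0';

	if (len >= 4 && !memcmp(buffer, "ref:", 4)) {
		char *buf = buffer + 4;
		while (*buf && isspace((unsigned char)*buf))
			buf++;
		/* trim trailing whitespace for display */
		for (char *end = buf + strlen(buf);
		     end > buf && isspace((unsigned char)end[-1]); end--)
			end[-1] = '\0';
		printf("symbolic ref -> %s\n", buf);	/* e.g. refs/heads/master */
	} else if (len >= 40) {
		printf("object name %.40s\n", buffer);	/* plain SHA1 */
	}
	return 0;
}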
@ -54,7 +192,7 @@ static int do_for_each_ref(const char *base, int (*fn)(const char *path, const u
break; break;
continue; continue;
} }
if (read_ref(path, sha1) < 0) if (read_ref(git_path("%s", path), sha1) < 0)
continue; continue;
if (!has_sha1_file(sha1)) if (!has_sha1_file(sha1))
continue; continue;
@ -71,7 +209,7 @@ static int do_for_each_ref(const char *base, int (*fn)(const char *path, const u
int head_ref(int (*fn)(const char *path, const unsigned char *sha1)) int head_ref(int (*fn)(const char *path, const unsigned char *sha1))
{ {
unsigned char sha1[20]; unsigned char sha1[20];
if (!read_ref("HEAD", sha1)) if (!read_ref(git_path("HEAD"), sha1))
return fn("HEAD", sha1); return fn("HEAD", sha1);
return 0; return 0;
} }
@ -101,33 +239,14 @@ static char *ref_lock_file_name(const char *ref)
return ret; return ret;
} }
static int read_ref_file(const char *filename, unsigned char *sha1) {
int fd = open(filename, O_RDONLY);
char hex[41];
if (fd < 0) {
return error("Couldn't open %s\n", filename);
}
if ((read(fd, hex, 41) < 41) ||
(hex[40] != '\n') ||
get_sha1_hex(hex, sha1)) {
error("Couldn't read a hash from %s\n", filename);
close(fd);
return -1;
}
close(fd);
return 0;
}
int get_ref_sha1(const char *ref, unsigned char *sha1) int get_ref_sha1(const char *ref, unsigned char *sha1)
{ {
char *filename; const char *filename;
int retval;
if (check_ref_format(ref)) if (check_ref_format(ref))
return -1; return -1;
filename = ref_file_name(ref); filename = git_path("refs/%s", ref);
retval = read_ref_file(filename, sha1); return read_ref(filename, sha1);
free(filename);
return retval;
} }
static int lock_ref_file(const char *filename, const char *lock_filename, static int lock_ref_file(const char *filename, const char *lock_filename,
@ -140,7 +259,7 @@ static int lock_ref_file(const char *filename, const char *lock_filename,
return error("Couldn't open lock file for %s: %s", return error("Couldn't open lock file for %s: %s",
filename, strerror(errno)); filename, strerror(errno));
} }
retval = read_ref_file(filename, current_sha1); retval = read_ref(filename, current_sha1);
if (old_sha1) { if (old_sha1) {
if (retval) { if (retval) {
close(fd); close(fd);

setup.c

@ -76,18 +76,20 @@ const char **get_pathspec(const char *prefix, const char **pathspec)
* Test it it looks like we're at the top * Test it it looks like we're at the top
* level git directory. We want to see a * level git directory. We want to see a
* *
* - a HEAD symlink and a refs/ directory under ".git"
* - either a .git/objects/ directory _or_ the proper * - either a .git/objects/ directory _or_ the proper
* GIT_OBJECT_DIRECTORY environment variable * GIT_OBJECT_DIRECTORY environment variable
* - a refs/ directory under ".git"
* - either a HEAD symlink or a HEAD file that is formatted as
* a proper "ref:".
*/ */
static int is_toplevel_directory(void) static int is_toplevel_directory(void)
{ {
struct stat st; if (access(".git/refs/", X_OK) ||
access(getenv(DB_ENVIRONMENT) ?
return !lstat(".git/HEAD", &st) && getenv(DB_ENVIRONMENT) : ".git/objects/", X_OK) ||
S_ISLNK(st.st_mode) && validate_symref(".git/HEAD"))
!access(".git/refs/", X_OK) && return 0;
(getenv(DB_ENVIRONMENT) || !access(".git/objects/", X_OK)); return 1;
} }
const char *setup_git_directory(void) const char *setup_git_directory(void)


@ -20,6 +20,8 @@
#endif #endif
#endif #endif
const unsigned char null_sha1[20] = { 0, };
static unsigned int sha1_file_open_flag = O_NOATIME; static unsigned int sha1_file_open_flag = O_NOATIME;
static unsigned hexval(char c) static unsigned hexval(char c)


@ -119,21 +119,6 @@ static int get_short_sha1(const char *name, int len, unsigned char *sha1)
return -1; return -1;
} }
static int get_sha1_file(const char *path, unsigned char *result)
{
char buffer[60];
int fd = open(path, O_RDONLY);
int len;
if (fd < 0)
return -1;
len = read(fd, buffer, sizeof(buffer));
close(fd);
if (len < 40)
return -1;
return get_sha1_hex(buffer, result);
}
static int get_sha1_basic(const char *str, int len, unsigned char *sha1) static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
{ {
static const char *prefix[] = { static const char *prefix[] = {
@ -150,7 +135,7 @@ static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
for (p = prefix; *p; p++) { for (p = prefix; *p; p++) {
char *pathname = git_path("%s/%.*s", *p, len, str); char *pathname = git_path("%s/%.*s", *p, len, str);
if (!get_sha1_file(pathname, sha1)) if (!read_ref(pathname, sha1))
return 0; return 0;
} }


@ -349,6 +349,7 @@ int main(int ac, char **av)
int all_heads = 0, all_tags = 0; int all_heads = 0, all_tags = 0;
int all_mask, all_revs, shown_merge_point; int all_mask, all_revs, shown_merge_point;
char head_path[128]; char head_path[128];
const char *head_path_p;
int head_path_len; int head_path_len;
unsigned char head_sha1[20]; unsigned char head_sha1[20];
int merge_base = 0; int merge_base = 0;
@ -430,11 +431,15 @@ int main(int ac, char **av)
if (0 <= extra) if (0 <= extra)
join_revs(&list, &seen, num_rev, extra); join_revs(&list, &seen, num_rev, extra);
head_path_len = readlink(".git/HEAD", head_path, sizeof(head_path)-1); head_path_p = resolve_ref(git_path("HEAD"), head_sha1, 1);
if ((head_path_len < 0) || get_sha1("HEAD", head_sha1)) if (head_path_p) {
head_path_len = strlen(head_path_p);
memcpy(head_path, head_path_p, head_path_len + 1);
}
else {
head_path_len = 0;
head_path[0] = 0; head_path[0] = 0;
else }
head_path[head_path_len] = 0;
if (merge_base) if (merge_base)
return show_merge_base(seen, num_rev); return show_merge_base(seen, num_rev);

symbolic-ref.c (new file)

@ -0,0 +1,34 @@
#include "cache.h"
static const char git_symbolic_ref_usage[] =
"git-symbolic-ref name [ref]";
static int check_symref(const char *HEAD)
{
unsigned char sha1[20];
const char *git_HEAD = strdup(git_path("%s", HEAD));
const char *git_refs_heads_master = resolve_ref(git_HEAD, sha1, 0);
if (git_refs_heads_master) {
/* we want to strip the .git/ part */
int pfxlen = strlen(git_HEAD) - strlen(HEAD);
puts(git_refs_heads_master + pfxlen);
}
else
die("No such ref: %s", HEAD);
}
int main(int argc, const char **argv)
{
setup_git_directory();
switch (argc) {
case 2:
check_symref(argv[1]);
break;
case 3:
create_symref(strdup(git_path("%s", argv[1])), argv[2]);
break;
default:
usage(git_symbolic_ref_usage);
}
return 0;
}
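Given the switch above, "git-symbolic-ref HEAD" prints the ref that HEAD currently points at (for example refs/heads/master, with the leading .git/ stripped), "git-symbolic-ref HEAD refs/heads/topic" repoints HEAD via create_symref(), and any other argument count prints the usage string.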


@ -5,6 +5,7 @@
#GIT_TEST_OPTS=--verbose --debug #GIT_TEST_OPTS=--verbose --debug
SHELL_PATH ?= $(SHELL) SHELL_PATH ?= $(SHELL)
TAR ?= $(TAR)
T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh) T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)

t/t3002-ls-files-dashpath.sh (new executable file)

@ -0,0 +1,69 @@
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano
#
test_description='git-ls-files test (-- to terminate the path list).
This test runs git-ls-files --others with the following on the
filesystem.
path0 - a file
-foo - a file with a funny name.
-- - another file with a funny name.
'
. ./test-lib.sh
test_expect_success \
setup \
'echo frotz >path0 &&
echo frotz >./-foo &&
echo frotz >./--'
test_expect_success \
'git-ls-files without path restriction.' \
'git-ls-files --others >output &&
diff -u output - <<EOF
--
-foo
output
path0
EOF
'
test_expect_success \
'git-ls-files with path restriction.' \
'git-ls-files --others path0 >output &&
diff -u output - <<EOF
path0
EOF
'
test_expect_success \
'git-ls-files with path restriction with --.' \
'git-ls-files --others -- path0 >output &&
diff -u output - <<EOF
path0
EOF
'
test_expect_success \
'git-ls-files with path restriction with -- --.' \
'git-ls-files --others -- -- >output &&
diff -u output - <<EOF
--
EOF
'
test_expect_success \
'git-ls-files with no path restriction.' \
'git-ls-files --others -- >output &&
diff -u output - <<EOF
--
-foo
output
path0
EOF
'
test_done


@ -50,7 +50,7 @@ test_expect_success \
test_expect_success \ test_expect_success \
'validate file modification time' \ 'validate file modification time' \
'TZ=GMT tar tvf b.tar a/a | 'TZ=GMT $TAR tvf b.tar a/a |
awk \{print\ \$4,\ \(length\(\$5\)\<7\)\ ?\ \$5\":00\"\ :\ \$5\} \ awk \{print\ \$4,\ \(length\(\$5\)\<7\)\ ?\ \$5\":00\"\ :\ \$5\} \
>b.mtime && >b.mtime &&
echo "2005-05-27 22:00:00" >expected.mtime && echo "2005-05-27 22:00:00" >expected.mtime &&
@ -63,7 +63,7 @@ test_expect_success \
test_expect_success \ test_expect_success \
'extract tar archive' \ 'extract tar archive' \
'(cd b && tar xf -) <b.tar' '(cd b && $TAR xf -) <b.tar'
test_expect_success \ test_expect_success \
'validate filenames' \ 'validate filenames' \
@ -80,7 +80,7 @@ test_expect_success \
test_expect_success \ test_expect_success \
'extract tar archive with prefix' \ 'extract tar archive with prefix' \
'(cd c && tar xf -) <c.tar' '(cd c && $TAR xf -) <c.tar'
test_expect_success \ test_expect_success \
'validate filenames with prefix' \ 'validate filenames with prefix' \


@ -20,12 +20,12 @@ test_expect_success setup '
commit=$(echo "Commit #$i" | git-commit-tree $tree -p $parent) && commit=$(echo "Commit #$i" | git-commit-tree $tree -p $parent) &&
parent=$commit || return 1 parent=$commit || return 1
done && done &&
echo "$commit" >.git/HEAD && git-update-ref HEAD "$commit" &&
git-clone -l ./. victim && git-clone -l ./. victim &&
cd victim && cd victim &&
git-log && git-log &&
cd .. && cd .. &&
echo $zero >.git/HEAD && git-update-ref HEAD "$zero" &&
parent=$zero && parent=$zero &&
for i in $cnt for i in $cnt
do do
@ -33,7 +33,7 @@ test_expect_success setup '
commit=$(echo "Rebase #$i" | git-commit-tree $tree -p $parent) && commit=$(echo "Rebase #$i" | git-commit-tree $tree -p $parent) &&
parent=$commit || return 1 parent=$commit || return 1
done && done &&
echo "$commit" >.git/HEAD && git-update-ref HEAD "$commit" &&
echo Rebase && echo Rebase &&
git-log' git-log'


@ -7,20 +7,6 @@ test_description='Tests git-rev-list --bisect functionality'
. ./test-lib.sh . ./test-lib.sh
. ../t6000lib.sh # t6xxx specific functions . ../t6000lib.sh # t6xxx specific functions
bc_expr()
{
bc <<EOF
scale=1
define abs(x) {
if (x>=0) { return (x); } else { return (-x); }
}
define floor(x) {
save=scale; scale=0; result=x/1; scale=save; return (result);
}
$*
EOF
}
# usage: test_bisection max-diff bisect-option head ^prune... # usage: test_bisection max-diff bisect-option head ^prune...
# #
# e.g. test_bisection 1 --bisect l1 ^l0 # e.g. test_bisection 1 --bisect l1 ^l0
@ -35,8 +21,19 @@ test_bisection_diff()
_head=$1 _head=$1
shift 1 shift 1
_bisection_size=$(git-rev-list $_bisection "$@" | wc -l) _bisection_size=$(git-rev-list $_bisection "$@" | wc -l)
[ -n "$_list_size" -a -n "$_bisection_size" ] || error "test_bisection_diff failed" [ -n "$_list_size" -a -n "$_bisection_size" ] ||
test_expect_success "bisection diff $_bisect_option $_head $* <= $_max_diff" "[ $(bc_expr "floor(abs($_list_size/2)-$_bisection_size)") -le $_max_diff ]" error "test_bisection_diff failed"
# Test if bisection size is close to half of list size within
# tolerance.
#
_bisect_err=`expr $_list_size - $_bisection_size \* 2`
test "$_bisect_err" -lt 0 && _bisect_err=`expr 0 - $_bisect_err`
_bisect_err=`expr $_bisect_err / 2` ; # floor
test_expect_success \
"bisection diff $_bisect_option $_head $* <= $_max_diff" \
'test $_bisect_err -le $_max_diff'
} }
date >path0 date >path0
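The replacement arithmetic computes floor(|_list_size - 2 * _bisection_size| / 2), i.e. how far the bisection point is from halving the list. For example, a list of 11 commits with a bisection half of 5 gives |11 - 10| / 2 = 0, which passes for any _max_diff of 0 or more.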


@ -353,6 +353,8 @@ static void traverse_tree(void *buffer, unsigned long size,
if (size < namelen + 20 || sscanf(buffer, "%o", &mode) != 1) if (size < namelen + 20 || sscanf(buffer, "%o", &mode) != 1)
die("corrupt 'tree' file"); die("corrupt 'tree' file");
if (S_ISDIR(mode) || S_ISREG(mode))
mode |= (mode & 0100) ? 0777 : 0666;
buffer = sha1 + 20; buffer = sha1 + 20;
size -= namelen + 20; size -= namelen + 20;
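The added canonicalization trusts only the owner-execute bit of a tree entry's mode: a 100644 entry widens to 0666 permission bits and 100755 to 0777, presumably so that checkout permissions end up governed by the user's umask (matching the 0666 changes elsewhere in this commit) rather than by whatever exact mode was recorded.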


@ -13,7 +13,7 @@
* like "git-update-index *" and suddenly having all the object * like "git-update-index *" and suddenly having all the object
* files be revision controlled. * files be revision controlled.
*/ */
static int allow_add = 0, allow_remove = 0, allow_replace = 0, not_new = 0, quiet = 0, info_only = 0; static int allow_add = 0, allow_remove = 0, allow_replace = 0, allow_unmerged = 0, not_new = 0, quiet = 0, info_only = 0;
static int force_remove; static int force_remove;
/* Three functions to allow overloaded pointer return; see linux/err.h */ /* Three functions to allow overloaded pointer return; see linux/err.h */
@ -135,7 +135,7 @@ static struct cache_entry *refresh_entry(struct cache_entry *ce)
changed = ce_match_stat(ce, &st); changed = ce_match_stat(ce, &st);
if (!changed) if (!changed)
return ce; return NULL;
if (ce_modified(ce, &st)) if (ce_modified(ce, &st))
return ERR_PTR(-EINVAL); return ERR_PTR(-EINVAL);
@ -156,16 +156,20 @@ static int refresh_cache(void)
struct cache_entry *ce, *new; struct cache_entry *ce, *new;
ce = active_cache[i]; ce = active_cache[i];
if (ce_stage(ce)) { if (ce_stage(ce)) {
printf("%s: needs merge\n", ce->name);
has_errors = 1;
while ((i < active_nr) && while ((i < active_nr) &&
! strcmp(active_cache[i]->name, ce->name)) ! strcmp(active_cache[i]->name, ce->name))
i++; i++;
i--; i--;
if (allow_unmerged)
continue;
printf("%s: needs merge\n", ce->name);
has_errors = 1;
continue; continue;
} }
new = refresh_entry(ce); new = refresh_entry(ce);
if (!new)
continue;
if (IS_ERR(new)) { if (IS_ERR(new)) {
if (not_new && PTR_ERR(new) == -ENOENT) if (not_new && PTR_ERR(new) == -ENOENT)
continue; continue;
@ -335,6 +339,10 @@ int main(int argc, const char **argv)
allow_remove = 1; allow_remove = 1;
continue; continue;
} }
if (!strcmp(path, "--unmerged")) {
allow_unmerged = 1;
continue;
}
if (!strcmp(path, "--refresh")) { if (!strcmp(path, "--refresh")) {
has_errors |= refresh_cache(); has_errors |= refresh_cache();
continue; continue;
@ -383,9 +391,11 @@ int main(int argc, const char **argv)
update_one(buf.buf, prefix, prefix_length); update_one(buf.buf, prefix, prefix_length);
} }
} }
if (write_cache(newfd, active_cache, active_nr) || if (active_cache_changed) {
commit_index_file(&cache_file)) if (write_cache(newfd, active_cache, active_nr) ||
die("Unable to write new cachefile"); commit_index_file(&cache_file))
die("Unable to write new cachefile");
}
return has_errors ? 1 : 0; return has_errors ? 1 : 0;
} }


@ -4,53 +4,6 @@
static const char git_update_ref_usage[] = "git-update-ref <refname> <value> [<oldval>]"; static const char git_update_ref_usage[] = "git-update-ref <refname> <value> [<oldval>]";
#define MAXDEPTH 5
static const char *resolve_ref(const char *path, unsigned char *sha1)
{
int depth = MAXDEPTH, len;
char buffer[256];
for (;;) {
struct stat st;
int fd;
if (--depth < 0)
return NULL;
/* Special case: non-existing file */
if (lstat(path, &st) < 0) {
if (errno != ENOENT)
return NULL;
memset(sha1, 0, 20);
return path;
}
/* Follow "normalized" - ie "refs/.." symlinks by hand */
if (S_ISLNK(st.st_mode)) {
len = readlink(path, buffer, sizeof(buffer)-1);
if (len >= 5 && !memcmp("refs/", buffer, 5)) {
path = git_path("%.*s", len, buffer);
continue;
}
}
/*
* Anything else, just open it and try to use it as
* a ref
*/
fd = open(path, O_RDONLY);
if (fd < 0)
return NULL;
len = read(fd, buffer, sizeof(buffer)-1);
close(fd);
break;
}
if (len < 40 || get_sha1_hex(buffer, sha1))
return NULL;
return path;
}
static int re_verify(const char *path, unsigned char *oldsha1, unsigned char *currsha1) static int re_verify(const char *path, unsigned char *oldsha1, unsigned char *currsha1)
{ {
char buf[40]; char buf[40];
@ -84,7 +37,7 @@ int main(int argc, char **argv)
if (oldval && get_sha1(oldval, oldsha1) < 0) if (oldval && get_sha1(oldval, oldsha1) < 0)
die("%s: not a valid old SHA1", oldval); die("%s: not a valid old SHA1", oldval);
path = resolve_ref(git_path("%s", refname), currsha1); path = resolve_ref(git_path("%s", refname), currsha1, !!oldval);
if (!path) if (!path)
die("No such ref: %s", refname); die("No such ref: %s", refname);


@ -15,7 +15,7 @@ static void report(const char *prefix, const char *err, va_list params)
void usage(const char *err) void usage(const char *err)
{ {
fprintf(stderr, "usage: %s\n", err); fprintf(stderr, "usage: %s\n", err);
exit(1); exit(129);
} }
void die(const char *err, ...) void die(const char *err, ...)
@ -25,7 +25,7 @@ void die(const char *err, ...)
va_start(params, err); va_start(params, err);
report("fatal: ", err, params); report("fatal: ", err, params);
va_end(params); va_end(params);
exit(1); exit(128);
} }
int error(const char *err, ...) int error(const char *err, ...)