diff --git a/.gitignore b/.gitignore index 4c8c8e4115..7f2cd55088 100644 --- a/.gitignore +++ b/.gitignore @@ -66,7 +66,6 @@ git-merge-one-file git-merge-ours git-merge-recur git-merge-recursive -git-merge-recursive-old git-merge-resolve git-merge-stupid git-mktag diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt index bebf30ad3d..e2954aa76e 100644 --- a/Documentation/git-merge.txt +++ b/Documentation/git-merge.txt @@ -8,12 +8,14 @@ git-merge - Grand Unified Merge Driver SYNOPSIS -------- -'git-merge' [-n] [--no-commit] [-s <strategy>]... <msg> <head> <remote> <remote>... - +[verse] +'git-merge' [-n] [--no-commit] [--squash] [-s <strategy>]... + [--reflog-action=<action>] + -m=<msg> <head> <remote> <remote>... DESCRIPTION ----------- -This is the top-level user interface to the merge machinery +This is the top-level interface to the merge machinery which drives multiple merge strategy scripts. @@ -27,13 +29,19 @@ include::merge-options.txt[] to give a good default for automated `git-merge` invocations. <head>:: - our branch head commit. + Our branch head commit. This has to be `HEAD`, so the new + syntax does not require it. <remote>:: - other branch head merged into our branch. You need at + Other branch head merged into our branch. You need at least one <remote>. Specifying more than one <remote> obviously means you are trying an Octopus. +--reflog-action=<action>:: + This is used internally when `git-pull` calls this command + to record that the merge was created by the `pull` command + in the `ref-log` entry that results from the merge. + include::merge-strategies.txt[] diff --git a/INSTALL b/INSTALL index fce6bc39d5..8f69039fb9 100644 --- a/INSTALL +++ b/INSTALL @@ -99,9 +99,6 @@ Issues of note: - "perl" and POSIX-compliant shells are needed to use most of the barebone Porcelainish scripts. - - "python" 2.3 or more recent; if you have 2.3, you may need - to build with "make WITH_OWN_SUBPROCESS_PY=YesPlease". - - Some platform specific issues are dealt with Makefile rules, but depending on your specific installation, you may not have all the libraries/tools needed, or you may have diff --git a/Makefile b/Makefile index 36ce8cd606..fc30dcbbb9 100644 --- a/Makefile +++ b/Makefile @@ -69,8 +69,6 @@ all: # # Define NO_MMAP if you want to avoid mmap. # -# Define WITH_OWN_SUBPROCESS_PY if you want to use with python 2.3. -# # Define NO_IPV6 if you lack IPv6 support and getaddrinfo(). # # Define NO_SOCKADDR_STORAGE if your platform does not have struct @@ -116,7 +114,6 @@ prefix = $(HOME) bindir = $(prefix)/bin gitexecdir = $(bindir) template_dir = $(prefix)/share/git-core/templates/ -GIT_PYTHON_DIR = $(prefix)/share/git-core/python # DESTDIR= # default configuration for gitweb @@ -135,7 +132,7 @@ GITWEB_FAVICON = git-favicon.png GITWEB_SITE_HEADER = GITWEB_SITE_FOOTER = -export prefix bindir gitexecdir template_dir GIT_PYTHON_DIR +export prefix bindir gitexecdir template_dir CC = gcc AR = ar @@ -179,12 +176,8 @@ SCRIPT_PERL = \ git-svnimport.perl git-cvsexportcommit.perl \ git-send-email.perl git-svn.perl -SCRIPT_PYTHON = \ - git-merge-recursive-old.py - SCRIPTS = $(patsubst %.sh,%,$(SCRIPT_SH)) \ $(patsubst %.perl,%,$(SCRIPT_PERL)) \ - $(patsubst %.py,%,$(SCRIPT_PYTHON)) \ git-cherry-pick git-status git-instaweb # ... 
and all the rest that could be moved out of bindir to gitexecdir @@ -227,12 +220,6 @@ endif ifndef PERL_PATH PERL_PATH = /usr/bin/perl endif -ifndef PYTHON_PATH - PYTHON_PATH = /usr/bin/python -endif - -PYMODULES = \ - gitMergeCommon.py LIB_FILE=libgit.a XDIFF_LIB=xdiff/lib.a @@ -423,16 +410,6 @@ endif -include config.mak.autogen -include config.mak -ifdef WITH_OWN_SUBPROCESS_PY - PYMODULES += compat/subprocess.py -else - ifeq ($(NO_PYTHON),) - ifneq ($(shell $(PYTHON_PATH) -c 'import subprocess;print"OK"' 2>/dev/null),OK) - PYMODULES += compat/subprocess.py - endif - endif -endif - ifndef NO_CURL ifdef CURLDIR # This is still problematic -- gcc does not always want -R. @@ -574,8 +551,6 @@ prefix_SQ = $(subst ','\'',$(prefix)) SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH)) PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH)) -PYTHON_PATH_SQ = $(subst ','\'',$(PYTHON_PATH)) -GIT_PYTHON_DIR_SQ = $(subst ','\'',$(GIT_PYTHON_DIR)) LIBS = $(GITLIBS) $(EXTLIBS) @@ -622,7 +597,6 @@ $(patsubst %.sh,%,$(SCRIPT_SH)) : % : %.sh -e 's|@@PERL@@|$(PERL_PATH_SQ)|g' \ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ -e 's/@@NO_CURL@@/$(NO_CURL)/g' \ - -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \ $@.sh >$@+ chmod +x $@+ mv $@+ $@ @@ -644,15 +618,6 @@ $(patsubst %.perl,%,$(SCRIPT_PERL)): % : %.perl chmod +x $@+ mv $@+ $@ -$(patsubst %.py,%,$(SCRIPT_PYTHON)) : % : %.py GIT-CFLAGS - rm -f $@ $@+ - sed -e '1s|#!.*python|#!$(PYTHON_PATH_SQ)|' \ - -e 's|@@GIT_PYTHON_PATH@@|$(GIT_PYTHON_DIR_SQ)|g' \ - -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ - $@.py >$@+ - chmod +x $@+ - mv $@+ $@ - git-cherry-pick: git-revert cp $< $@+ mv $@+ $@ @@ -689,7 +654,6 @@ git-instaweb: git-instaweb.sh gitweb/gitweb.cgi gitweb/gitweb.css sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ -e 's/@@GIT_VERSION@@/$(GIT_VERSION)/g' \ -e 's/@@NO_CURL@@/$(NO_CURL)/g' \ - -e 's/@@NO_PYTHON@@/$(NO_PYTHON)/g' \ -e '/@@GITWEB_CGI@@/r gitweb/gitweb.cgi' \ -e '/@@GITWEB_CGI@@/d' \ -e '/@@GITWEB_CSS@@/r gitweb/gitweb.css' \ @@ -709,7 +673,6 @@ configure: configure.ac git$X git.spec \ $(patsubst %.sh,%,$(SCRIPT_SH)) \ $(patsubst %.perl,%,$(SCRIPT_PERL)) \ - $(patsubst %.py,%,$(SCRIPT_PYTHON)) \ : GIT-VERSION-FILE %.o: %.c GIT-CFLAGS @@ -783,7 +746,7 @@ tags: find . -name '*.[hcS]' -print | xargs ctags -a ### Detect prefix changes -TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):$(GIT_PYTHON_DIR_SQ):\ +TRACK_CFLAGS = $(subst ','\'',$(ALL_CFLAGS)):\ $(bindir_SQ):$(gitexecdir_SQ):$(template_dir_SQ):$(prefix_SQ) GIT-CFLAGS: .FORCE-GIT-CFLAGS @@ -799,7 +762,6 @@ GIT-CFLAGS: .FORCE-GIT-CFLAGS # However, the environment gets quite big, and some programs have problems # with that. 
-export NO_PYTHON export NO_SVN_TESTS test: all @@ -834,8 +796,6 @@ install: all $(INSTALL) git$X gitk '$(DESTDIR_SQ)$(bindir_SQ)' $(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install $(MAKE) -C perl install - $(INSTALL) -d -m755 '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)' - $(INSTALL) $(PYMODULES) '$(DESTDIR_SQ)$(GIT_PYTHON_DIR_SQ)' if test 'z$(bindir_SQ)' != 'z$(gitexecdir_SQ)'; \ then \ ln -f '$(DESTDIR_SQ)$(bindir_SQ)/git$X' \ @@ -922,7 +882,6 @@ check-docs:: case "$$v" in \ git-merge-octopus | git-merge-ours | git-merge-recursive | \ git-merge-resolve | git-merge-stupid | git-merge-recur | \ - git-merge-recursive-old | \ git-ssh-pull | git-ssh-push ) continue ;; \ esac ; \ test -f "Documentation/$$v.txt" || \ diff --git a/compat/subprocess.py b/compat/subprocess.py deleted file mode 100644 index 6474eab119..0000000000 --- a/compat/subprocess.py +++ /dev/null @@ -1,1149 +0,0 @@ -# subprocess - Subprocesses with accessible I/O streams -# -# For more information about this module, see PEP 324. -# -# This module should remain compatible with Python 2.2, see PEP 291. -# -# Copyright (c) 2003-2005 by Peter Astrand -# -# Licensed to PSF under a Contributor Agreement. -# See http://www.python.org/2.4/license for licensing details. - -r"""subprocess - Subprocesses with accessible I/O streams - -This module allows you to spawn processes, connect to their -input/output/error pipes, and obtain their return codes. This module -intends to replace several other, older modules and functions, like: - -os.system -os.spawn* -os.popen* -popen2.* -commands.* - -Information about how the subprocess module can be used to replace these -modules and functions can be found below. - - - -Using the subprocess module -=========================== -This module defines one class called Popen: - -class Popen(args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - - -Arguments are: - -args should be a string, or a sequence of program arguments. The -program to execute is normally the first item in the args sequence or -string, but can be explicitly set by using the executable argument. - -On UNIX, with shell=False (default): In this case, the Popen class -uses os.execvp() to execute the child program. args should normally -be a sequence. A string will be treated as a sequence with the string -as the only item (the program to execute). - -On UNIX, with shell=True: If args is a string, it specifies the -command string to execute through the shell. If args is a sequence, -the first item specifies the command string, and any additional items -will be treated as additional shell arguments. - -On Windows: the Popen class uses CreateProcess() to execute the child -program, which operates on strings. If args is a sequence, it will be -converted to a string using the list2cmdline method. Please note that -not all MS Windows applications interpret the command line the same -way: The list2cmdline is designed for applications using the same -rules as the MS C runtime. - -bufsize, if given, has the same meaning as the corresponding argument -to the built-in open() function: 0 means unbuffered, 1 means line -buffered, any other positive value means use a buffer of -(approximately) that size. A negative bufsize means to use the system -default, which usually means fully buffered. The default value for -bufsize is 0 (unbuffered). 
- -stdin, stdout and stderr specify the executed programs' standard -input, standard output and standard error file handles, respectively. -Valid values are PIPE, an existing file descriptor (a positive -integer), an existing file object, and None. PIPE indicates that a -new pipe to the child should be created. With None, no redirection -will occur; the child's file handles will be inherited from the -parent. Additionally, stderr can be STDOUT, which indicates that the -stderr data from the applications should be captured into the same -file handle as for stdout. - -If preexec_fn is set to a callable object, this object will be called -in the child process just before the child is executed. - -If close_fds is true, all file descriptors except 0, 1 and 2 will be -closed before the child process is executed. - -if shell is true, the specified command will be executed through the -shell. - -If cwd is not None, the current directory will be changed to cwd -before the child is executed. - -If env is not None, it defines the environment variables for the new -process. - -If universal_newlines is true, the file objects stdout and stderr are -opened as a text files, but lines may be terminated by any of '\n', -the Unix end-of-line convention, '\r', the Macintosh convention or -'\r\n', the Windows convention. All of these external representations -are seen as '\n' by the Python program. Note: This feature is only -available if Python is built with universal newline support (the -default). Also, the newlines attribute of the file objects stdout, -stdin and stderr are not updated by the communicate() method. - -The startupinfo and creationflags, if given, will be passed to the -underlying CreateProcess() function. They can specify things such as -appearance of the main window and priority for the new process. -(Windows only) - - -This module also defines two shortcut functions: - -call(*args, **kwargs): - Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - - -Exceptions ----------- -Exceptions raised in the child process, before the new program has -started to execute, will be re-raised in the parent. Additionally, -the exception object will have one extra attribute called -'child_traceback', which is a string containing traceback information -from the childs point of view. - -The most common exception raised is OSError. This occurs, for -example, when trying to execute a non-existent file. Applications -should prepare for OSErrors. - -A ValueError will be raised if Popen is called with invalid arguments. - - -Security --------- -Unlike some other popen functions, this implementation will never call -/bin/sh implicitly. This means that all characters, including shell -metacharacters, can safely be passed to child processes. - - -Popen objects -============= -Instances of the Popen class have the following methods: - -poll() - Check if child process has terminated. Returns returncode - attribute. - -wait() - Wait for child process to terminate. Returns returncode attribute. - -communicate(input=None) - Interact with process: Send data to stdin. Read data from stdout - and stderr, until end-of-file is reached. Wait for process to - terminate. The optional stdin argument should be a string to be - sent to the child process, or None, if no data should be sent to - the child. - - communicate() returns a tuple (stdout, stderr). 
- - Note: The data read is buffered in memory, so do not use this - method if the data size is large or unlimited. - -The following attributes are also available: - -stdin - If the stdin argument is PIPE, this attribute is a file object - that provides input to the child process. Otherwise, it is None. - -stdout - If the stdout argument is PIPE, this attribute is a file object - that provides output from the child process. Otherwise, it is - None. - -stderr - If the stderr argument is PIPE, this attribute is file object that - provides error output from the child process. Otherwise, it is - None. - -pid - The process ID of the child process. - -returncode - The child return code. A None value indicates that the process - hasn't terminated yet. A negative value -N indicates that the - child was terminated by signal N (UNIX only). - - -Replacing older functions with the subprocess module -==================================================== -In this section, "a ==> b" means that b can be used as a replacement -for a. - -Note: All functions in this section fail (more or less) silently if -the executed program cannot be found; this module raises an OSError -exception. - -In the following examples, we assume that the subprocess module is -imported with "from subprocess import *". - - -Replacing /bin/sh shell backquote ---------------------------------- -output=`mycmd myarg` -==> -output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] - - -Replacing shell pipe line -------------------------- -output=`dmesg | grep hda` -==> -p1 = Popen(["dmesg"], stdout=PIPE) -p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) -output = p2.communicate()[0] - - -Replacing os.system() ---------------------- -sts = os.system("mycmd" + " myarg") -==> -p = Popen("mycmd" + " myarg", shell=True) -sts = os.waitpid(p.pid, 0) - -Note: - -* Calling the program through the shell is usually not required. - -* It's easier to look at the returncode attribute than the - exitstatus. 
- -A more real-world example would look like this: - -try: - retcode = call("mycmd" + " myarg", shell=True) - if retcode < 0: - print >>sys.stderr, "Child was terminated by signal", -retcode - else: - print >>sys.stderr, "Child returned", retcode -except OSError, e: - print >>sys.stderr, "Execution failed:", e - - -Replacing os.spawn* -------------------- -P_NOWAIT example: - -pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") -==> -pid = Popen(["/bin/mycmd", "myarg"]).pid - - -P_WAIT example: - -retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") -==> -retcode = call(["/bin/mycmd", "myarg"]) - - -Vector example: - -os.spawnvp(os.P_NOWAIT, path, args) -==> -Popen([path] + args[1:]) - - -Environment example: - -os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) -==> -Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) - - -Replacing os.popen* -------------------- -pipe = os.popen(cmd, mode='r', bufsize) -==> -pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout - -pipe = os.popen(cmd, mode='w', bufsize) -==> -pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin - - -(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdin, child_stdout) = (p.stdin, p.stdout) - - -(child_stdin, - child_stdout, - child_stderr) = os.popen3(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) -(child_stdin, - child_stdout, - child_stderr) = (p.stdin, p.stdout, p.stderr) - - -(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize) -==> -p = Popen(cmd, shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) -(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout) - - -Replacing popen2.* ------------------- -Note: If the cmd argument to popen2 functions is a string, the command -is executed through /bin/sh. If it is a list, the command is directly -executed. - -(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode) -==> -p = Popen(["somestring"], shell=True, bufsize=bufsize - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - - -(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode) -==> -p = Popen(["mycmd", "myarg"], bufsize=bufsize, - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - -The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen, -except that: - -* subprocess.Popen raises an exception if the execution fails -* the capturestderr argument is replaced with the stderr argument. -* stdin=PIPE and stdout=PIPE must be specified. -* popen2 closes all filedescriptors by default, but you have to specify - close_fds=True with subprocess.Popen. 
- - -""" - -import sys -mswindows = (sys.platform == "win32") - -import os -import types -import traceback - -if mswindows: - import threading - import msvcrt - if 0: # <-- change this to use pywin32 instead of the _subprocess driver - import pywintypes - from win32api import GetStdHandle, STD_INPUT_HANDLE, \ - STD_OUTPUT_HANDLE, STD_ERROR_HANDLE - from win32api import GetCurrentProcess, DuplicateHandle, \ - GetModuleFileName, GetVersion - from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE - from win32pipe import CreatePipe - from win32process import CreateProcess, STARTUPINFO, \ - GetExitCodeProcess, STARTF_USESTDHANDLES, \ - STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE - from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 - else: - from _subprocess import * - class STARTUPINFO: - dwFlags = 0 - hStdInput = None - hStdOutput = None - hStdError = None - class pywintypes: - error = IOError -else: - import select - import errno - import fcntl - import pickle - -__all__ = ["Popen", "PIPE", "STDOUT", "call"] - -try: - MAXFD = os.sysconf("SC_OPEN_MAX") -except: - MAXFD = 256 - -# True/False does not exist on 2.2.0 -try: - False -except NameError: - False = 0 - True = 1 - -_active = [] - -def _cleanup(): - for inst in _active[:]: - inst.poll() - -PIPE = -1 -STDOUT = -2 - - -def call(*args, **kwargs): - """Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ - return Popen(*args, **kwargs).wait() - - -def list2cmdline(seq): - """ - Translate a sequence of arguments into a command line - string, using the same rules as the MS C runtime: - - 1) Arguments are delimited by white space, which is either a - space or a tab. - - 2) A string surrounded by double quotation marks is - interpreted as a single argument, regardless of white space - contained within. A quoted string can be embedded in an - argument. - - 3) A double quotation mark preceded by a backslash is - interpreted as a literal double quotation mark. - - 4) Backslashes are interpreted literally, unless they - immediately precede a double quotation mark. - - 5) If backslashes immediately precede a double quotation mark, - every pair of backslashes is interpreted as a literal - backslash. If the number of backslashes is odd, the last - backslash escapes the next double quotation mark as - described in rule 3. - """ - - # See - # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp - result = [] - needquote = False - for arg in seq: - bs_buf = [] - - # Add a space to separate this argument from the others - if result: - result.append(' ') - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - # Don't know if we need to double yet. - bs_buf.append(c) - elif c == '"': - # Double backspaces. - result.append('\\' * len(bs_buf)*2) - bs_buf = [] - result.append('\\"') - else: - # Normal char - if bs_buf: - result.extend(bs_buf) - bs_buf = [] - result.append(c) - - # Add remaining backspaces, if any. 
- if bs_buf: - result.extend(bs_buf) - - if needquote: - result.extend(bs_buf) - result.append('"') - - return ''.join(result) - - -class Popen(object): - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - """Create new Popen instance.""" - _cleanup() - - if not isinstance(bufsize, (int, long)): - raise TypeError("bufsize must be an integer") - - if mswindows: - if preexec_fn is not None: - raise ValueError("preexec_fn is not supported on Windows " - "platforms") - if close_fds: - raise ValueError("close_fds is not supported on Windows " - "platforms") - else: - # POSIX - if startupinfo is not None: - raise ValueError("startupinfo is only supported on Windows " - "platforms") - if creationflags != 0: - raise ValueError("creationflags is only supported on Windows " - "platforms") - - self.stdin = None - self.stdout = None - self.stderr = None - self.pid = None - self.returncode = None - self.universal_newlines = universal_newlines - - # Input and output objects. The general principle is like - # this: - # - # Parent Child - # ------ ----- - # p2cwrite ---stdin---> p2cread - # c2pread <--stdout--- c2pwrite - # errread <--stderr--- errwrite - # - # On POSIX, the child objects are file descriptors. On - # Windows, these are Windows file handles. The parent objects - # are file descriptors on both platforms. The parent objects - # are None when not using PIPEs. The child objects are None - # when not redirecting. - - (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) = self._get_handles(stdin, stdout, stderr) - - self._execute_child(args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - if p2cwrite: - self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) - if c2pread: - if universal_newlines: - self.stdout = os.fdopen(c2pread, 'rU', bufsize) - else: - self.stdout = os.fdopen(c2pread, 'rb', bufsize) - if errread: - if universal_newlines: - self.stderr = os.fdopen(errread, 'rU', bufsize) - else: - self.stderr = os.fdopen(errread, 'rb', bufsize) - - _active.append(self) - - - def _translate_newlines(self, data): - data = data.replace("\r\n", "\n") - data = data.replace("\r", "\n") - return data - - - if mswindows: - # - # Windows methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - if stdin == None and stdout == None and stderr == None: - return (None, None, None, None, None, None) - - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin == None: - p2cread = GetStdHandle(STD_INPUT_HANDLE) - elif stdin == PIPE: - p2cread, p2cwrite = CreatePipe(None, 0) - # Detach and turn into fd - p2cwrite = p2cwrite.Detach() - p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) - elif type(stdin) == types.IntType: - p2cread = msvcrt.get_osfhandle(stdin) - else: - # Assuming file-like object - p2cread = msvcrt.get_osfhandle(stdin.fileno()) - p2cread = self._make_inheritable(p2cread) - - if stdout == None: - c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) - elif stdout == PIPE: - c2pread, c2pwrite = CreatePipe(None, 0) - # Detach and turn into fd - c2pread = c2pread.Detach() - c2pread = msvcrt.open_osfhandle(c2pread, 0) - elif type(stdout) == 
types.IntType: - c2pwrite = msvcrt.get_osfhandle(stdout) - else: - # Assuming file-like object - c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) - c2pwrite = self._make_inheritable(c2pwrite) - - if stderr == None: - errwrite = GetStdHandle(STD_ERROR_HANDLE) - elif stderr == PIPE: - errread, errwrite = CreatePipe(None, 0) - # Detach and turn into fd - errread = errread.Detach() - errread = msvcrt.open_osfhandle(errread, 0) - elif stderr == STDOUT: - errwrite = c2pwrite - elif type(stderr) == types.IntType: - errwrite = msvcrt.get_osfhandle(stderr) - else: - # Assuming file-like object - errwrite = msvcrt.get_osfhandle(stderr.fileno()) - errwrite = self._make_inheritable(errwrite) - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - - def _make_inheritable(self, handle): - """Return a duplicate of handle, which is inheritable""" - return DuplicateHandle(GetCurrentProcess(), handle, - GetCurrentProcess(), 0, 1, - DUPLICATE_SAME_ACCESS) - - - def _find_w9xpopen(self): - """Find and return absolute path to w9xpopen.exe""" - w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - # Eeek - file-not-found - possibly an embedding - # situation - see if we can locate it in sys.exec_prefix - w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - raise RuntimeError("Cannot locate w9xpopen.exe, which is " - "needed for Popen to work with your " - "shell or platform.") - return w9xpopen - - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (MS Windows version)""" - - if not isinstance(args, types.StringTypes): - args = list2cmdline(args) - - # Process startup details - default_startupinfo = STARTUPINFO() - if startupinfo == None: - startupinfo = default_startupinfo - if not None in (p2cread, c2pwrite, errwrite): - startupinfo.dwFlags |= STARTF_USESTDHANDLES - startupinfo.hStdInput = p2cread - startupinfo.hStdOutput = c2pwrite - startupinfo.hStdError = errwrite - - if shell: - default_startupinfo.dwFlags |= STARTF_USESHOWWINDOW - default_startupinfo.wShowWindow = SW_HIDE - comspec = os.environ.get("COMSPEC", "cmd.exe") - args = comspec + " /c " + args - if (GetVersion() >= 0x80000000L or - os.path.basename(comspec).lower() == "command.com"): - # Win9x, or using command.com on NT. We need to - # use the w9xpopen intermediate program. For more - # information, see KB Q150956 - # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) - w9xpopen = self._find_w9xpopen() - args = '"%s" %s' % (w9xpopen, args) - # Not passing CREATE_NEW_CONSOLE has been known to - # cause random failures on win9x. Specifically a - # dialog: "Your program accessed mem currently in - # use at xxx" and a hopeful warning about the - # stability of your system. Cost is Ctrl+C wont - # kill children. - creationflags |= CREATE_NEW_CONSOLE - - # Start the process - try: - hp, ht, pid, tid = CreateProcess(executable, args, - # no special security - None, None, - # must inherit handles to pass std - # handles - 1, - creationflags, - env, - cwd, - startupinfo) - except pywintypes.error, e: - # Translate pywintypes.error to WindowsError, which is - # a subclass of OSError. 
FIXME: We should really - # translate errno using _sys_errlist (or simliar), but - # how can this be done from Python? - raise WindowsError(*e.args) - - # Retain the process handle, but close the thread handle - self._handle = hp - self.pid = pid - ht.Close() - - # Child is launched. Close the parent's copy of those pipe - # handles that only the child should have open. You need - # to make sure that no handles to the write end of the - # output pipe are maintained in this process or else the - # pipe will not close when the child process exits and the - # ReadFile will hang. - if p2cread != None: - p2cread.Close() - if c2pwrite != None: - c2pwrite.Close() - if errwrite != None: - errwrite.Close() - - - def poll(self): - """Check if child process has terminated. Returns returncode - attribute.""" - if self.returncode == None: - if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: - self.returncode = GetExitCodeProcess(self._handle) - _active.remove(self) - return self.returncode - - - def wait(self): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode == None: - obj = WaitForSingleObject(self._handle, INFINITE) - self.returncode = GetExitCodeProcess(self._handle) - _active.remove(self) - return self.returncode - - - def _readerthread(self, fh, buffer): - buffer.append(fh.read()) - - - def communicate(self, input=None): - """Interact with process: Send data to stdin. Read data from - stdout and stderr, until end-of-file is reached. Wait for - process to terminate. The optional input argument should be a - string to be sent to the child process, or None, if no data - should be sent to the child. - - communicate() returns a tuple (stdout, stderr).""" - stdout = None # Return - stderr = None # Return - - if self.stdout: - stdout = [] - stdout_thread = threading.Thread(target=self._readerthread, - args=(self.stdout, stdout)) - stdout_thread.setDaemon(True) - stdout_thread.start() - if self.stderr: - stderr = [] - stderr_thread = threading.Thread(target=self._readerthread, - args=(self.stderr, stderr)) - stderr_thread.setDaemon(True) - stderr_thread.start() - - if self.stdin: - if input != None: - self.stdin.write(input) - self.stdin.close() - - if self.stdout: - stdout_thread.join() - if self.stderr: - stderr_thread.join() - - # All data exchanged. Translate lists into strings. - if stdout != None: - stdout = stdout[0] - if stderr != None: - stderr = stderr[0] - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). 
- if self.universal_newlines and hasattr(open, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - else: - # - # POSIX methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin == None: - pass - elif stdin == PIPE: - p2cread, p2cwrite = os.pipe() - elif type(stdin) == types.IntType: - p2cread = stdin - else: - # Assuming file-like object - p2cread = stdin.fileno() - - if stdout == None: - pass - elif stdout == PIPE: - c2pread, c2pwrite = os.pipe() - elif type(stdout) == types.IntType: - c2pwrite = stdout - else: - # Assuming file-like object - c2pwrite = stdout.fileno() - - if stderr == None: - pass - elif stderr == PIPE: - errread, errwrite = os.pipe() - elif stderr == STDOUT: - errwrite = c2pwrite - elif type(stderr) == types.IntType: - errwrite = stderr - else: - # Assuming file-like object - errwrite = stderr.fileno() - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - - def _set_cloexec_flag(self, fd): - try: - cloexec_flag = fcntl.FD_CLOEXEC - except AttributeError: - cloexec_flag = 1 - - old = fcntl.fcntl(fd, fcntl.F_GETFD) - fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) - - - def _close_fds(self, but): - for i in range(3, MAXFD): - if i == but: - continue - try: - os.close(i) - except: - pass - - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (POSIX version)""" - - if isinstance(args, types.StringTypes): - args = [args] - - if shell: - args = ["/bin/sh", "-c"] + args - - if executable == None: - executable = args[0] - - # For transferring possible exec failure from child to parent - # The first char specifies the exception type: 0 means - # OSError, 1 means some other error. - errpipe_read, errpipe_write = os.pipe() - self._set_cloexec_flag(errpipe_write) - - self.pid = os.fork() - if self.pid == 0: - # Child - try: - # Close parent's pipe ends - if p2cwrite: - os.close(p2cwrite) - if c2pread: - os.close(c2pread) - if errread: - os.close(errread) - os.close(errpipe_read) - - # Dup fds for child - if p2cread: - os.dup2(p2cread, 0) - if c2pwrite: - os.dup2(c2pwrite, 1) - if errwrite: - os.dup2(errwrite, 2) - - # Close pipe fds. Make sure we doesn't close the same - # fd more than once. - if p2cread: - os.close(p2cread) - if c2pwrite and c2pwrite not in (p2cread,): - os.close(c2pwrite) - if errwrite and errwrite not in (p2cread, c2pwrite): - os.close(errwrite) - - # Close all other fds, if asked for - if close_fds: - self._close_fds(but=errpipe_write) - - if cwd != None: - os.chdir(cwd) - - if preexec_fn: - apply(preexec_fn) - - if env == None: - os.execvp(executable, args) - else: - os.execvpe(executable, args, env) - - except: - exc_type, exc_value, tb = sys.exc_info() - # Save the traceback and attach it to the exception object - exc_lines = traceback.format_exception(exc_type, - exc_value, - tb) - exc_value.child_traceback = ''.join(exc_lines) - os.write(errpipe_write, pickle.dumps(exc_value)) - - # This exitcode won't be reported to applications, so it - # really doesn't matter what we return. 
- os._exit(255) - - # Parent - os.close(errpipe_write) - if p2cread and p2cwrite: - os.close(p2cread) - if c2pwrite and c2pread: - os.close(c2pwrite) - if errwrite and errread: - os.close(errwrite) - - # Wait for exec to fail or succeed; possibly raising exception - data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB - os.close(errpipe_read) - if data != "": - os.waitpid(self.pid, 0) - child_exception = pickle.loads(data) - raise child_exception - - - def _handle_exitstatus(self, sts): - if os.WIFSIGNALED(sts): - self.returncode = -os.WTERMSIG(sts) - elif os.WIFEXITED(sts): - self.returncode = os.WEXITSTATUS(sts) - else: - # Should never happen - raise RuntimeError("Unknown child exit status!") - - _active.remove(self) - - - def poll(self): - """Check if child process has terminated. Returns returncode - attribute.""" - if self.returncode == None: - try: - pid, sts = os.waitpid(self.pid, os.WNOHANG) - if pid == self.pid: - self._handle_exitstatus(sts) - except os.error: - pass - return self.returncode - - - def wait(self): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode == None: - pid, sts = os.waitpid(self.pid, 0) - self._handle_exitstatus(sts) - return self.returncode - - - def communicate(self, input=None): - """Interact with process: Send data to stdin. Read data from - stdout and stderr, until end-of-file is reached. Wait for - process to terminate. The optional input argument should be a - string to be sent to the child process, or None, if no data - should be sent to the child. - - communicate() returns a tuple (stdout, stderr).""" - read_set = [] - write_set = [] - stdout = None # Return - stderr = None # Return - - if self.stdin: - # Flush stdio buffer. This might block, if the user has - # been writing to .stdin in an uncontrolled fashion. - self.stdin.flush() - if input: - write_set.append(self.stdin) - else: - self.stdin.close() - if self.stdout: - read_set.append(self.stdout) - stdout = [] - if self.stderr: - read_set.append(self.stderr) - stderr = [] - - while read_set or write_set: - rlist, wlist, xlist = select.select(read_set, write_set, []) - - if self.stdin in wlist: - # When select has indicated that the file is writable, - # we can write up to PIPE_BUF bytes without risk - # blocking. POSIX defines PIPE_BUF >= 512 - bytes_written = os.write(self.stdin.fileno(), input[:512]) - input = input[bytes_written:] - if not input: - self.stdin.close() - write_set.remove(self.stdin) - - if self.stdout in rlist: - data = os.read(self.stdout.fileno(), 1024) - if data == "": - self.stdout.close() - read_set.remove(self.stdout) - stdout.append(data) - - if self.stderr in rlist: - data = os.read(self.stderr.fileno(), 1024) - if data == "": - self.stderr.close() - read_set.remove(self.stderr) - stderr.append(data) - - # All data exchanged. Translate lists into strings. - if stdout != None: - stdout = ''.join(stdout) - if stderr != None: - stderr = ''.join(stderr) - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). 
- if self.universal_newlines and hasattr(open, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - -def _demo_posix(): - # - # Example 1: Simple redirection: Get process list - # - plist = Popen(["ps"], stdout=PIPE).communicate()[0] - print "Process list:" - print plist - - # - # Example 2: Change uid before executing child - # - if os.getuid() == 0: - p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) - p.wait() - - # - # Example 3: Connecting several subprocesses - # - print "Looking for 'hda'..." - p1 = Popen(["dmesg"], stdout=PIPE) - p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 4: Catch execution error - # - print - print "Trying a weird file..." - try: - print Popen(["/this/path/does/not/exist"]).communicate() - except OSError, e: - if e.errno == errno.ENOENT: - print "The file didn't exist. I thought so..." - print "Child traceback:" - print e.child_traceback - else: - print "Error", e.errno - else: - print >>sys.stderr, "Gosh. No error." - - -def _demo_windows(): - # - # Example 1: Connecting several subprocesses - # - print "Looking for 'PROMPT' in set output..." - p1 = Popen("set", stdout=PIPE, shell=True) - p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 2: Simple execution of program - # - print "Executing calc..." - p = Popen("calc") - p.wait() - - -if __name__ == "__main__": - if mswindows: - _demo_windows() - else: - _demo_posix() diff --git a/config.mak.in b/config.mak.in index 1cafa19ed4..9a578405d8 100644 --- a/config.mak.in +++ b/config.mak.in @@ -13,7 +13,6 @@ bindir = @bindir@ #gitexecdir = @libexecdir@/git-core/ datarootdir = @datarootdir@ template_dir = @datadir@/git-core/templates/ -GIT_PYTHON_DIR = @datadir@/git-core/python mandir=@mandir@ @@ -23,7 +22,6 @@ VPATH = @srcdir@ export exec_prefix mandir export srcdir VPATH -NO_PYTHON=@NO_PYTHON@ NEEDS_SSL_WITH_CRYPTO=@NEEDS_SSL_WITH_CRYPTO@ NO_OPENSSL=@NO_OPENSSL@ NO_CURL=@NO_CURL@ diff --git a/configure.ac b/configure.ac index cff5722eb9..34e34789bd 100644 --- a/configure.ac +++ b/configure.ac @@ -75,20 +75,6 @@ GIT_ARG_SET_PATH(shell) # Define PERL_PATH to provide path to Perl. GIT_ARG_SET_PATH(perl) # -# Define PYTHON_PATH to provide path to Python. -AC_ARG_WITH(python,[AS_HELP_STRING([--with-python=PATH], [provide PATH to python]) -AS_HELP_STRING([--without-python], [don't use python scripts])], - [if test "$withval" = "no"; then \ - NO_PYTHON=YesPlease; \ - elif test "$withval" = "yes"; then \ - NO_PYTHON=; \ - else \ - NO_PYTHON=; \ - PYTHON_PATH=$withval; \ - fi; \ - ]) -AC_SUBST(NO_PYTHON) -AC_SUBST(PYTHON_PATH) ## Checks for programs. @@ -98,18 +84,6 @@ AC_PROG_CC([cc gcc]) #AC_PROG_INSTALL # needs install-sh or install.sh in sources AC_CHECK_TOOL(AR, ar, :) AC_CHECK_PROGS(TAR, [gtar tar]) -# -# Define PYTHON_PATH to provide path to Python. -if test -z "$NO_PYTHON"; then - if test -z "$PYTHON_PATH"; then - AC_PATH_PROGS(PYTHON_PATH, [python python2.4 python2.3 python2]) - fi - if test -n "$PYTHON_PATH"; then - GIT_CONF_APPEND_LINE([PYTHON_PATH=@PYTHON_PATH@]) - NO_PYTHON="" - fi -fi - ## Checks for libraries. AC_MSG_NOTICE([CHECKS for libraries]) @@ -262,22 +236,9 @@ AC_SUBST(NO_SETENV) # Define NO_SYMLINK_HEAD if you never want .git/HEAD to be a symbolic link. # Enable it on Windows. By default, symrefs are still used. 
# -# Define WITH_OWN_SUBPROCESS_PY if you want to use with python 2.3. -AC_CACHE_CHECK([for subprocess.py], - [ac_cv_python_has_subprocess_py], -[if $PYTHON_PATH -c 'import subprocess' 2>/dev/null; then - ac_cv_python_has_subprocess_py=yes -else - ac_cv_python_has_subprocess_py=no -fi]) -if test $ac_cv_python_has_subprocess_py != yes; then - GIT_CONF_APPEND_LINE([WITH_OWN_SUBPROCESS_PY=YesPlease]) -fi -# # Define NO_ACCURATE_DIFF if your diff program at least sometimes misses # a missing newline at the end of the file. - ## Site configuration (override autodetection) ## --with-PACKAGE[=ARG] and --without-PACKAGE AC_MSG_NOTICE([CHECKS for site configuration]) diff --git a/git-merge-recursive-old.py b/git-merge-recursive-old.py deleted file mode 100755 index 4039435ce4..0000000000 --- a/git-merge-recursive-old.py +++ /dev/null @@ -1,944 +0,0 @@ -#!/usr/bin/python -# -# Copyright (C) 2005 Fredrik Kuivinen -# - -import sys -sys.path.append('''@@GIT_PYTHON_PATH@@''') - -import math, random, os, re, signal, tempfile, stat, errno, traceback -from heapq import heappush, heappop -from sets import Set - -from gitMergeCommon import * - -outputIndent = 0 -def output(*args): - sys.stdout.write(' '*outputIndent) - printList(args) - -originalIndexFile = os.environ.get('GIT_INDEX_FILE', - os.environ.get('GIT_DIR', '.git') + '/index') -temporaryIndexFile = os.environ.get('GIT_DIR', '.git') + \ - '/merge-recursive-tmp-index' -def setupIndex(temporary): - try: - os.unlink(temporaryIndexFile) - except OSError: - pass - if temporary: - newIndex = temporaryIndexFile - else: - newIndex = originalIndexFile - os.environ['GIT_INDEX_FILE'] = newIndex - -# This is a global variable which is used in a number of places but -# only written to in the 'merge' function. - -# cacheOnly == True => Don't leave any non-stage 0 entries in the cache and -# don't update the working directory. -# False => Leave unmerged entries in the cache and update -# the working directory. 
- -cacheOnly = False - -# The entry point to the merge code -# --------------------------------- - -def merge(h1, h2, branch1Name, branch2Name, graph, callDepth=0, ancestor=None): - '''Merge the commits h1 and h2, return the resulting virtual - commit object and a flag indicating the cleanness of the merge.''' - assert(isinstance(h1, Commit) and isinstance(h2, Commit)) - - global outputIndent - - output('Merging:') - output(h1) - output(h2) - sys.stdout.flush() - - if ancestor: - ca = [ancestor] - else: - assert(isinstance(graph, Graph)) - ca = getCommonAncestors(graph, h1, h2) - output('found', len(ca), 'common ancestor(s):') - for x in ca: - output(x) - sys.stdout.flush() - - mergedCA = ca[0] - for h in ca[1:]: - outputIndent = callDepth+1 - [mergedCA, dummy] = merge(mergedCA, h, - 'Temporary merge branch 1', - 'Temporary merge branch 2', - graph, callDepth+1) - outputIndent = callDepth - assert(isinstance(mergedCA, Commit)) - - global cacheOnly - if callDepth == 0: - setupIndex(False) - cacheOnly = False - else: - setupIndex(True) - runProgram(['git-read-tree', h1.tree()]) - cacheOnly = True - - [shaRes, clean] = mergeTrees(h1.tree(), h2.tree(), mergedCA.tree(), - branch1Name, branch2Name) - - if graph and (clean or cacheOnly): - res = Commit(None, [h1, h2], tree=shaRes) - graph.addNode(res) - else: - res = None - - return [res, clean] - -getFilesRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)$', re.S) -def getFilesAndDirs(tree): - files = Set() - dirs = Set() - out = runProgram(['git-ls-tree', '-r', '-z', '-t', tree]) - for l in out.split('\0'): - m = getFilesRE.match(l) - if m: - if m.group(2) == 'tree': - dirs.add(m.group(4)) - elif m.group(2) == 'blob': - files.add(m.group(4)) - - return [files, dirs] - -# Those two global variables are used in a number of places but only -# written to in 'mergeTrees' and 'uniquePath'. They keep track of -# every file and directory in the two branches that are about to be -# merged. -currentFileSet = None -currentDirectorySet = None - -def mergeTrees(head, merge, common, branch1Name, branch2Name): - '''Merge the trees 'head' and 'merge' with the common ancestor - 'common'. The name of the head branch is 'branch1Name' and the name of - the merge branch is 'branch2Name'. 
Return a tuple (tree, cleanMerge) - where tree is the resulting tree and cleanMerge is True iff the - merge was clean.''' - - assert(isSha(head) and isSha(merge) and isSha(common)) - - if common == merge: - output('Already uptodate!') - return [head, True] - - if cacheOnly: - updateArg = '-i' - else: - updateArg = '-u' - - [out, code] = runProgram(['git-read-tree', updateArg, '-m', - common, head, merge], returnCode = True) - if code != 0: - die('git-read-tree:', out) - - [tree, code] = runProgram('git-write-tree', returnCode=True) - tree = tree.rstrip() - if code != 0: - global currentFileSet, currentDirectorySet - [currentFileSet, currentDirectorySet] = getFilesAndDirs(head) - [filesM, dirsM] = getFilesAndDirs(merge) - currentFileSet.union_update(filesM) - currentDirectorySet.union_update(dirsM) - - entries = unmergedCacheEntries() - renamesHead = getRenames(head, common, head, merge, entries) - renamesMerge = getRenames(merge, common, head, merge, entries) - - cleanMerge = processRenames(renamesHead, renamesMerge, - branch1Name, branch2Name) - for entry in entries: - if entry.processed: - continue - if not processEntry(entry, branch1Name, branch2Name): - cleanMerge = False - - if cleanMerge or cacheOnly: - tree = runProgram('git-write-tree').rstrip() - else: - tree = None - else: - cleanMerge = True - - return [tree, cleanMerge] - -# Low level file merging, update and removal -# ------------------------------------------ - -def mergeFile(oPath, oSha, oMode, aPath, aSha, aMode, bPath, bSha, bMode, - branch1Name, branch2Name): - - merge = False - clean = True - - if stat.S_IFMT(aMode) != stat.S_IFMT(bMode): - clean = False - if stat.S_ISREG(aMode): - mode = aMode - sha = aSha - else: - mode = bMode - sha = bSha - else: - if aSha != oSha and bSha != oSha: - merge = True - - if aMode == oMode: - mode = bMode - else: - mode = aMode - - if aSha == oSha: - sha = bSha - elif bSha == oSha: - sha = aSha - elif stat.S_ISREG(aMode): - assert(stat.S_ISREG(bMode)) - - orig = runProgram(['git-unpack-file', oSha]).rstrip() - src1 = runProgram(['git-unpack-file', aSha]).rstrip() - src2 = runProgram(['git-unpack-file', bSha]).rstrip() - try: - [out, code] = runProgram(['merge', - '-L', branch1Name + '/' + aPath, - '-L', 'orig/' + oPath, - '-L', branch2Name + '/' + bPath, - src1, orig, src2], returnCode=True) - except ProgramError, e: - print >>sys.stderr, e - die("Failed to execute 'merge'. merge(1) is used as the " - "file-level merge tool. 
Is 'merge' in your path?") - - sha = runProgram(['git-hash-object', '-t', 'blob', '-w', - src1]).rstrip() - - os.unlink(orig) - os.unlink(src1) - os.unlink(src2) - - clean = (code == 0) - else: - assert(stat.S_ISLNK(aMode) and stat.S_ISLNK(bMode)) - sha = aSha - - if aSha != bSha: - clean = False - - return [sha, mode, clean, merge] - -def updateFile(clean, sha, mode, path): - updateCache = cacheOnly or clean - updateWd = not cacheOnly - - return updateFileExt(sha, mode, path, updateCache, updateWd) - -def updateFileExt(sha, mode, path, updateCache, updateWd): - if cacheOnly: - updateWd = False - - if updateWd: - pathComponents = path.split('/') - for x in xrange(1, len(pathComponents)): - p = '/'.join(pathComponents[0:x]) - - try: - createDir = not stat.S_ISDIR(os.lstat(p).st_mode) - except OSError: - createDir = True - - if createDir: - try: - os.mkdir(p) - except OSError, e: - die("Couldn't create directory", p, e.strerror) - - prog = ['git-cat-file', 'blob', sha] - if stat.S_ISREG(mode): - try: - os.unlink(path) - except OSError: - pass - if mode & 0100: - mode = 0777 - else: - mode = 0666 - fd = os.open(path, os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode) - proc = subprocess.Popen(prog, stdout=fd) - proc.wait() - os.close(fd) - elif stat.S_ISLNK(mode): - linkTarget = runProgram(prog) - os.symlink(linkTarget, path) - else: - assert(False) - - if updateWd and updateCache: - runProgram(['git-update-index', '--add', '--', path]) - elif updateCache: - runProgram(['git-update-index', '--add', '--cacheinfo', - '0%o' % mode, sha, path]) - -def setIndexStages(path, - oSHA1, oMode, - aSHA1, aMode, - bSHA1, bMode, - clear=True): - istring = [] - if clear: - istring.append("0 " + ("0" * 40) + "\t" + path + "\0") - if oMode: - istring.append("%o %s %d\t%s\0" % (oMode, oSHA1, 1, path)) - if aMode: - istring.append("%o %s %d\t%s\0" % (aMode, aSHA1, 2, path)) - if bMode: - istring.append("%o %s %d\t%s\0" % (bMode, bSHA1, 3, path)) - - runProgram(['git-update-index', '-z', '--index-info'], - input="".join(istring)) - -def removeFile(clean, path): - updateCache = cacheOnly or clean - updateWd = not cacheOnly - - if updateCache: - runProgram(['git-update-index', '--force-remove', '--', path]) - - if updateWd: - try: - os.unlink(path) - except OSError, e: - if e.errno != errno.ENOENT and e.errno != errno.EISDIR: - raise - try: - os.removedirs(os.path.dirname(path)) - except OSError: - pass - -def uniquePath(path, branch): - def fileExists(path): - try: - os.lstat(path) - return True - except OSError, e: - if e.errno == errno.ENOENT: - return False - else: - raise - - branch = branch.replace('/', '_') - newPath = path + '~' + branch - suffix = 0 - while newPath in currentFileSet or \ - newPath in currentDirectorySet or \ - fileExists(newPath): - suffix += 1 - newPath = path + '~' + branch + '_' + str(suffix) - currentFileSet.add(newPath) - return newPath - -# Cache entry management -# ---------------------- - -class CacheEntry: - def __init__(self, path): - class Stage: - def __init__(self): - self.sha1 = None - self.mode = None - - # Used for debugging only - def __str__(self): - if self.mode != None: - m = '0%o' % self.mode - else: - m = 'None' - - if self.sha1: - sha1 = self.sha1 - else: - sha1 = 'None' - return 'sha1: ' + sha1 + ' mode: ' + m - - self.stages = [Stage(), Stage(), Stage(), Stage()] - self.path = path - self.processed = False - - def __str__(self): - return 'path: ' + self.path + ' stages: ' + repr([str(x) for x in self.stages]) - -class CacheEntryContainer: - def __init__(self): - 
self.entries = {} - - def add(self, entry): - self.entries[entry.path] = entry - - def get(self, path): - return self.entries.get(path) - - def __iter__(self): - return self.entries.itervalues() - -unmergedRE = re.compile(r'^([0-7]+) ([0-9a-f]{40}) ([1-3])\t(.*)$', re.S) -def unmergedCacheEntries(): - '''Create a dictionary mapping file names to CacheEntry - objects. The dictionary contains one entry for every path with a - non-zero stage entry.''' - - lines = runProgram(['git-ls-files', '-z', '--unmerged']).split('\0') - lines.pop() - - res = CacheEntryContainer() - for l in lines: - m = unmergedRE.match(l) - if m: - mode = int(m.group(1), 8) - sha1 = m.group(2) - stage = int(m.group(3)) - path = m.group(4) - - e = res.get(path) - if not e: - e = CacheEntry(path) - res.add(e) - - e.stages[stage].mode = mode - e.stages[stage].sha1 = sha1 - else: - die('Error: Merge program failed: Unexpected output from', - 'git-ls-files:', l) - return res - -lsTreeRE = re.compile(r'^([0-7]+) (\S+) ([0-9a-f]{40})\t(.*)\n$', re.S) -def getCacheEntry(path, origTree, aTree, bTree): - '''Returns a CacheEntry object which doesn't have to correspond to - a real cache entry in Git's index.''' - - def parse(out): - if out == '': - return [None, None] - else: - m = lsTreeRE.match(out) - if not m: - die('Unexpected output from git-ls-tree:', out) - elif m.group(2) == 'blob': - return [m.group(3), int(m.group(1), 8)] - else: - return [None, None] - - res = CacheEntry(path) - - [oSha, oMode] = parse(runProgram(['git-ls-tree', origTree, '--', path])) - [aSha, aMode] = parse(runProgram(['git-ls-tree', aTree, '--', path])) - [bSha, bMode] = parse(runProgram(['git-ls-tree', bTree, '--', path])) - - res.stages[1].sha1 = oSha - res.stages[1].mode = oMode - res.stages[2].sha1 = aSha - res.stages[2].mode = aMode - res.stages[3].sha1 = bSha - res.stages[3].mode = bMode - - return res - -# Rename detection and handling -# ----------------------------- - -class RenameEntry: - def __init__(self, - src, srcSha, srcMode, srcCacheEntry, - dst, dstSha, dstMode, dstCacheEntry, - score): - self.srcName = src - self.srcSha = srcSha - self.srcMode = srcMode - self.srcCacheEntry = srcCacheEntry - self.dstName = dst - self.dstSha = dstSha - self.dstMode = dstMode - self.dstCacheEntry = dstCacheEntry - self.score = score - - self.processed = False - -class RenameEntryContainer: - def __init__(self): - self.entriesSrc = {} - self.entriesDst = {} - - def add(self, entry): - self.entriesSrc[entry.srcName] = entry - self.entriesDst[entry.dstName] = entry - - def getSrc(self, path): - return self.entriesSrc.get(path) - - def getDst(self, path): - return self.entriesDst.get(path) - - def __iter__(self): - return self.entriesSrc.itervalues() - -parseDiffRenamesRE = re.compile('^:([0-7]+) ([0-7]+) ([0-9a-f]{40}) ([0-9a-f]{40}) R([0-9]*)$') -def getRenames(tree, oTree, aTree, bTree, cacheEntries): - '''Get information of all renames which occured between 'oTree' and - 'tree'. We need the three trees in the merge ('oTree', 'aTree' and - 'bTree') to be able to associate the correct cache entries with - the rename information. 
'tree' is always equal to either aTree or bTree.''' - - assert(tree == aTree or tree == bTree) - inp = runProgram(['git-diff-tree', '-M', '--diff-filter=R', '-r', - '-z', oTree, tree]) - - ret = RenameEntryContainer() - try: - recs = inp.split("\0") - recs.pop() # remove last entry (which is '') - it = recs.__iter__() - while True: - rec = it.next() - m = parseDiffRenamesRE.match(rec) - - if not m: - die('Unexpected output from git-diff-tree:', rec) - - srcMode = int(m.group(1), 8) - dstMode = int(m.group(2), 8) - srcSha = m.group(3) - dstSha = m.group(4) - score = m.group(5) - src = it.next() - dst = it.next() - - srcCacheEntry = cacheEntries.get(src) - if not srcCacheEntry: - srcCacheEntry = getCacheEntry(src, oTree, aTree, bTree) - cacheEntries.add(srcCacheEntry) - - dstCacheEntry = cacheEntries.get(dst) - if not dstCacheEntry: - dstCacheEntry = getCacheEntry(dst, oTree, aTree, bTree) - cacheEntries.add(dstCacheEntry) - - ret.add(RenameEntry(src, srcSha, srcMode, srcCacheEntry, - dst, dstSha, dstMode, dstCacheEntry, - score)) - except StopIteration: - pass - return ret - -def fmtRename(src, dst): - srcPath = src.split('/') - dstPath = dst.split('/') - path = [] - endIndex = min(len(srcPath), len(dstPath)) - 1 - for x in range(0, endIndex): - if srcPath[x] == dstPath[x]: - path.append(srcPath[x]) - else: - endIndex = x - break - - if len(path) > 0: - return '/'.join(path) + \ - '/{' + '/'.join(srcPath[endIndex:]) + ' => ' + \ - '/'.join(dstPath[endIndex:]) + '}' - else: - return src + ' => ' + dst - -def processRenames(renamesA, renamesB, branchNameA, branchNameB): - srcNames = Set() - for x in renamesA: - srcNames.add(x.srcName) - for x in renamesB: - srcNames.add(x.srcName) - - cleanMerge = True - for path in srcNames: - if renamesA.getSrc(path): - renames1 = renamesA - renames2 = renamesB - branchName1 = branchNameA - branchName2 = branchNameB - else: - renames1 = renamesB - renames2 = renamesA - branchName1 = branchNameB - branchName2 = branchNameA - - ren1 = renames1.getSrc(path) - ren2 = renames2.getSrc(path) - - ren1.dstCacheEntry.processed = True - ren1.srcCacheEntry.processed = True - - if ren1.processed: - continue - - ren1.processed = True - - if ren2: - # Renamed in 1 and renamed in 2 - assert(ren1.srcName == ren2.srcName) - ren2.dstCacheEntry.processed = True - ren2.processed = True - - if ren1.dstName != ren2.dstName: - output('CONFLICT (rename/rename): Rename', - fmtRename(path, ren1.dstName), 'in branch', branchName1, - 'rename', fmtRename(path, ren2.dstName), 'in', - branchName2) - cleanMerge = False - - if ren1.dstName in currentDirectorySet: - dstName1 = uniquePath(ren1.dstName, branchName1) - output(ren1.dstName, 'is a directory in', branchName2, - 'adding as', dstName1, 'instead.') - removeFile(False, ren1.dstName) - else: - dstName1 = ren1.dstName - - if ren2.dstName in currentDirectorySet: - dstName2 = uniquePath(ren2.dstName, branchName2) - output(ren2.dstName, 'is a directory in', branchName1, - 'adding as', dstName2, 'instead.') - removeFile(False, ren2.dstName) - else: - dstName2 = ren2.dstName - setIndexStages(dstName1, - None, None, - ren1.dstSha, ren1.dstMode, - None, None) - setIndexStages(dstName2, - None, None, - None, None, - ren2.dstSha, ren2.dstMode) - - else: - removeFile(True, ren1.srcName) - - [resSha, resMode, clean, merge] = \ - mergeFile(ren1.srcName, ren1.srcSha, ren1.srcMode, - ren1.dstName, ren1.dstSha, ren1.dstMode, - ren2.dstName, ren2.dstSha, ren2.dstMode, - branchName1, branchName2) - - if merge or not clean: - output('Renaming', 
fmtRename(path, ren1.dstName)) - - if merge: - output('Auto-merging', ren1.dstName) - - if not clean: - output('CONFLICT (content): merge conflict in', - ren1.dstName) - cleanMerge = False - - if not cacheOnly: - setIndexStages(ren1.dstName, - ren1.srcSha, ren1.srcMode, - ren1.dstSha, ren1.dstMode, - ren2.dstSha, ren2.dstMode) - - updateFile(clean, resSha, resMode, ren1.dstName) - else: - removeFile(True, ren1.srcName) - - # Renamed in 1, maybe changed in 2 - if renamesA == renames1: - stage = 3 - else: - stage = 2 - - srcShaOtherBranch = ren1.srcCacheEntry.stages[stage].sha1 - srcModeOtherBranch = ren1.srcCacheEntry.stages[stage].mode - - dstShaOtherBranch = ren1.dstCacheEntry.stages[stage].sha1 - dstModeOtherBranch = ren1.dstCacheEntry.stages[stage].mode - - tryMerge = False - - if ren1.dstName in currentDirectorySet: - newPath = uniquePath(ren1.dstName, branchName1) - output('CONFLICT (rename/directory): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', branchName1, - 'directory', ren1.dstName, 'added in', branchName2) - output('Renaming', ren1.srcName, 'to', newPath, 'instead') - cleanMerge = False - removeFile(False, ren1.dstName) - updateFile(False, ren1.dstSha, ren1.dstMode, newPath) - elif srcShaOtherBranch == None: - output('CONFLICT (rename/delete): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1, 'and deleted in', branchName2) - cleanMerge = False - updateFile(False, ren1.dstSha, ren1.dstMode, ren1.dstName) - elif dstShaOtherBranch: - newPath = uniquePath(ren1.dstName, branchName2) - output('CONFLICT (rename/add): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1 + '.', ren1.dstName, 'added in', branchName2) - output('Adding as', newPath, 'instead') - updateFile(False, dstShaOtherBranch, dstModeOtherBranch, newPath) - cleanMerge = False - tryMerge = True - elif renames2.getDst(ren1.dstName): - dst2 = renames2.getDst(ren1.dstName) - newPath1 = uniquePath(ren1.dstName, branchName1) - newPath2 = uniquePath(dst2.dstName, branchName2) - output('CONFLICT (rename/rename): Rename', - fmtRename(ren1.srcName, ren1.dstName), 'in', - branchName1+'. 
Rename', - fmtRename(dst2.srcName, dst2.dstName), 'in', branchName2) - output('Renaming', ren1.srcName, 'to', newPath1, 'and', - dst2.srcName, 'to', newPath2, 'instead') - removeFile(False, ren1.dstName) - updateFile(False, ren1.dstSha, ren1.dstMode, newPath1) - updateFile(False, dst2.dstSha, dst2.dstMode, newPath2) - dst2.processed = True - cleanMerge = False - else: - tryMerge = True - - if tryMerge: - - oName, oSHA1, oMode = ren1.srcName, ren1.srcSha, ren1.srcMode - aName, bName = ren1.dstName, ren1.srcName - aSHA1, bSHA1 = ren1.dstSha, srcShaOtherBranch - aMode, bMode = ren1.dstMode, srcModeOtherBranch - aBranch, bBranch = branchName1, branchName2 - - if renamesA != renames1: - aName, bName = bName, aName - aSHA1, bSHA1 = bSHA1, aSHA1 - aMode, bMode = bMode, aMode - aBranch, bBranch = bBranch, aBranch - - [resSha, resMode, clean, merge] = \ - mergeFile(oName, oSHA1, oMode, - aName, aSHA1, aMode, - bName, bSHA1, bMode, - aBranch, bBranch); - - if merge or not clean: - output('Renaming', fmtRename(ren1.srcName, ren1.dstName)) - - if merge: - output('Auto-merging', ren1.dstName) - - if not clean: - output('CONFLICT (rename/modify): Merge conflict in', - ren1.dstName) - cleanMerge = False - - if not cacheOnly: - setIndexStages(ren1.dstName, - oSHA1, oMode, - aSHA1, aMode, - bSHA1, bMode) - - updateFile(clean, resSha, resMode, ren1.dstName) - - return cleanMerge - -# Per entry merge function -# ------------------------ - -def processEntry(entry, branch1Name, branch2Name): - '''Merge one cache entry.''' - - debug('processing', entry.path, 'clean cache:', cacheOnly) - - cleanMerge = True - - path = entry.path - oSha = entry.stages[1].sha1 - oMode = entry.stages[1].mode - aSha = entry.stages[2].sha1 - aMode = entry.stages[2].mode - bSha = entry.stages[3].sha1 - bMode = entry.stages[3].mode - - assert(oSha == None or isSha(oSha)) - assert(aSha == None or isSha(aSha)) - assert(bSha == None or isSha(bSha)) - - assert(oMode == None or type(oMode) is int) - assert(aMode == None or type(aMode) is int) - assert(bMode == None or type(bMode) is int) - - if (oSha and (not aSha or not bSha)): - # - # Case A: Deleted in one - # - if (not aSha and not bSha) or \ - (aSha == oSha and not bSha) or \ - (not aSha and bSha == oSha): - # Deleted in both or deleted in one and unchanged in the other - if aSha: - output('Removing', path) - removeFile(True, path) - else: - # Deleted in one and changed in the other - cleanMerge = False - if not aSha: - output('CONFLICT (delete/modify):', path, 'deleted in', - branch1Name, 'and modified in', branch2Name + '.', - 'Version', branch2Name, 'of', path, 'left in tree.') - mode = bMode - sha = bSha - else: - output('CONFLICT (modify/delete):', path, 'deleted in', - branch2Name, 'and modified in', branch1Name + '.', - 'Version', branch1Name, 'of', path, 'left in tree.') - mode = aMode - sha = aSha - - updateFile(False, sha, mode, path) - - elif (not oSha and aSha and not bSha) or \ - (not oSha and not aSha and bSha): - # - # Case B: Added in one. - # - if aSha: - addBranch = branch1Name - otherBranch = branch2Name - mode = aMode - sha = aSha - conf = 'file/directory' - else: - addBranch = branch2Name - otherBranch = branch1Name - mode = bMode - sha = bSha - conf = 'directory/file' - - if path in currentDirectorySet: - cleanMerge = False - newPath = uniquePath(path, addBranch) - output('CONFLICT (' + conf + '):', - 'There is a directory with name', path, 'in', - otherBranch + '. 
Adding', path, 'as', newPath) - - removeFile(False, path) - updateFile(False, sha, mode, newPath) - else: - output('Adding', path) - updateFile(True, sha, mode, path) - - elif not oSha and aSha and bSha: - # - # Case C: Added in both (check for same permissions). - # - if aSha == bSha: - if aMode != bMode: - cleanMerge = False - output('CONFLICT: File', path, - 'added identically in both branches, but permissions', - 'conflict', '0%o' % aMode, '->', '0%o' % bMode) - output('CONFLICT: adding with permission:', '0%o' % aMode) - - updateFile(False, aSha, aMode, path) - else: - # This case is handled by git-read-tree - assert(False) - else: - cleanMerge = False - newPath1 = uniquePath(path, branch1Name) - newPath2 = uniquePath(path, branch2Name) - output('CONFLICT (add/add): File', path, - 'added non-identically in both branches. Adding as', - newPath1, 'and', newPath2, 'instead.') - removeFile(False, path) - updateFile(False, aSha, aMode, newPath1) - updateFile(False, bSha, bMode, newPath2) - - elif oSha and aSha and bSha: - # - # case D: Modified in both, but differently. - # - output('Auto-merging', path) - [sha, mode, clean, dummy] = \ - mergeFile(path, oSha, oMode, - path, aSha, aMode, - path, bSha, bMode, - branch1Name, branch2Name) - if clean: - updateFile(True, sha, mode, path) - else: - cleanMerge = False - output('CONFLICT (content): Merge conflict in', path) - - if cacheOnly: - updateFile(False, sha, mode, path) - else: - updateFileExt(sha, mode, path, updateCache=False, updateWd=True) - else: - die("ERROR: Fatal merge failure, shouldn't happen.") - - return cleanMerge - -def usage(): - die('Usage:', sys.argv[0], ' ... -- ..') - -# main entry point as merge strategy module -# The first parameters up to -- are merge bases, and the rest are heads. - -if len(sys.argv) < 4: - usage() - -bases = [] -for nextArg in xrange(1, len(sys.argv)): - if sys.argv[nextArg] == '--': - if len(sys.argv) != nextArg + 3: - die('Not handling anything other than two heads merge.') - try: - h1 = firstBranch = sys.argv[nextArg + 1] - h2 = secondBranch = sys.argv[nextArg + 2] - except IndexError: - usage() - break - else: - bases.append(sys.argv[nextArg]) - -print 'Merging', h1, 'with', h2 - -try: - h1 = runProgram(['git-rev-parse', '--verify', h1 + '^0']).rstrip() - h2 = runProgram(['git-rev-parse', '--verify', h2 + '^0']).rstrip() - - if len(bases) == 1: - base = runProgram(['git-rev-parse', '--verify', - bases[0] + '^0']).rstrip() - ancestor = Commit(base, None) - [dummy, clean] = merge(Commit(h1, None), Commit(h2, None), - firstBranch, secondBranch, None, 0, - ancestor) - else: - graph = buildGraph([h1, h2]) - [dummy, clean] = merge(graph.shaMap[h1], graph.shaMap[h2], - firstBranch, secondBranch, graph) - - print '' -except: - if isinstance(sys.exc_info()[1], SystemExit): - raise - else: - traceback.print_exc(None, sys.stderr) - sys.exit(2) - -if clean: - sys.exit(0) -else: - sys.exit(1) diff --git a/git-merge.sh b/git-merge.sh index cb094388bb..75af10d3e4 100755 --- a/git-merge.sh +++ b/git-merge.sh @@ -3,22 +3,20 @@ # Copyright (c) 2005 Junio C Hamano # -USAGE='[-n] [--no-commit] [--squash] [-s ]... +' +USAGE='[-n] [--no-commit] [--squash] [-s ] [--reflog-action=] [-m=] +' + . 
git-sh-setup LF=' ' -all_strategies='recur recursive recursive-old octopus resolve stupid ours' +all_strategies='recur recursive octopus resolve stupid ours' default_twohead_strategies='recursive' default_octopus_strategies='octopus' no_trivial_merge_strategies='ours' use_strategies= index_merge=t -if test "@@NO_PYTHON@@"; then - all_strategies='recur recursive resolve octopus stupid ours' -fi dropsave() { rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \ @@ -95,7 +93,7 @@ finish () { case "$#" in 0) usage ;; esac -rloga= +rloga= have_message= while case "$#" in 0) break ;; esac do case "$1" in @@ -128,17 +126,81 @@ do --reflog-action=*) rloga=`expr "z$1" : 'z-[^=]*=\(.*\)'` ;; + -m=*|--m=*|--me=*|--mes=*|--mess=*|--messa=*|--messag=*|--message=*) + merge_msg=`expr "z$1" : 'z-[^=]*=\(.*\)'` + have_message=t + ;; + -m|--m|--me|--mes|--mess|--messa|--messag|--message) + shift + case "$#" in + 1) usage ;; + esac + merge_msg="$1" + have_message=t + ;; -*) usage ;; *) break ;; esac shift done -merge_msg="$1" -shift -head_arg="$1" -head=$(git-rev-parse --verify "$1"^0) || usage -shift +# This could be traditional "merge HEAD ..." and the +# way we can tell it is to see if the second token is HEAD, but some +# people might have misused the interface and used a committish that +# is the same as HEAD there instead. Traditional format never would +# have "-m" so it is an additional safety measure to check for it. + +if test -z "$have_message" && + second_token=$(git-rev-parse --verify "$2^0" 2>/dev/null) && + head_commit=$(git-rev-parse --verify "HEAD" 2>/dev/null) && + test "$second_token" = "$head_commit" +then + merge_msg="$1" + shift + head_arg="$1" + shift +elif ! git-rev-parse --verify HEAD >/dev/null 2>&1 +then + # If the merged head is a valid one there is no reason to + # forbid "git merge" into a branch yet to be born. We do + # the same for "git pull". + if test 1 -ne $# + then + echo >&2 "Can merge only exactly one commit into empty head" + exit 1 + fi + + rh=$(git rev-parse --verify "$1^0") || + die "$1 - not something we can merge" + + git-update-ref -m "initial pull" HEAD "$rh" "" && + git-read-tree --reset -u HEAD + exit + +else + # We are invoked directly as the first-class UI. + head_arg=HEAD + + # All the rest are the commits being merged; prepare + # the standard merge summary message to be appended to + # the given message. If remote is invalid we will die + # later in the common codepath so we discard the error + # in this loop. + merge_name=$(for remote + do + rh=$(git-rev-parse --verify "$remote"^0 2>/dev/null) && + if git show-ref -q --verify "refs/heads/$remote" + then + what=branch + else + what=commit + fi && + echo "$rh $what '$remote'" + done | git-fmt-merge-msg + ) + merge_msg="${merge_msg:+$merge_msg$LF$LF}$merge_name" +fi +head=$(git-rev-parse --verify "$head_arg"^0) || usage # All the rest are remote heads test "$#" = 0 && usage ;# we need at least one remote head. @@ -147,7 +209,7 @@ test "$rloga" = '' && rloga="merge: $@" remoteheads= for remote do - remotehead=$(git-rev-parse --verify "$remote"^0) || + remotehead=$(git-rev-parse --verify "$remote"^0 2>/dev/null) || die "$remote - not something we can merge" remoteheads="${remoteheads}$remotehead " done diff --git a/git-rebase.sh b/git-rebase.sh index 546fa446fc..25530dfdc5 100755 --- a/git-rebase.sh +++ b/git-rebase.sh @@ -302,15 +302,6 @@ then exit $? 
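The git-merge.sh hunk above has to tell the historic calling convention (message first, then HEAD, then the commits to merge) apart from the new one (commits only, with the message given via -m). It does so by checking, when no -m was seen, whether the second argument names the same commit as HEAD. The following Python restatement is only meant to spell that check out; it is not part of the patch, and rev_parse and looks_like_traditional_merge are names made up for this sketch.

    import subprocess

    def rev_parse(rev):
        """Return the full object name for rev, or None if it does not resolve."""
        p = subprocess.run(['git', 'rev-parse', '--verify', rev],
                           capture_output=True, text=True)
        return p.stdout.strip() if p.returncode == 0 else None

    def looks_like_traditional_merge(args, have_message=False):
        # Old callers passed the merge message as the first argument and HEAD
        # as the second; if -m was given, or the second argument does not
        # resolve to the current HEAD commit, assume the new syntax instead.
        if have_message or len(args) < 2:
            return False
        second = rev_parse(args[1] + '^0')
        head = rev_parse('HEAD')
        return second is not None and second == head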
fi -if test "@@NO_PYTHON@@" && test "$strategy" = "recursive-old" -then - die 'The recursive-old merge strategy is written in Python, -which this installation of git was not configured with. Please consider -a different merge strategy (e.g. recursive, resolve, or stupid) -or install Python and git with Python support.' - -fi - # start doing a rebase with git-merge # this is rename-aware if the recursive (default) strategy is used diff --git a/git.spec.in b/git.spec.in index 83268fc9d9..f2374b7331 100644 --- a/git.spec.in +++ b/git.spec.in @@ -24,7 +24,7 @@ This is a dummy package which brings in all subpackages. %package core Summary: Core git tools Group: Development/Tools -Requires: zlib >= 1.2, rsync, rcs, curl, less, openssh-clients, python >= 2.3, expat +Requires: zlib >= 1.2, rsync, rcs, curl, less, openssh-clients, expat %description core This is a stupid (but extremely fast) directory content manager. It doesn't do a whole lot, but what it _does_ do is track directory diff --git a/gitMergeCommon.py b/gitMergeCommon.py deleted file mode 100644 index fdbf9e4778..0000000000 --- a/gitMergeCommon.py +++ /dev/null @@ -1,275 +0,0 @@ -# -# Copyright (C) 2005 Fredrik Kuivinen -# - -import sys, re, os, traceback -from sets import Set - -def die(*args): - printList(args, sys.stderr) - sys.exit(2) - -def printList(list, file=sys.stdout): - for x in list: - file.write(str(x)) - file.write(' ') - file.write('\n') - -import subprocess - -# Debugging machinery -# ------------------- - -DEBUG = 0 -functionsToDebug = Set() - -def addDebug(func): - if type(func) == str: - functionsToDebug.add(func) - else: - functionsToDebug.add(func.func_name) - -def debug(*args): - if DEBUG: - funcName = traceback.extract_stack()[-2][2] - if funcName in functionsToDebug: - printList(args) - -# Program execution -# ----------------- - -class ProgramError(Exception): - def __init__(self, progStr, error): - self.progStr = progStr - self.error = error - - def __str__(self): - return self.progStr + ': ' + self.error - -addDebug('runProgram') -def runProgram(prog, input=None, returnCode=False, env=None, pipeOutput=True): - debug('runProgram prog:', str(prog), 'input:', str(input)) - if type(prog) is str: - progStr = prog - else: - progStr = ' '.join(prog) - - try: - if pipeOutput: - stderr = subprocess.STDOUT - stdout = subprocess.PIPE - else: - stderr = None - stdout = None - pop = subprocess.Popen(prog, - shell = type(prog) is str, - stderr=stderr, - stdout=stdout, - stdin=subprocess.PIPE, - env=env) - except OSError, e: - debug('strerror:', e.strerror) - raise ProgramError(progStr, e.strerror) - - if input != None: - pop.stdin.write(input) - pop.stdin.close() - - if pipeOutput: - out = pop.stdout.read() - else: - out = '' - - code = pop.wait() - if returnCode: - ret = [out, code] - else: - ret = out - if code != 0 and not returnCode: - debug('error output:', out) - debug('prog:', prog) - raise ProgramError(progStr, out) -# debug('output:', out.replace('\0', '\n')) - return ret - -# Code for computing common ancestors -# ----------------------------------- - -currentId = 0 -def getUniqueId(): - global currentId - currentId += 1 - return currentId - -# The 'virtual' commit objects have SHAs which are integers -shaRE = re.compile('^[0-9a-f]{40}$') -def isSha(obj): - return (type(obj) is str and bool(shaRE.match(obj))) or \ - (type(obj) is int and obj >= 1) - -class Commit(object): - __slots__ = ['parents', 'firstLineMsg', 'children', '_tree', 'sha', - 'virtual'] - - def __init__(self, sha, parents, tree=None): - 
self.parents = parents - self.firstLineMsg = None - self.children = [] - - if tree: - tree = tree.rstrip() - assert(isSha(tree)) - self._tree = tree - - if not sha: - self.sha = getUniqueId() - self.virtual = True - self.firstLineMsg = 'virtual commit' - assert(isSha(tree)) - else: - self.virtual = False - self.sha = sha.rstrip() - assert(isSha(self.sha)) - - def tree(self): - self.getInfo() - assert(self._tree != None) - return self._tree - - def shortInfo(self): - self.getInfo() - return str(self.sha) + ' ' + self.firstLineMsg - - def __str__(self): - return self.shortInfo() - - def getInfo(self): - if self.virtual or self.firstLineMsg != None: - return - else: - info = runProgram(['git-cat-file', 'commit', self.sha]) - info = info.split('\n') - msg = False - for l in info: - if msg: - self.firstLineMsg = l - break - else: - if l.startswith('tree'): - self._tree = l[5:].rstrip() - elif l == '': - msg = True - -class Graph: - def __init__(self): - self.commits = [] - self.shaMap = {} - - def addNode(self, node): - assert(isinstance(node, Commit)) - self.shaMap[node.sha] = node - self.commits.append(node) - for p in node.parents: - p.children.append(node) - return node - - def reachableNodes(self, n1, n2): - res = {} - def traverse(n): - res[n] = True - for p in n.parents: - traverse(p) - - traverse(n1) - traverse(n2) - return res - - def fixParents(self, node): - for x in range(0, len(node.parents)): - node.parents[x] = self.shaMap[node.parents[x]] - -# addDebug('buildGraph') -def buildGraph(heads): - debug('buildGraph heads:', heads) - for h in heads: - assert(isSha(h)) - - g = Graph() - - out = runProgram(['git-rev-list', '--parents'] + heads) - for l in out.split('\n'): - if l == '': - continue - shas = l.split(' ') - - # This is a hack, we temporarily use the 'parents' attribute - # to contain a list of SHA1:s. They are later replaced by proper - # Commit objects. 
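The gitMergeCommon.py code being deleted here builds its commit graph by reading `git rev-list --parents` output, one commit plus its parent SHA1s per line, and only later replaces the parent SHA1 strings with Commit objects. As a minimal sketch (again not part of the patch; parent_map is a name invented for this example), the same parsing without the wrapper classes looks like this:

    import subprocess

    def parent_map(heads):
        """Parse `git rev-list --parents` over the given heads into {commit: [parents]}."""
        out = subprocess.run(['git', 'rev-list', '--parents'] + list(heads),
                             capture_output=True, text=True, check=True).stdout
        graph = {}
        for line in out.splitlines():
            if not line:
                continue
            shas = line.split(' ')
            graph[shas[0]] = shas[1:]   # first field: the commit; rest: its parents
        return graph

    if __name__ == '__main__':
        # Example: the history reachable from the current branch tip.
        history = parent_map(['HEAD'])
        print(len(history), 'commits,',
              sum(1 for p in history.values() if not p), 'root(s)')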
- c = Commit(shas[0], shas[1:]) - - g.commits.append(c) - g.shaMap[c.sha] = c - - for c in g.commits: - g.fixParents(c) - - for c in g.commits: - for p in c.parents: - p.children.append(c) - return g - -# Write the empty tree to the object database and return its SHA1 -def writeEmptyTree(): - tmpIndex = os.environ.get('GIT_DIR', '.git') + '/merge-tmp-index' - def delTmpIndex(): - try: - os.unlink(tmpIndex) - except OSError: - pass - delTmpIndex() - newEnv = os.environ.copy() - newEnv['GIT_INDEX_FILE'] = tmpIndex - res = runProgram(['git-write-tree'], env=newEnv).rstrip() - delTmpIndex() - return res - -def addCommonRoot(graph): - roots = [] - for c in graph.commits: - if len(c.parents) == 0: - roots.append(c) - - superRoot = Commit(sha=None, parents=[], tree=writeEmptyTree()) - graph.addNode(superRoot) - for r in roots: - r.parents = [superRoot] - superRoot.children = roots - return superRoot - -def getCommonAncestors(graph, commit1, commit2): - '''Find the common ancestors for commit1 and commit2''' - assert(isinstance(commit1, Commit) and isinstance(commit2, Commit)) - - def traverse(start, set): - stack = [start] - while len(stack) > 0: - el = stack.pop() - set.add(el) - for p in el.parents: - if p not in set: - stack.append(p) - h1Set = Set() - h2Set = Set() - traverse(commit1, h1Set) - traverse(commit2, h2Set) - shared = h1Set.intersection(h2Set) - - if len(shared) == 0: - shared = [addCommonRoot(graph)] - - res = Set() - - for s in shared: - if len([c for c in s.children if c in shared]) == 0: - res.add(s) - return list(res) diff --git a/t/Makefile b/t/Makefile index 89835093fb..e1c6c92d9c 100644 --- a/t/Makefile +++ b/t/Makefile @@ -13,10 +13,6 @@ SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH)) T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh) TSVN = $(wildcard t91[0-9][0-9]-*.sh) -ifdef NO_PYTHON - GIT_TEST_OPTS += --no-python -endif - all: $(T) clean $(T): diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh index 6aff0b808c..81f3bedc90 100755 --- a/t/t0000-basic.sh +++ b/t/t0000-basic.sh @@ -20,10 +20,10 @@ modification *should* take notice and update the test vectors here. ################################################################ # It appears that people are getting bitten by not installing -# 'merge' (usually part of RCS package in binary distributions) -# or have too old python without subprocess. Check them and error -# out before running any tests. Also catch the bogosity of trying -# to run tests without building while we are at it. +# 'merge' (usually part of RCS package in binary distributions). +# Check this and error out before running any tests. Also catch +# the bogosity of trying to run tests without building while we +# are at it. ../git >/dev/null if test $? != 1 @@ -42,12 +42,6 @@ fi . ./test-lib.sh -test "$no_python" || "$PYTHON" -c 'import subprocess' || { - echo >&2 'Your python seem to lack "subprocess" module. -Please check INSTALL document.' - exit 1 -} - ################################################################ # init-db has been done in an empty repository. # make sure it is empty. diff --git a/t/test-lib.sh b/t/test-lib.sh index 3895f16709..ac7be769b4 100755 --- a/t/test-lib.sh +++ b/t/test-lib.sh @@ -76,7 +76,8 @@ do -v|--v|--ve|--ver|--verb|--verbo|--verbos|--verbose) verbose=t; shift ;; --no-python) - no_python=t; shift ;; + # noop now... + shift ;; *) break ;; esac @@ -210,18 +211,6 @@ GIT_EXEC_PATH=$(pwd)/.. 
HOME=$(pwd)/trash export PATH GIT_EXEC_PATH HOME -# Similarly use ../compat/subprocess.py if our python does not -# have subprocess.py on its own. -PYTHON=`sed -e '1{ - s/^#!// - q -}' ../git-merge-recursive-old` || { - error "You haven't built things yet, have you?" -} -"$PYTHON" -c 'import subprocess' 2>/dev/null || { - PYTHONPATH=$(pwd)/../compat - export PYTHONPATH -} GITPERLLIB=$(pwd)/../perl/blib/lib:$(pwd)/../perl/blib/arch/auto/Git export GITPERLLIB test -d ../templates/blt || {