Merge branch 'issue-1391-ljv'
Update Issue 1391
Status: FixPending
diff --git a/app/soc/content/css/v2/gsoc/dashboard.css b/app/soc/content/css/v2/gsoc/dashboard.css
index 6ff6527..c94dca2 100644
--- a/app/soc/content/css/v2/gsoc/dashboard.css
+++ b/app/soc/content/css/v2/gsoc/dashboard.css
@@ -161,7 +161,8 @@
}
.column-entry.view_mentor_evaluation,
.column-entry.view_student_evaluation,
-.column-entry.view_evaluation_group
+.column-entry.view_evaluation_group,
+.column-entry.projects
{
background-image: url(../../../images/soc/dashboard/view.png);
}
diff --git a/app/soc/modules/gsoc/views/admin.py b/app/soc/modules/gsoc/views/admin.py
index 5b5532a..0031c8c 100644
--- a/app/soc/modules/gsoc/views/admin.py
+++ b/app/soc/modules/gsoc/views/admin.py
@@ -53,7 +53,7 @@
from soc.modules.gsoc.views.dashboard import BIRTHDATE_FORMAT
from soc.modules.gsoc.views.helper import url_names
from soc.modules.gsoc.views.helper.url_patterns import url
-from soc.modules.gsoc.views.projects_list import ProjectList
+from soc.modules.gsoc.views import projects_list
class LookupForm(gsoc_forms.GSoCModelForm):
@@ -1631,8 +1631,8 @@
def jsonContext(self, data, check, mutator):
list_query = project_logic.getProjectsQuery(program=data.program)
- list_content = ProjectList(
- data.request, data, list_query, self.LIST_IDX).getListData()
+ list_content = projects_list.ProjectList(
+ data, list_query, idx=self.LIST_IDX).getListData()
if list_content:
return list_content.content()
else:
@@ -1642,8 +1642,7 @@
list_query = project_logic.getProjectsQuery(program=data.program)
return {
'page_name': 'Projects list page',
- # TODO(nathaniel): Drop the first parameter of ProjectList.
- 'list': ProjectList(data.request, data, list_query, self.LIST_IDX),
+ 'list': projects_list.ProjectList(data, list_query, idx=self.LIST_IDX),
}
diff --git a/app/soc/modules/gsoc/views/helper/access_checker.py b/app/soc/modules/gsoc/views/helper/access_checker.py
index 97526ab..5f38003 100644
--- a/app/soc/modules/gsoc/views/helper/access_checker.py
+++ b/app/soc/modules/gsoc/views/helper/access_checker.py
@@ -35,7 +35,7 @@
from soc.modules.gsoc.models.grading_survey_group import GSoCGradingSurveyGroup
from soc.modules.gsoc.models.grading_record import GSoCGradingRecord
from soc.modules.gsoc.models.profile import GSoCProfile
-from soc.modules.gsoc.models.project import GSoCProject
+from soc.modules.gsoc.models import project as project_model
from soc.modules.gsoc.models.project_survey import ProjectSurvey
from soc.modules.gsoc.models.project_survey_record import \
GSoCProjectSurveyRecord
@@ -178,7 +178,7 @@
if not project_id:
raise NotFound(ugettext('Proposal id must be a positive number'))
- self.data.project = GSoCProject.get_by_id(
+ self.data.project = project_model.GSoCProject.get_by_id(
project_id, parent=self.data.url_profile)
if not self.data.project:
@@ -541,6 +541,77 @@
elif status == 'withdrawn':
self.data.is_pending = False
+ def canStudentUpdateProject(self):
+ """Checks if the student can edit the project details."""
+ assert access_checker.isSet(self.data.program)
+ assert access_checker.isSet(self.data.timeline)
+ assert access_checker.isSet(self.data.project)
+ assert access_checker.isSet(self.data.project_owner)
+
+ self.isProjectInURLValid()
+
+ # check if the timeline allows updating project
+ self.isProgramVisible()
+ self.acceptedStudentsAnnounced()
+
+ # check if the current user is an active student
+ self.isActiveStudent()
+
+ # check if the project belongs to the current user
+ expected_profile_key = self.data.project.parent_key()
+ if expected_profile_key != self.data.profile.key():
+ error_msg = access_checker.DEF_ENTITY_DOES_NOT_BELONG_TO_YOU % {
+ 'name': 'project'
+ }
+ raise AccessViolation(error_msg)
+
+ # check if the status allows the project to be updated
+ if self.data.project.status in ['invalid', 'withdrawn', 'failed']:
+ raise AccessViolation(access_checker.DEF_CANNOT_UPDATE_ENTITY % {
+ 'name': 'project'
+ })
+
+ def canOrgAdminUpdateProject(self):
+ """Checks if the organization admin can edit the project details."""
+ assert access_checker.isSet(self.data.program)
+ assert access_checker.isSet(self.data.timeline)
+ assert access_checker.isSet(self.data.project)
+ assert access_checker.isSet(self.data.project_owner)
+
+ self.isProjectInURLValid()
+
+ # check if the timeline allows updating project
+ self.isProgramVisible()
+ self.acceptedStudentsAnnounced()
+
+ # check if the person is an organization admin for the organization
+ # to which the project was assigned
+ org_key = project_model.GSoCProject.org.get_value_for_datastore(
+ self.data.project)
+ self.isOrgAdminForOrganization(org_key)
+
+ # check if the status allows the project to be updated
+ if self.data.project.status in ['invalid', 'withdrawn', 'failed']:
+ raise AccessViolation(access_checker.DEF_CANNOT_UPDATE_ENTITY % {
+ 'name': 'project'
+ })
+
+ def canUpdateProject(self):
+ """Checks if the current user is allowed to update project details."""
+ self.isLoggedIn()
+ if not self.data.is_host:
+ self.hasProfile()
+ if self.data.profile.is_student:
+ # check if this is a student trying to update their project
+ self.canStudentUpdateProject()
+ elif self.data.is_org_admin:
+ # check if this is an organization admin trying to update a project
+ # belonging to one of the students working for their organization
+ self.canOrgAdminUpdateProject()
+ else:
+ raise AccessViolation(access_checker.DEF_CANNOT_UPDATE_ENTITY % {
+ 'name': 'project'
+ })
class DeveloperAccessChecker(access_checker.DeveloperAccessChecker):
pass
diff --git a/app/soc/modules/gsoc/views/project_details.py b/app/soc/modules/gsoc/views/project_details.py
index eab45e4..bdecbe9 100644
--- a/app/soc/modules/gsoc/views/project_details.py
+++ b/app/soc/modules/gsoc/views/project_details.py
@@ -25,6 +25,7 @@
from django.utils.translation import ugettext
from soc.logic import exceptions
+from soc.views.helper import access_checker
from soc.views.helper import blobstore as bs_helper
from soc.views.helper.access_checker import isSet
from soc.views.template import Template
@@ -184,10 +185,8 @@
def checkAccess(self, data, check, mutator):
"""Access checks for GSoC project details page."""
- check.isLoggedIn()
- check.isActiveStudent()
mutator.projectFromKwargs()
- check.canStudentUpdateProject()
+ check.canUpdateProject()
def context(self, data, check, mutator):
"""Handler to for GSoC project details page HTTP get request."""
@@ -240,11 +239,9 @@
]
def checkAccess(self, data, check, mutator):
- check.isLoggedIn()
- check.isActiveStudent()
mutator.projectFromKwargs()
- check.canStudentUpdateProject()
check.isProjectCompleted()
+ check.canUpdateProject()
def post(self, data, check, mutator):
"""Post handler for the code sample upload file."""
@@ -328,8 +325,8 @@
def checkAccess(self, data, check, mutator):
mutator.projectFromKwargs()
- check.canStudentUpdateProject()
check.isProjectCompleted()
+ check.canUpdateProject()
def post(self, data, check, mutator):
"""Get handler for the code sample delete file."""
@@ -412,6 +409,54 @@
return "v2/modules/gsoc/project_details/_user_action.html"
+def _isUpdateLinkVisible(data):
+ """Determines whether the current user is allowed to update the project
+ and therefore if the project update link should be visible or not.
+
+ Args:
+ data: a RequestData object
+
+ Returns: True if the update link should be visible, False otherwise.
+ """
+ # program hosts are able to edit project details
+ if data.is_host:
+ return True
+
+ # users without active profiles definitely cannot update projects
+ if not data.profile or data.profile.status != 'active':
+ return False
+
+ # only passed and valid project can be updated
+ if data.project.status in ['invalid', 'withdrawn', 'failed']:
+ return False
+
+ # a student who owns the project can update it
+ if data.project.parent_key() == data.profile.key():
+ return True
+
+ # org admins of the organization that manages the project can update it
+ org_key = GSoCProject.org.get_value_for_datastore(data.project)
+ if data.orgAdminFor(org_key):
+ return True
+
+ # no other users are permitted to update the project
+ return False
+
+
+def _getUpdateLinkText(data):
+ """Returns text which may be used to display update project link.
+
+ Args:
+ data: a RequestData object
+
+ Returns: a string with the text to be used with update project link
+ """
+ if data.timeline.afterFormSubmissionStart():
+ return 'Update or Upload Code Samples'
+ else:
+ return 'Update'
+
+
class ProjectDetails(GSoCRequestHandler):
"""Encapsulate all the methods required to generate GSoC project
details page.
@@ -446,11 +491,13 @@
if data.orgAdminFor(data.project.org):
context['user_actions'] = UserActions(data)
- user_is_owner = data.user and \
- (data.user.key() == data.project_owner.parent_key())
- if user_is_owner:
- context['update_link'] = data.redirect.project().urlOf(
+ if _isUpdateLinkVisible(data):
+ context['update_link_visible'] = True
+ context['update_link_url'] = data.redirect.project().urlOf(
url_names.GSOC_PROJECT_UPDATE)
+ context['update_link_text'] = _getUpdateLinkText(data)
+ else:
+ context['update_link_visible'] = False
if len(data.project.passed_evaluations) >= \
project_logic.NUMBER_OF_EVALUATIONS:
diff --git a/app/soc/modules/gsoc/views/projects_list.py b/app/soc/modules/gsoc/views/projects_list.py
index 92203c8..8f276a1 100644
--- a/app/soc/modules/gsoc/views/projects_list.py
+++ b/app/soc/modules/gsoc/views/projects_list.py
@@ -32,7 +32,9 @@
class ProjectList(Template):
"""Template for listing the student projects accepted in the program."""
- def __init__(self, data, query, idx=0):
+ DEFAULT_IDX = 0
+
+ def __init__(self, data, query, idx=None):
"""Initializes a new object.
Args:
@@ -42,7 +44,8 @@
"""
self.data = data
self.query = query
- self.idx = idx
+
+ self.idx = self.DEFAULT_IDX if idx is None else idx
r = data.redirect
list_config = lists.ListConfiguration(add_key_column=False)
diff --git a/app/soc/templates/v2/modules/gsoc/homepage/base.html b/app/soc/templates/v2/modules/gsoc/homepage/base.html
index ba1260a..ce14085 100644
--- a/app/soc/templates/v2/modules/gsoc/homepage/base.html
+++ b/app/soc/templates/v2/modules/gsoc/homepage/base.html
@@ -21,7 +21,7 @@
<h2 id="title-section-how-it-works">How Google Summer of Code Works</h2>
<ol id="list-how-it-works">
<li id="how-it-works-item1" class="first">
- <strong>Propose a project <span>for approval by a participating open source organization</span></strong>
+ <strong>Propose a project <span>for approval by a mentoring open source organization</span></strong>
</li>
<li id="how-it-works-item2" class="second">
<strong>Code the summer away</strong>
diff --git a/app/soc/templates/v2/modules/gsoc/project_details/base.html b/app/soc/templates/v2/modules/gsoc/project_details/base.html
index 6558374..bcabe0f 100644
--- a/app/soc/templates/v2/modules/gsoc/project_details/base.html
+++ b/app/soc/templates/v2/modules/gsoc/project_details/base.html
@@ -37,8 +37,8 @@
</h3>
<div id="edit-page" class="org-page-link">
- {% if update_link %}
- <a href="{{ update_link }}">Update</a>
+ {% if update_link_visible %}
+ <a href="{{ update_link_url }}">{{ update_link_text }}</a>
{% endif %}
</div>
</div>
diff --git a/app/soc/views/base_templates.py b/app/soc/views/base_templates.py
index 665f89e..6d0f332 100644
--- a/app/soc/views/base_templates.py
+++ b/app/soc/views/base_templates.py
@@ -44,7 +44,7 @@
context['user_email'] = self.data.gae_user.email()
if self.data.user:
- context['link_id'] = " [link_id: %s]" % self.data.user.link_id
+ context['link_id'] = " [username: %s]" % self.data.user.link_id
if self.apply_link and self.data.timeline.studentSignup() and \
self.data.student_info:
diff --git a/app/soc/views/helper/access_checker.py b/app/soc/views/helper/access_checker.py
index a698113..304bd04 100644
--- a/app/soc/views/helper/access_checker.py
+++ b/app/soc/views/helper/access_checker.py
@@ -1090,34 +1090,6 @@
}
raise AccessViolation(error_msg)
- def canStudentUpdateProject(self):
- """Checks if the student can edit the project details.
- """
- assert isSet(self.data.program)
- assert isSet(self.data.timeline)
- assert isSet(self.data.project)
- assert isSet(self.data.project_owner)
-
- self.isProjectInURLValid()
-
- # check if the timeline allows updating project
- self.isProgramVisible()
- self.acceptedStudentsAnnounced()
-
- # check if the project belongs to the current user
- expected_profile_key = self.data.project.parent_key()
- if expected_profile_key != self.data.profile.key():
- error_msg = DEF_ENTITY_DOES_NOT_BELONG_TO_YOU % {
- 'name': 'project'
- }
- raise AccessViolation(error_msg)
-
- # check if the status allows the project to be updated
- if self.data.project.status in ['invalid', 'withdrawn', 'failed']:
- raise AccessViolation(DEF_CANNOT_UPDATE_ENTITY % {
- 'name': 'project'
- })
-
def isSurveyActive(self, survey, show_url=None):
"""Checks if the survey in the request data is active.
diff --git a/app/soc/views/profile.py b/app/soc/views/profile.py
index 3405290..8586179 100644
--- a/app/soc/views/profile.py
+++ b/app/soc/views/profile.py
@@ -291,7 +291,7 @@
return notification_form
def validateStudent(self, data, dirty, profile):
- if not data.student_info or data.kwargs.get('role') == 'student':
+ if not (data.student_info or data.kwargs.get('role') == 'student'):
return EmptyForm(data.POST)
student_form = self._getStudentInfoForm(data)
diff --git a/buildout.cfg b/buildout.cfg
index 8007272..2dbd4fd 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -2,6 +2,9 @@
parts =
python
omelette
+ nodejs
+ node_modules_sym
+ jasmine_node_sym
develop =
.
thirdparty/google_appengine/lib/yaml
@@ -50,3 +53,21 @@
packages =
${buildout:directory}/app ./app
${buildout:directory}/thirdparty/google_appengine/google ./google
+
+[nodejs]
+recipe = gp.recipe.node
+url = http://nodejs.org/dist/v0.8.14/node-v0.8.14.tar.gz
+npms = jasmine-node testem phantomjs
+scripts = npm
+
+[node_modules_sym]
+recipe = cns.recipe.symlink
+symlink_base = ${buildout:directory}/parts/nodejs/lib/node_modules
+symlink_target = ${buildout:directory}/node_modules
+autocreate = true
+
+[jasmine_node_sym]
+recipe = cns.recipe.symlink
+symlink_base = ${buildout:directory}/parts/node-v0.8.14/bin
+symlink_target = ${buildout:directory}/bin
+autocreate = true
diff --git a/scripts/stats.py b/scripts/stats.py
index f8bd6f2..d4d7a7a 100755
--- a/scripts/stats.py
+++ b/scripts/stats.py
@@ -754,7 +754,7 @@
super(StudentKeyRequest, self).__init__(post=dict(student_key=key))
-def main(args):
+def main():
"""Main routine.
"""
@@ -860,11 +860,11 @@
'turnaroundTime': turnaroundTime,
}
- interactive.remote(args, context)
+ interactive.remote(sys.argv[1:], context)
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage: %s app_id [host]" % (sys.argv[0],)
sys.exit(1)
- main(sys.argv[1:])
+ main()
diff --git a/setup.py b/setup.py
index 2a811ba..3618e73 100644
--- a/setup.py
+++ b/setup.py
@@ -34,6 +34,7 @@
],
entry_points = {'console_scripts': ['run-tests = tests.run:main',
'gen-app-yaml = scripts.gen_app_yaml:main',
+ 'stats = scripts.stats:main'
],
},
include_package_data = True,
diff --git a/testem.json b/testem.json
new file mode 100644
index 0000000..be74e2b
--- /dev/null
+++ b/testem.json
@@ -0,0 +1,8 @@
+{
+ "framework":"jasmine",
+ "src_files": [
+ "app/jquery/jquery-1.6.4.js",
+ "tests/app/soc/content/js/jasmine-jquery-1.3.1.js",
+ "tests/app/soc/content/js/*_spec.js"
+ ]
+}
diff --git a/tests/app/soc/content/js/all_tests.js b/tests/app/soc/content/js/all_tests.js
new file mode 100644
index 0000000..d0f0edf
--- /dev/null
+++ b/tests/app/soc/content/js/all_tests.js
@@ -0,0 +1,2 @@
+jasmine.getFixtures().fixturesPath = '../js';
+loadFixtures('melange_fixture.html');
\ No newline at end of file
diff --git a/tests/app/soc/content/js/jasmine-jquery-1.3.1.js b/tests/app/soc/content/js/jasmine-jquery-1.3.1.js
new file mode 100644
index 0000000..7e85548
--- /dev/null
+++ b/tests/app/soc/content/js/jasmine-jquery-1.3.1.js
@@ -0,0 +1,288 @@
+var readFixtures = function() {
+ return jasmine.getFixtures().proxyCallTo_('read', arguments);
+};
+
+var preloadFixtures = function() {
+ jasmine.getFixtures().proxyCallTo_('preload', arguments);
+};
+
+var loadFixtures = function() {
+ jasmine.getFixtures().proxyCallTo_('load', arguments);
+};
+
+var setFixtures = function(html) {
+ jasmine.getFixtures().set(html);
+};
+
+var sandbox = function(attributes) {
+ return jasmine.getFixtures().sandbox(attributes);
+};
+
+var spyOnEvent = function(selector, eventName) {
+ jasmine.JQuery.events.spyOn(selector, eventName);
+}
+
+jasmine.getFixtures = function() {
+ return jasmine.currentFixtures_ = jasmine.currentFixtures_ || new jasmine.Fixtures();
+};
+
+jasmine.Fixtures = function() {
+ this.containerId = 'jasmine-fixtures';
+ this.fixturesCache_ = {};
+ this.fixturesPath = 'spec/javascripts/fixtures';
+};
+
+jasmine.Fixtures.prototype.set = function(html) {
+ this.cleanUp();
+ this.createContainer_(html);
+};
+
+jasmine.Fixtures.prototype.preload = function() {
+ this.read.apply(this, arguments);
+};
+
+jasmine.Fixtures.prototype.load = function() {
+ this.cleanUp();
+ this.createContainer_(this.read.apply(this, arguments));
+};
+
+jasmine.Fixtures.prototype.read = function() {
+ var htmlChunks = [];
+
+ var fixtureUrls = arguments;
+ for(var urlCount = fixtureUrls.length, urlIndex = 0; urlIndex < urlCount; urlIndex++) {
+ htmlChunks.push(this.getFixtureHtml_(fixtureUrls[urlIndex]));
+ }
+
+ return htmlChunks.join('');
+};
+
+jasmine.Fixtures.prototype.clearCache = function() {
+ this.fixturesCache_ = {};
+};
+
+jasmine.Fixtures.prototype.cleanUp = function() {
+ jQuery('#' + this.containerId).remove();
+};
+
+jasmine.Fixtures.prototype.sandbox = function(attributes) {
+ var attributesToSet = attributes || {};
+ return jQuery('<div id="sandbox" />').attr(attributesToSet);
+};
+
+jasmine.Fixtures.prototype.createContainer_ = function(html) {
+ var container;
+ if(html instanceof jQuery) {
+ container = jQuery('<div id="' + this.containerId + '" />');
+ container.html(html);
+ } else {
+ container = '<div id="' + this.containerId + '">' + html + '</div>'
+ }
+ jQuery('body').append(container);
+};
+
+jasmine.Fixtures.prototype.getFixtureHtml_ = function(url) {
+ if (typeof this.fixturesCache_[url] == 'undefined') {
+ this.loadFixtureIntoCache_(url);
+ }
+ return this.fixturesCache_[url];
+};
+
+jasmine.Fixtures.prototype.loadFixtureIntoCache_ = function(relativeUrl) {
+ var self = this;
+ var url = this.fixturesPath.match('/$') ? this.fixturesPath + relativeUrl : this.fixturesPath + '/' + relativeUrl;
+ jQuery.ajax({
+ async: false, // must be synchronous to guarantee that no tests are run before fixture is loaded
+ cache: false,
+ dataType: 'html',
+ url: url,
+ success: function(data) {
+ self.fixturesCache_[relativeUrl] = data;
+ },
+ error: function(jqXHR, status, errorThrown) {
+ throw Error('Fixture could not be loaded: ' + url + ' (status: ' + status + ', message: ' + errorThrown.message + ')');
+ }
+ });
+};
+
+jasmine.Fixtures.prototype.proxyCallTo_ = function(methodName, passedArguments) {
+ return this[methodName].apply(this, passedArguments);
+};
+
+
+jasmine.JQuery = function() {};
+
+jasmine.JQuery.browserTagCaseIndependentHtml = function(html) {
+ return jQuery('<div/>').append(html).html();
+};
+
+jasmine.JQuery.elementToString = function(element) {
+ return jQuery('<div />').append(element.clone()).html();
+};
+
+jasmine.JQuery.matchersClass = {};
+
+(function(namespace) {
+ var data = {
+ spiedEvents: {},
+ handlers: []
+ };
+
+ namespace.events = {
+ spyOn: function(selector, eventName) {
+ var handler = function(e) {
+ data.spiedEvents[[selector, eventName]] = e;
+ };
+ jQuery(selector).bind(eventName, handler);
+ data.handlers.push(handler);
+ },
+
+ wasTriggered: function(selector, eventName) {
+ return !!(data.spiedEvents[[selector, eventName]]);
+ },
+
+ cleanUp: function() {
+ data.spiedEvents = {};
+ data.handlers = [];
+ }
+ }
+})(jasmine.JQuery);
+
+(function(){
+ var jQueryMatchers = {
+ toHaveClass: function(className) {
+ return this.actual.hasClass(className);
+ },
+
+ toBeVisible: function() {
+ return this.actual.is(':visible');
+ },
+
+ toBeHidden: function() {
+ return this.actual.is(':hidden');
+ },
+
+ toBeSelected: function() {
+ return this.actual.is(':selected');
+ },
+
+ toBeChecked: function() {
+ return this.actual.is(':checked');
+ },
+
+ toBeEmpty: function() {
+ return this.actual.is(':empty');
+ },
+
+ toExist: function() {
+ return this.actual.size() > 0;
+ },
+
+ toHaveAttr: function(attributeName, expectedAttributeValue) {
+ return hasProperty(this.actual.attr(attributeName), expectedAttributeValue);
+ },
+
+ toHaveId: function(id) {
+ return this.actual.attr('id') == id;
+ },
+
+ toHaveHtml: function(html) {
+ return this.actual.html() == jasmine.JQuery.browserTagCaseIndependentHtml(html);
+ },
+
+ toHaveText: function(text) {
+ if (text && jQuery.isFunction(text.test)) {
+ return text.test(this.actual.text());
+ } else {
+ return this.actual.text() == text;
+ }
+ },
+
+ toHaveValue: function(value) {
+ return this.actual.val() == value;
+ },
+
+ toHaveData: function(key, expectedValue) {
+ return hasProperty(this.actual.data(key), expectedValue);
+ },
+
+ toBe: function(selector) {
+ return this.actual.is(selector);
+ },
+
+ toContain: function(selector) {
+ return this.actual.find(selector).size() > 0;
+ },
+
+ toBeDisabled: function(selector){
+ return this.actual.is(':disabled');
+ },
+
+ // tests the existence of a specific event binding
+ toHandle: function(eventName) {
+ var events = this.actual.data("events");
+ return events && events[eventName].length > 0;
+ },
+
+ // tests the existence of a specific event binding + handler
+ toHandleWith: function(eventName, eventHandler) {
+ var stack = this.actual.data("events")[eventName];
+ var i;
+ for (i = 0; i < stack.length; i++) {
+ if (stack[i].handler == eventHandler) {
+ return true;
+ }
+ }
+ return false;
+ }
+ };
+
+ var hasProperty = function(actualValue, expectedValue) {
+ if (expectedValue === undefined) {
+ return actualValue !== undefined;
+ }
+ return actualValue == expectedValue;
+ };
+
+ var bindMatcher = function(methodName) {
+ var builtInMatcher = jasmine.Matchers.prototype[methodName];
+
+ jasmine.JQuery.matchersClass[methodName] = function() {
+ if (this.actual instanceof jQuery) {
+ var result = jQueryMatchers[methodName].apply(this, arguments);
+ this.actual = jasmine.JQuery.elementToString(this.actual);
+ return result;
+ }
+
+ if (builtInMatcher) {
+ return builtInMatcher.apply(this, arguments);
+ }
+
+ return false;
+ };
+ };
+
+ for(var methodName in jQueryMatchers) {
+ bindMatcher(methodName);
+ }
+})();
+
+beforeEach(function() {
+ this.addMatchers(jasmine.JQuery.matchersClass);
+ this.addMatchers({
+ toHaveBeenTriggeredOn: function(selector) {
+ this.message = function() {
+ return [
+ "Expected event " + this.actual + " to have been triggered on" + selector,
+ "Expected event " + this.actual + " not to have been triggered on" + selector
+ ];
+ };
+ return jasmine.JQuery.events.wasTriggered(selector, this.actual);
+ }
+ })
+});
+
+afterEach(function() {
+ jasmine.getFixtures().cleanUp();
+ jasmine.JQuery.events.cleanUp();
+});
diff --git a/tests/app/soc/content/js/melange_fixture.html b/tests/app/soc/content/js/melange_fixture.html
new file mode 100644
index 0000000..52b806a
--- /dev/null
+++ b/tests/app/soc/content/js/melange_fixture.html
@@ -0,0 +1,10 @@
+<html>
+ <head>
+ <script type="text/javascript" src="./app/json/json2.js"></script>
+ <script type="text/javascript" src="./app/jquery/jquery-cookie.js"></script>
+ <script type="text/javascript" src="./app/soc/content/js/melange.js"></script>
+ <script type="text/javascript">window.melange = {};</script>
+ </head>
+ <body>
+ </body>
+</html>
diff --git a/tests/app/soc/content/js/melange_spec.js b/tests/app/soc/content/js/melange_spec.js
new file mode 100644
index 0000000..e98bb83
--- /dev/null
+++ b/tests/app/soc/content/js/melange_spec.js
@@ -0,0 +1,9 @@
+describe('melange', function() {
+ beforeEach(function() {
+ jasmine.getFixtures().fixturesPath = 'tests/app/soc/content/js';
+ loadFixtures('melange_fixture.html');
+ });
+ it('should be defined', function(){
+ expect(melange).toBeDefined();
+ });
+});
\ No newline at end of file
diff --git a/tests/app/soc/modules/gsoc/views/test_profile.py b/tests/app/soc/modules/gsoc/views/test_profile.py
index 8d29416..dab090c 100644
--- a/tests/app/soc/modules/gsoc/views/test_profile.py
+++ b/tests/app/soc/modules/gsoc/views/test_profile.py
@@ -168,11 +168,18 @@
})
response = self.post(role_url, postdata)
-
self.assertResponseRedirect(response, url + '?validated')
# hacky
profile = GSoCProfile.all().get()
+
+ self.assertEqual(postdata.get('given_name'), profile.given_name)
+ self.assertEqual(postdata.get('surname'), profile.surname)
+
+ # Make sure student info entity is created with right values.
+ self.assertEqual(postdata.get('school_name'),
+ profile.student_info.school_name)
+
profile.delete()
postdata.update({
diff --git a/tests/app/soc/modules/gsoc/views/test_project_details.py b/tests/app/soc/modules/gsoc/views/test_project_details.py
index 43d83f5..f318321 100644
--- a/tests/app/soc/modules/gsoc/views/test_project_details.py
+++ b/tests/app/soc/modules/gsoc/views/test_project_details.py
@@ -12,16 +12,75 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Tests for project_detail views.
-"""
+"""Tests for project_detail views."""
+
+from tests import profile_utils
+from tests import program_utils
+from tests import test_utils
+
+from soc.modules.gsoc.models import project as project_model
+from soc.modules.gsoc.views import project_details
-from tests.profile_utils import GSoCProfileHelper
-from tests.test_utils import GSoCDjangoTestCase
+def _createProjectForStudent(program, org, dev_test, student=None):
+ """Returns a newly created GSoCProject for the specified student.
+ If a new student instance is not provided, a new profile is created.
-from soc.modules.gsoc.models.project import GSoCProject
+ Args:
+ program: GSoCProgram instance for which the project is to be created
+ org: GSoCOrganization instance for which the project is to be created
+ dev_test: whether it is dev test environment
+ student: the specified GSoCProfile student instance to mentor the project
-class ProjectDetailsTest(GSoCDjangoTestCase):
+ Returns:
+ the newly created GSoCProject instance
+ """
+ if not student:
+ student_helper = profile_utils.GSoCProfileHelper(program, dev_test)
+ student_helper.createOtherUser('student@example.com')
+ student = student_helper.createStudent()
+
+ mentor_helper = profile_utils.GSoCProfileHelper(program, dev_test)
+ mentor_helper.createOtherUser('mentor@example.com')
+ mentor_helper.createMentorWithProject(org, student)
+
+ project = project_model.GSoCProject.all().get()
+ project.is_featured = False
+ project.status = 'accepted'
+ project.put()
+
+ return project
+
+
+def _createProjectForMentor(program, org, dev_test, mentor=None):
+ """Returns a newly created GSoCProject for the specified mentor.
+ If a new mentor instance is not provided, a new profile is created.
+
+ Args:
+ program: GSoCProgram instance for which the project is to be created
+ org: GSoCOrganization instance for which the project is to be created
+ dev_test: whether it is dev test environment
+ mentor: the specified GSoCProfile mentor instance to mentor the project
+
+ Returns:
+ the newly created GSoCProject instance
+ """
+ if not mentor:
+ mentor_helper = profile_utils.GSoCProfileHelper(program, dev_test)
+ mentor_helper.createOtherUser('mentor@example.com')
+ mentor = mentor_helper.createMentor(org)
+
+ student_helper = profile_utils.GSoCProfileHelper(program, dev_test)
+ student_helper.createOtherUser('student@example.com')
+ student_helper.createStudentWithProject(org, mentor)
+ project = project_model.GSoCProject.all().get()
+ project.is_featured = False
+ project.status = 'accepted'
+ project.put()
+ return project
+
+
+class ProjectDetailsTest(test_utils.GSoCDjangoTestCase):
"""Tests project details page.
"""
@@ -37,14 +96,14 @@
response, 'v2/modules/gsoc/project_details/base.html')
def createProject(self):
- mentor_helper = GSoCProfileHelper(self.gsoc, self.dev_test)
+ mentor_helper = profile_utils.GSoCProfileHelper(self.gsoc, self.dev_test)
mentor_helper.createOtherUser('mentor@example.com')
mentor = mentor_helper.createMentor(self.org)
- student_helper = GSoCProfileHelper(self.gsoc, self.dev_test)
+ student_helper = profile_utils.GSoCProfileHelper(self.gsoc, self.dev_test)
student_helper.createOtherUser('student@example.com')
student_helper.createStudentWithProject(self.org, mentor)
- print GSoCProject.all().fetch(100)
- project = GSoCProject.all().get()
+
+ project = project_model.GSoCProject.all().get()
project.is_featured = False
project.status = 'accepted'
project.put()
@@ -69,7 +128,7 @@
def testFeaturedProjectButton(self):
self.timeline.studentsAnnounced()
- student = GSoCProfileHelper(self.gsoc, self.dev_test)
+ student = profile_utils.GSoCProfileHelper(self.gsoc, self.dev_test)
student.createOtherUser('student@example.com')
student.createStudent()
@@ -88,5 +147,176 @@
self.assertResponseOK(response)
- project = GSoCProject.all().get()
+ project = project_model.GSoCProject.all().get()
self.assertEqual(project.is_featured, True)
+
+
+class ProjectDetailsUpdateTest(test_utils.GSoCDjangoTestCase):
+  """Unit tests for the project details update page."""
+
+ def setUp(self):
+ super(ProjectDetailsUpdateTest, self).setUp()
+ self.init()
+
+ def _getProjectUpdateUrl(self, project):
+ return '/gsoc/project/update/%s/%s' % (
+ project.parent_key().name(), project.key().id())
+
+ def testLoneUserAccessForbidden(self):
+ self.timeline.studentsAnnounced()
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertErrorTemplatesUsed(response)
+ self.assertResponseForbidden(response)
+
+ def testMentorAccessForbidden(self):
+ self.timeline.studentsAnnounced()
+
+ mentor = self.data.createMentor(self.org)
+ project = _createProjectForMentor(
+ self.gsoc, self.org, self.dev_test, mentor=mentor)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertErrorTemplatesUsed(response)
+ self.assertResponseForbidden(response)
+
+ def testOrgAdminAccessGranted(self):
+ self.timeline.studentsAnnounced()
+
+ self.data.createOrgAdmin(self.org)
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertResponseOK(response)
+
+ def testOrgAdminForAnotherOrgForbidden(self):
+ self.timeline.studentsAnnounced()
+
+ another_org = self.createOrg()
+ self.data.createOrgAdmin(another_org)
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertErrorTemplatesUsed(response)
+ self.assertResponseForbidden(response)
+
+ def testHostAccessGranted(self):
+ self.timeline.studentsAnnounced()
+
+ self.data.createHost()
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertResponseOK(response)
+
+ def testStudentAccessTheirProjectGranted(self):
+ self.timeline.studentsAnnounced()
+
+ student = self.data.createStudent()
+ project = _createProjectForStudent(
+ self.gsoc, self.org, self.dev_test, student=student)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertResponseOK(response)
+
+ def testStudentAccessOtherProjectForbidden(self):
+ self.timeline.studentsAnnounced()
+
+ student = self.data.createStudent()
+ project = _createProjectForStudent(self.gsoc, self.org, self.dev_test)
+
+ url = self._getProjectUpdateUrl(project)
+ response = self.get(url)
+ self.assertErrorTemplatesUsed(response)
+ self.assertResponseForbidden(response)
+
+
+class TestIsUpdateLinkVisible(test_utils.GSoCTestCase):
+ """Unit tests for _isUpdateLinkVisible function."""
+
+ def setUp(self):
+ super(TestIsUpdateLinkVisible, self).setUp()
+ self.init()
+
+ class MockRequestData(object):
+ """Mock class used to simulate RequestData which is passed as argument."""
+
+ def __init__(self, is_host=False, project=None, profile=None):
+ self.is_host = is_host
+ self.project = project
+ self.profile = profile
+
+ def orgAdminFor(self, org_key):
+ return org_key in self.profile.org_admin_for
+
+ def testForHost(self):
+ request_data = TestIsUpdateLinkVisible.MockRequestData(is_host=True)
+ result = project_details._isUpdateLinkVisible(request_data)
+    self.assertTrue(result)
+
+ def testForProjectStudent(self):
+ student = self.data.createStudent()
+ project = _createProjectForStudent(
+ self.gsoc, self.org, self.dev_test, student=student)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=student)
+ self.assertTrue(project_details._isUpdateLinkVisible(request_data))
+
+ def testForOtherStudent(self):
+ student = self.data.createStudent()
+ project = _createProjectForStudent(self.gsoc, self.org, self.dev_test)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=student)
+ self.assertFalse(project_details._isUpdateLinkVisible(request_data))
+
+ def testForProjectMentor(self):
+ mentor = self.data.createMentor(self.org)
+ project = _createProjectForMentor(
+ self.gsoc, self.org, self.dev_test, mentor=mentor)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=mentor)
+ self.assertFalse(project_details._isUpdateLinkVisible(request_data))
+
+ def testForOtherMentor(self):
+ mentor = self.data.createMentor(self.org)
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=mentor)
+ self.assertFalse(project_details._isUpdateLinkVisible(request_data))
+
+ def testForProjectOrgAdmin(self):
+ org_admin = self.data.createOrgAdmin(self.org)
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=org_admin)
+ self.assertTrue(project_details._isUpdateLinkVisible(request_data))
+
+ def testForOtherOrgAdmin(self):
+ program_helper = program_utils.GSoCProgramHelper()
+ another_org = program_helper.createOrg()
+ org_admin = self.data.createOrgAdmin(self.org)
+ project = _createProjectForMentor(self.gsoc, another_org, self.dev_test)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project, profile=org_admin)
+ self.assertFalse(project_details._isUpdateLinkVisible(request_data))
+
+ def testForLoneUser(self):
+ self.data.createUser()
+ project = _createProjectForMentor(self.gsoc, self.org, self.dev_test)
+
+ request_data = TestIsUpdateLinkVisible.MockRequestData(
+ project=project)
+ self.assertFalse(project_details._isUpdateLinkVisible(request_data))
diff --git a/tests/app/soc/views/test_org_app.py b/tests/app/soc/views/test_org_app.py
index 1dbb1db..2c58a34 100644
--- a/tests/app/soc/views/test_org_app.py
+++ b/tests/app/soc/views/test_org_app.py
@@ -135,7 +135,7 @@
'agreed_to_admin_agreement': True,
'backup_admin_id': backup_admin_profile.link_id,
}
-
+
def testOrgAppCreateOrEditByProgramAdmin(self):
"""Tests that program admin can create an organization application.
"""
diff --git a/tests/run.py b/tests/run.py
index f21a008..6ca3b14 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -16,6 +16,7 @@
import sys
import os
+import subprocess
HERE = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..'))
@@ -252,8 +253,7 @@
break
log.debug("Worker %s ending", ix)
-
-def main():
+def run_pyunit_tests():
sys.path = extra_paths + sys.path
os.environ['SERVER_SOFTWARE'] = 'Development via nose'
os.environ['SERVER_NAME'] = 'Foo'
@@ -310,6 +310,24 @@
sys.argv += args
nose.main(addplugins=plugins)
+def run_js_tests():
+ _environ = os.environ.copy()
+ _environ["PATH"] += ':./node_modules/phantomjs/lib/phantom/bin'
+ subprocess.call("./bin/node ./node_modules/testem/testem.js ci", env=_environ, shell=True)
+
+def main():
+ tests = set()
+ if '-t' in sys.argv:
+ i = sys.argv.index('-t')
+ tests.update(sys.argv[i+1].split(','))
+ del sys.argv[i:i+2]
+ else:
+ tests = {'js', 'pyunit'}
+
+ if 'pyunit' in tests:
+ run_pyunit_tests()
+ if 'js' in tests:
+ run_js_tests()
if __name__ == '__main__':
main()
diff --git a/thirdparty/chromium/LICENSE b/thirdparty/chromium/LICENSE
deleted file mode 100644
index 9314092..0000000
--- a/thirdparty/chromium/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/thirdparty/chromium/gcl.py b/thirdparty/chromium/gcl.py
deleted file mode 100755
index 5adbb51..0000000
--- a/thirdparty/chromium/gcl.py
+++ /dev/null
@@ -1,676 +0,0 @@
-#!/usr/bin/python
-# Wrapper script around Rietveld's upload.py that groups files into
-# changelists.
-
-import getpass
-import linecache
-import os
-import random
-import re
-import string
-import subprocess
-import sys
-import tempfile
-import upload
-import urllib2
-
-CODEREVIEW_SETTINGS = {
- # Default values.
- "CODE_REVIEW_SERVER": "codereviews.googleopensourceprograms.com",
- "CC_LIST": "melange-soc-dev@googlegroups.com",
- "VIEW_VC": "http://code.google.com/p/soc/source/detail?r=",
-}
-
-# Use a shell for subcommands on Windows to get a PATH search, and because svn
-# may be a batch file.
-use_shell = sys.platform.startswith("win")
-
-
-# globals that store the root of the current repositary and the directory where
-# we store information about changelists.
-repository_root = ""
-gcl_info_dir = ""
-
-
-def GetSVNFileInfo(file, field):
- """Returns a field from the svn info output for the given file."""
- output = RunShell(["svn", "info", file])
- for line in output.splitlines():
- search = field + ": "
- if line.startswith(search):
- return line[len(search):]
- return ""
-
-
-def GetRepositoryRoot():
- """Returns the top level directory of the current repository."""
- global repository_root
- if not repository_root:
- cur_dir_repo_root = GetSVNFileInfo(os.getcwd(), "Repository Root")
- if not cur_dir_repo_root:
- ErrorExit("gcl run outside of repository")
-
- repository_root = os.getcwd()
- while True:
- parent = os.path.dirname(repository_root)
- if GetSVNFileInfo(parent, "Repository Root") != cur_dir_repo_root:
- break
- repository_root = parent
- # Now read the code review settings for this repository.
- settings_file = os.path.join(repository_root, "codereview.settings")
- if os.path.exists(settings_file):
- output = ReadFile(settings_file)
- for line in output.splitlines():
- if not line or line.startswith("#"):
- continue
- key, value = line.split(": ", 1)
- CODEREVIEW_SETTINGS[key] = value
- return repository_root
-
-
-def GetCodeReviewSetting(key):
- """Returns a value for the given key for this repository."""
- return CODEREVIEW_SETTINGS.get(key, "")
-
-
-def GetInfoDir():
- """Returns the directory where gcl info files are stored."""
- global gcl_info_dir
- if not gcl_info_dir:
- gcl_info_dir = os.path.join(GetRepositoryRoot(), '.svn', 'gcl_info')
- return gcl_info_dir
-
-
-def ErrorExit(msg):
- """Print an error message to stderr and exit."""
- print >>sys.stderr, msg
- sys.exit(1)
-
-
-def RunShell(command, print_output=False):
- """Executes a command and returns the output."""
- p = subprocess.Popen(command, stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT, shell = use_shell,
- universal_newlines=True)
- if print_output:
- output_array = []
- while True:
- line = p.stdout.readline()
- if not line:
- break
- if print_output:
- print line.strip('\n')
- output_array.append(line)
- output = "".join(output_array)
- else:
- output = p.stdout.read()
- p.wait()
- p.stdout.close()
- return output
-
-
-def ReadFile(filename):
- """Returns the contents of a file."""
- file = open(filename, 'r')
- result = file.read()
- file.close()
- return result
-
-
-def WriteFile(filename, contents):
- """Overwrites the file with the given contents."""
- file = open(filename, 'w')
- file.write(contents)
- file.close()
-
-
-class ChangeInfo:
- """Holds information about a changelist.
-
- issue: the Rietveld issue number, of "" if it hasn't been uploaded yet.
- description: the description.
- files: a list of 2 tuple containing (status, filename) of changed files,
- with paths being relative to the top repository directory.
- """
- def __init__(self, name="", issue="", description="", files=[]):
- self.name = name
- self.issue = issue
- self.description = description
- self.files = files
-
- def FileList(self):
- """Returns a list of files."""
- return [file[1] for file in self.files]
-
- def Save(self):
- """Writes the changelist information to disk."""
- data = SEPARATOR.join([self.issue,
- "\n".join([f[0] + f[1] for f in self.files]),
- self.description])
- WriteFile(GetChangelistInfoFile(self.name), data)
-
- def Delete(self):
- """Removes the changelist information from disk."""
- os.remove(GetChangelistInfoFile(self.name))
-
- def CloseIssue(self):
- """Closes the Rietveld issue for this changelist."""
- data = [("description", self.description),]
- ctype, body = upload.EncodeMultipartFormData(data, [])
- SendToRietveld("/" + self.issue + "/close", body, ctype)
-
- def UpdateRietveldDescription(self):
- """Sets the description for an issue on Rietveld."""
- data = [("description", self.description),]
- ctype, body = upload.EncodeMultipartFormData(data, [])
- SendToRietveld("/" + self.issue + "/description", body, ctype)
-
-
-SEPARATOR = "\n-----\n"
-# The info files have the following format:
-# issue_id\n
-# SEPARATOR\n
-# filepath1\n
-# filepath2\n
-# .
-# .
-# filepathn\n
-# SEPARATOR\n
-# description
-
-
-def GetChangelistInfoFile(changename):
- """Returns the file that stores information about a changelist."""
- if not changename or re.search(r'\W', changename):
- ErrorExit("Invalid changelist name: " + changename)
- return os.path.join(GetInfoDir(), changename)
-
-
-def LoadChangelistInfo(changename, fail_on_not_found=True,
- update_status=False):
- """Gets information about a changelist.
-
- Args:
- fail_on_not_found: if True, this function will quit the program if the
- changelist doesn't exist.
- update_status: if True, the svn status will be updated for all the files
- and unchanged files will be removed.
-
- Returns: a ChangeInfo object.
- """
- info_file = GetChangelistInfoFile(changename)
- if not os.path.exists(info_file):
- if fail_on_not_found:
- ErrorExit("Changelist " + changename + " not found.")
- return ChangeInfo(changename)
- data = ReadFile(info_file)
- split_data = data.split(SEPARATOR, 2)
- if len(split_data) != 3:
- os.remove(info_file)
- ErrorExit("Changelist file %s was corrupt and deleted" % info_file)
- issue = split_data[0]
- files = []
- for line in split_data[1].splitlines():
- status = line[:7]
- file = line[7:]
- files.append((status, file))
- description = split_data[2]
- save = False
- if update_status:
- for file in files:
- filename = os.path.join(GetRepositoryRoot(), file[1])
- status = RunShell(["svn", "status", filename])[:7]
- if not status: # File has been reverted.
- save = True
- files.remove(file)
- elif status != file[0]:
- save = True
- files[files.index(file)] = (status, file[1])
- change_info = ChangeInfo(changename, issue, description, files)
- if save:
- change_info.Save()
- return change_info
-
-
-def GetCLs():
- """Returns a list of all the changelists in this repository."""
- return os.listdir(GetInfoDir())
-
-
-def GenerateChangeName():
- """Generate a random changelist name."""
- random.seed()
- current_cl_names = GetCLs()
- while True:
- cl_name = (random.choice(string.ascii_lowercase) +
- random.choice(string.digits) +
- random.choice(string.ascii_lowercase) +
- random.choice(string.digits))
- if cl_name not in current_cl_names:
- return cl_name
-
-
-def GetModifiedFiles():
- """Returns a set that maps from changelist name to (status,filename) tuples.
-
- Files not in a changelist have an empty changelist name. Filenames are in
- relation to the top level directory of the current repositary. Note that
- only the current directory and subdirectories are scanned, in order to
- improve performance while still being flexible.
- """
- files = {}
-
- # Since the files are normalized to the root folder of the repositary, figure
- # out what we need to add to the paths.
- dir_prefix = os.getcwd()[len(GetRepositoryRoot()):].strip(os.sep)
-
- # Get a list of all files in changelists.
- files_in_cl = {}
- for cl in GetCLs():
- change_info = LoadChangelistInfo(cl)
- for status, filename in change_info.files:
- files_in_cl[filename] = change_info.name
-
- # Get all the modified files.
- status = RunShell(["svn", "status"])
- for line in status.splitlines():
- if not len(line) or line[0] == "?":
- continue
- status = line[:7]
- filename = line[7:]
- if dir_prefix:
- filename = os.path.join(dir_prefix, filename)
- change_list_name = ""
- if filename in files_in_cl:
- change_list_name = files_in_cl[filename]
- files.setdefault(change_list_name, []).append((status, filename))
-
- return files
-
-
-def GetFilesNotInCL():
- """Returns a list of tuples (status,filename) that aren't in any changelists.
-
- See docstring of GetModifiedFiles for information about path of files and
- which directories are scanned.
- """
- modified_files = GetModifiedFiles()
- if "" not in modified_files:
- return []
- return modified_files[""]
-
-
-def SendToRietveld(request_path, payload=None,
- content_type="application/octet-stream"):
- """Send a POST/GET to Rietveld. Returns the response body."""
- def GetUserCredentials():
- """Prompts the user for a username and password."""
- email = raw_input("Email: ").strip()
- password = getpass.getpass("Password for %s: " % email)
- return email, password
-
- server = GetCodeReviewSetting("CODE_REVIEW_SERVER")
- rpc_server = upload.HttpRpcServer(server,
- GetUserCredentials,
- host_override=server,
- save_cookies=True)
- return rpc_server.Send(request_path, payload, content_type)
-
-
-def GetIssueDescription(issue):
- """Returns the issue description from Rietveld."""
- return SendToRietveld("/" + issue + "/description")
-
-
-def UnknownFiles(extra_args):
- """Runs svn status and prints unknown files.
-
- Any args in |extra_args| are passed to the tool to support giving alternate
- code locations.
- """
- args = ["svn", "status"]
- args += extra_args
- p = subprocess.Popen(args, stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT, shell = use_shell)
- while 1:
- line = p.stdout.readline()
- if not line:
- break
- if line[0] != '?':
- continue # Not an unknown file to svn.
- # The lines look like this:
- # "? foo.txt"
- # and we want just "foo.txt"
- print line[7:].strip()
- p.wait()
- p.stdout.close()
-
-
-def Opened():
- """Prints a list of modified files in the current directory down."""
- files = GetModifiedFiles()
- cl_keys = files.keys()
- cl_keys.sort()
- for cl_name in cl_keys:
- if cl_name:
- note = ""
- if len(LoadChangelistInfo(cl_name).files) != len(files[cl_name]):
- note = " (Note: this changelist contains files outside this directory)"
- print "\n--- Changelist " + cl_name + note + ":"
- for file in files[cl_name]:
- print "".join(file)
-
-
-def Help():
- print ("GCL is a wrapper for Subversion that simplifies working with groups "
- "of files.\n")
- print "Basic commands:"
- print "-----------------------------------------"
- print " gcl change change_name"
- print (" Add/remove files to a changelist. Only scans the current "
- "directory and subdirectories.\n")
- print (" gcl upload change_name [-r reviewer1@gmail.com,"
- "reviewer2@gmail.com,...] [--send_mail]")
- print " Uploads the changelist to the server for review.\n"
- print " gcl commit change_name"
- print " Commits the changelist to the repository.\n"
- print "Advanced commands:"
- print "-----------------------------------------"
- print " gcl delete change_name"
- print " Deletes a changelist.\n"
- print " gcl diff change_name"
- print " Diffs all files in the changelist.\n"
- print " gcl diff"
- print (" Diffs all files in the current directory and subdirectories "
- "that aren't in a changelist.\n")
- print " gcl changes"
- print " Lists all the the changelists and the files in them.\n"
- print " gcl nothave [optional directory]"
- print " Lists files unknown to Subversion.\n"
- print " gcl opened"
- print (" Lists modified files in the current directory and "
- "subdirectories.\n")
- print " gcl try change_name"
- print (" Sends the change to the tryserver so a trybot can do a test"
- " run on your code.\n")
-
-
-def GetEditor():
- editor = os.environ.get("SVN_EDITOR")
- if not editor:
- editor = os.environ.get("EDITOR")
-
- if not editor:
- if sys.platform.startswith("win"):
- editor = "notepad"
- else:
- editor = "vi"
-
- return editor
-
-
-def GenerateDiff(files):
- """Returns a string containing the diff for the given file list."""
- diff = []
- for file in files:
- # Use svn info output instead of os.path.isdir because the latter fails
- # when the file is deleted.
- if GetSVNFileInfo(file, "Node Kind") == "directory":
- continue
- # If the user specified a custom diff command in their svn config file,
- # then it'll be used when we do svn diff, which we don't want to happen
- # since we want the unified diff. Using --diff-cmd=diff doesn't always
- # work, since they can have another diff executable in their path that
- # gives different line endings. So we use a bogus temp directory as the
- # config directory, which gets around these problems.
- if sys.platform.startswith("win"):
- parent_dir = tempfile.gettempdir()
- else:
- parent_dir = sys.path[0] # tempdir is not secure.
- bogus_dir = os.path.join(parent_dir, "temp_svn_config")
- if not os.path.exists(bogus_dir):
- os.mkdir(bogus_dir)
- diff.append(RunShell(["svn", "diff", "--config-dir", bogus_dir, file]))
- return "".join(diff)
-
-
-def UploadCL(change_info, args):
- if not change_info.FileList():
- print "Nothing to upload, changelist is empty."
- return
-
- upload_arg = ["upload.py", "-y", "-l"]
- upload_arg.append("--server=" + GetCodeReviewSetting("CODE_REVIEW_SERVER"))
- upload_arg.extend(args)
-
- desc_file = ""
- if change_info.issue: # Uploading a new patchset.
- upload_arg.append("--message=''")
- upload_arg.append("--issue=" + change_info.issue)
- else: # First time we upload.
- handle, desc_file = tempfile.mkstemp(text=True)
- os.write(handle, change_info.description)
- os.close(handle)
-
- upload_arg.append("--cc=" + GetCodeReviewSetting("CC_LIST"))
- upload_arg.append("--description_file=" + desc_file + "")
- if change_info.description:
- subject = change_info.description[:77]
- if subject.find("\r\n") != -1:
- subject = subject[:subject.find("\r\n")]
- if subject.find("\n") != -1:
- subject = subject[:subject.find("\n")]
- if len(change_info.description) > 77:
- subject = subject + "..."
- upload_arg.append("--message=" + subject)
-
- # Change the current working directory before calling upload.py so that it
- # shows the correct base.
- os.chdir(GetRepositoryRoot())
-
- # If we have a lot of files with long paths, then we won't be able to fit
- # the command to "svn diff". Instead, we generate the diff manually for
- # each file and concatenate them before passing it to upload.py.
- issue = upload.RealMain(upload_arg, GenerateDiff(change_info.FileList()))
- if issue and issue != change_info.issue:
- change_info.issue = issue
- change_info.Save()
-
- if desc_file:
- os.remove(desc_file)
-
-
-def TryChange(change_info, args):
- """Create a diff file of change_info and send it to the try server."""
- try:
- import trychange
- except ImportError:
- ErrorExit("You need to install trychange.py to use the try server.")
-
- trychange.TryChange(args, change_info.name, change_info.FileList())
-
-
-def Commit(change_info):
- if not change_info.FileList():
- print "Nothing to commit, changelist is empty."
- return
-
- commit_cmd = ["svn", "commit"]
- filename = ''
- if change_info.issue:
- # Get the latest description from Rietveld.
- change_info.description = GetIssueDescription(change_info.issue)
-
- commit_message = change_info.description.replace('\r\n', '\n')
- if change_info.issue:
- commit_message += ('\nReview URL: http://%s/%s' %
- (GetCodeReviewSetting("CODE_REVIEW_SERVER"),
- change_info.issue))
-
- handle, commit_filename = tempfile.mkstemp(text=True)
- os.write(handle, commit_message)
- os.close(handle)
-
- handle, targets_filename = tempfile.mkstemp(text=True)
- os.write(handle, "\n".join(change_info.FileList()))
- os.close(handle)
-
- commit_cmd += ['--file=' + commit_filename]
- commit_cmd += ['--targets=' + targets_filename]
- # Change the current working directory before calling commit.
- os.chdir(GetRepositoryRoot())
- output = RunShell(commit_cmd, True)
- os.remove(commit_filename)
- os.remove(targets_filename)
- if output.find("Committed revision") != -1:
- change_info.Delete()
-
- if change_info.issue:
- revision = re.compile(".*?\nCommitted revision (\d+)",
- re.DOTALL).match(output).group(1)
- viewvc_url = GetCodeReviewSetting("VIEW_VC")
- change_info.description = (change_info.description +
- "\n\nCommitted: " + viewvc_url + revision)
- change_info.CloseIssue()
-
-
-def Change(change_info):
- """Creates/edits a changelist."""
- if change_info.issue:
- try:
- description = GetIssueDescription(change_info.issue)
- except urllib2.HTTPError, err:
- if err.code == 404:
- # The user deleted the issue in Rietveld, so forget the old issue id.
- description = change_info.description
- change_info.issue = ""
- change_info.Save()
- else:
- ErrorExit("Error getting the description from Rietveld: " + err)
- else:
- description = change_info.description
-
- other_files = GetFilesNotInCL()
-
- separator1 = ("\n---All lines above this line become the description.\n"
- "---Repository Root: " + GetRepositoryRoot() + "\n"
- "---Paths in this changelist (" + change_info.name + "):\n")
- separator2 = "\n\n---Paths modified but not in any changelist:\n\n"
- text = (description + separator1 + '\n' +
- '\n'.join([f[0] + f[1] for f in change_info.files]) + separator2 +
- '\n'.join([f[0] + f[1] for f in other_files]) + '\n')
-
- handle, filename = tempfile.mkstemp(text=True)
- os.write(handle, text)
- os.close(handle)
-
- command = GetEditor() + " " + filename
- os.system(command)
-
- result = ReadFile(filename)
- os.remove(filename)
-
- if not result:
- return
-
- split_result = result.split(separator1, 1)
- if len(split_result) != 2:
- ErrorExit("Don't modify the text starting with ---!\n\n" + result)
-
- new_description = split_result[0]
- cl_files_text = split_result[1]
- if new_description != description:
- change_info.description = new_description
- if change_info.issue:
- # Update the Rietveld issue with the new description.
- change_info.UpdateRietveldDescription()
-
- new_cl_files = []
- for line in cl_files_text.splitlines():
- if not len(line):
- continue
- if line.startswith("---"):
- break
- status = line[:7]
- file = line[7:]
- new_cl_files.append((status, file))
- change_info.files = new_cl_files
-
- change_info.Save()
- print change_info.name + " changelist saved."
-
-
-def Changes():
- """Print all the changlists and their files."""
- for cl in GetCLs():
- change_info = LoadChangelistInfo(cl, True, True)
- print "\n--- Changelist " + change_info.name + ":"
- for file in change_info.files:
- print "".join(file)
-
-
-def main(argv=None):
- if argv is None:
- argv = sys.argv
-
- if len(argv) == 1:
- Help()
- return 0;
-
- # Create the directory where we store information about changelists if it
- # doesn't exist.
- if not os.path.exists(GetInfoDir()):
- os.mkdir(GetInfoDir())
-
- command = argv[1]
- if command == "opened":
- Opened()
- return 0
- if command == "nothave":
- UnknownFiles(argv[2:])
- return 0
- if command == "changes":
- Changes()
- return 0
- if command == "diff" and len(argv) == 2:
- files = GetFilesNotInCL()
- print GenerateDiff([os.path.join(GetRepositoryRoot(), x[1]) for x in files])
- return 0
-
- if len(argv) == 2:
- if command == "change":
- # Generate a random changelist name.
- changename = GenerateChangeName()
- elif command == "help":
- Help()
- return 0
- else:
- ErrorExit("Need a changelist name.")
- else:
- changename = argv[2]
-
- fail_on_not_found = command != "change"
- change_info = LoadChangelistInfo(changename, fail_on_not_found, True)
-
- if command == "change":
- Change(change_info)
- elif command == "upload":
- UploadCL(change_info, argv[3:])
- elif command == "commit":
- Commit(change_info)
- elif command == "delete":
- change_info.Delete()
- elif command == "try":
- TryChange(change_info, argv[3:])
- else:
- # Everything else that is passed into gcl we redirect to svn, after adding
- # the files. This allows commands such as 'gcl diff xxx' to work.
- args =["svn", command]
- root = GetRepositoryRoot()
- args.extend([os.path.join(root, x) for x in change_info.FileList()])
- RunShell(args, True)
- return 0
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/thirdparty/chromium/upload.py b/thirdparty/chromium/upload.py
deleted file mode 120000
index b8ba213..0000000
--- a/thirdparty/chromium/upload.py
+++ /dev/null
@@ -1 +0,0 @@
-../rietveld/upload.py
\ No newline at end of file
diff --git a/thirdparty/mocker/LICENSE b/thirdparty/mocker/LICENSE
deleted file mode 100644
index c5b5923..0000000
--- a/thirdparty/mocker/LICENSE
+++ /dev/null
@@ -1,259 +0,0 @@
-A. HISTORY OF THE SOFTWARE
-==========================
-
-Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
-as a successor of a language called ABC. Guido remains Python's
-principal author, although it includes many contributions from others.
-
-In 1995, Guido continued his work on Python at the Corporation for
-National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
-in Reston, Virginia where he released several versions of the
-software.
-
-In May 2000, Guido and the Python core development team moved to
-BeOpen.com to form the BeOpen PythonLabs team. In October of the same
-year, the PythonLabs team moved to Digital Creations (now Zope
-Corporation, see http://www.zope.com). In 2001, the Python Software
-Foundation (PSF, see http://www.python.org/psf/) was formed, a
-non-profit organization created specifically to own Python-related
-Intellectual Property. Zope Corporation is a sponsoring member of
-the PSF.
-
-All Python releases are Open Source (see http://www.opensource.org for
-the Open Source Definition). Historically, most, but not all, Python
-releases have also been GPL-compatible; the table below summarizes
-the various releases.
-
- Release Derived Year Owner GPL-
- from compatible? (1)
-
- 0.9.0 thru 1.2 1991-1995 CWI yes
- 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
- 1.6 1.5.2 2000 CNRI no
- 2.0 1.6 2000 BeOpen.com no
- 1.6.1 1.6 2001 CNRI yes (2)
- 2.1 2.0+1.6.1 2001 PSF no
- 2.0.1 2.0+1.6.1 2001 PSF yes
- 2.1.1 2.1+2.0.1 2001 PSF yes
- 2.2 2.1.1 2001 PSF yes
- 2.1.2 2.1.1 2002 PSF yes
- 2.1.3 2.1.2 2002 PSF yes
- 2.2.1 2.2 2002 PSF yes
- 2.2.2 2.2.1 2002 PSF yes
- 2.2.3 2.2.2 2003 PSF yes
- 2.3 2.2.2 2002-2003 PSF yes
-
-Footnotes:
-
-(1) GPL-compatible doesn't mean that we're distributing Python under
- the GPL. All Python licenses, unlike the GPL, let you distribute
- a modified version without making your changes open source. The
- GPL-compatible licenses make it possible to combine Python with
- other software that is released under the GPL; the others don't.
-
-(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
- because its license has a choice of law clause. According to
- CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
- is "not incompatible" with the GPL.
-
-Thanks to the many outside volunteers who have worked under Guido's
-direction to make these releases possible.
-
-
-B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
-===============================================================
-
-PSF LICENSE AGREEMENT FOR PYTHON 2.3
-------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using Python 2.3 software in source or binary form and its
-associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 2.3
-alone or in any derivative version, provided, however, that PSF's
-License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
-2001, 2002, 2003 Python Software Foundation; All Rights Reserved" are
-retained in Python 2.3 alone or in any derivative version prepared by
-Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 2.3 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 2.3.
-
-4. PSF is making Python 2.3 available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.3 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-2.3 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.3,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python 2.3, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions. Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee. This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party. As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee. Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement. This Agreement together with
-Python 1.6.1 may be located on the Internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013. This
-Agreement may also be obtained from a proxy server on the Internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee. This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
- ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands. All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/thirdparty/mocker/__init__.py b/thirdparty/mocker/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/thirdparty/mocker/__init__.py
+++ /dev/null
diff --git a/thirdparty/mocker/mocker.py b/thirdparty/mocker/mocker.py
deleted file mode 100644
index 1e92f72..0000000
--- a/thirdparty/mocker/mocker.py
+++ /dev/null
@@ -1,2068 +0,0 @@
-"""
-Copyright (c) 2007 Gustavo Niemeyer <gustavo@niemeyer.net>
-
-Graceful platform for test doubles in Python (mocks, stubs, fakes, and dummies).
-"""
-import __builtin__
-import tempfile
-import unittest
-import inspect
-import shutil
-import types
-import sys
-import os
-import gc
-
-
-if sys.version_info < (2, 4):
- from sets import Set as set # pragma: nocover
-
-
-__all__ = ["Mocker", "expect", "IS", "CONTAINS", "IN", "MATCH",
- "ANY", "ARGS", "KWARGS"]
-
-
-__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "PSF License"
-__version__ = "0.10.1"
-
-
-ERROR_PREFIX = "[Mocker] "
-
-
-# --------------------------------------------------------------------
-# Exceptions
-
-class MatchError(AssertionError):
- """Raised when an unknown expression is seen in playback mode."""
-
-
-# --------------------------------------------------------------------
-# Helper for chained-style calling.
-
-class expect(object):
- """This is a simple helper that allows a different call-style.
-
- With this class one can comfortably do chaining of calls to the
- mocker object responsible by the object being handled. For instance::
-
- expect(obj.attr).result(3).count(1, 2)
-
- Is the same as::
-
- obj.attr
- mocker.result(3)
- mocker.count(1, 2)
-
- """
-
- def __init__(self, mock, attr=None):
- self._mock = mock
- self._attr = attr
-
- def __getattr__(self, attr):
- return self.__class__(self._mock, attr)
-
- def __call__(self, *args, **kwargs):
- getattr(self._mock.__mocker__, self._attr)(*args, **kwargs)
- return self
-
-
-# --------------------------------------------------------------------
-# Extensions to Python's unittest.
-
-class MockerTestCase(unittest.TestCase):
- """unittest.TestCase subclass with Mocker support.
-
- @ivar mocker: The mocker instance.
-
- This is a convenience only. Mocker may easily be used with the
- standard C{unittest.TestCase} class if wanted.
-
- Test methods have a Mocker instance available on C{self.mocker}.
- At the end of each test method, expectations of the mocker will
- be verified, and any requested changes made to the environment
- will be restored.
-
- In addition to the integration with Mocker, this class provides
- a few additional helper methods.
- """
-
- expect = expect
-
- def __init__(self, methodName="runTest"):
- # So here is the trick: we take the real test method, wrap it on
- # a function that do the job we have to do, and insert it in the
- # *instance* dictionary, so that getattr() will return our
- # replacement rather than the class method.
- test_method = getattr(self, methodName, None)
- if test_method is not None:
- def test_method_wrapper():
- try:
- result = test_method()
- except:
- raise
- else:
- if (self.mocker.is_recording() and
- self.mocker.get_events()):
- raise RuntimeError("Mocker must be put in replay "
- "mode with self.mocker.replay()")
- if (hasattr(result, "addCallback") and
- hasattr(result, "addErrback")):
- def verify(result):
- self.mocker.verify()
- return result
- result.addCallback(verify)
- else:
- self.mocker.verify()
- return result
- # Copy all attributes from the original method..
- for attr in dir(test_method):
- # .. unless they're present in our wrapper already.
- if not hasattr(test_method_wrapper, attr) or attr == "__doc__":
- setattr(test_method_wrapper, attr,
- getattr(test_method, attr))
- setattr(self, methodName, test_method_wrapper)
-
- # We could overload run() normally, but other well-known testing
- # frameworks do it as well, and some of them won't call the super,
- # which might mean that cleanup wouldn't happen. With that in mind,
- # we make integration easier by using the following trick.
- run_method = self.run
- def run_wrapper(*args, **kwargs):
- try:
- return run_method(*args, **kwargs)
- finally:
- self.__cleanup()
- self.run = run_wrapper
-
- self.mocker = Mocker()
-
- self.__cleanup_funcs = []
- self.__cleanup_paths = []
-
- super(MockerTestCase, self).__init__(methodName)
-
- def __cleanup(self):
- for path in self.__cleanup_paths:
- if os.path.isfile(path):
- os.unlink(path)
- elif os.path.isdir(path):
- shutil.rmtree(path)
- self.mocker.restore()
- for func, args, kwargs in self.__cleanup_funcs:
- func(*args, **kwargs)
-
- def addCleanup(self, func, *args, **kwargs):
- self.__cleanup_funcs.append((func, args, kwargs))
-
- def makeFile(self, content=None, suffix="", prefix="tmp", basename=None,
- dirname=None, path=None):
- """Create a temporary file and return the path to it.
-
- @param content: Initial content for the file.
- @param suffix: Suffix to be given to the file's basename.
- @param prefix: Prefix to be given to the file's basename.
- @param basename: Full basename for the file.
- @param dirname: Put file inside this directory.
-
- The file is removed after the test runs.
- """
- if path is not None:
- self.__cleanup_paths.append(path)
- elif basename is not None:
- if dirname is None:
- dirname = tempfile.mkdtemp()
- self.__cleanup_paths.append(dirname)
- path = os.path.join(dirname, basename)
- else:
- fd, path = tempfile.mkstemp(suffix, prefix, dirname)
- self.__cleanup_paths.append(path)
- os.close(fd)
- if content is None:
- os.unlink(path)
- if content is not None:
- file = open(path, "w")
- file.write(content)
- file.close()
- return path
-
- def makeDir(self, suffix="", prefix="tmp", dirname=None, path=None):
- """Create a temporary directory and return the path to it.
-
- @param suffix: Suffix to be given to the file's basename.
- @param prefix: Prefix to be given to the file's basename.
- @param dirname: Put directory inside this parent directory.
-
- The directory is removed after the test runs.
- """
- if path is not None:
- os.makedirs(path)
- else:
- path = tempfile.mkdtemp(suffix, prefix, dirname)
- self.__cleanup_paths.append(path)
- return path
-
- def failUnlessIs(self, first, second, msg=None):
- """Assert that C{first} is the same object as C{second}."""
- if first is not second:
- raise self.failureException(msg or "%r is not %r" % (first, second))
-
- def failIfIs(self, first, second, msg=None):
- """Assert that C{first} is not the same object as C{second}."""
- if first is second:
- raise self.failureException(msg or "%r is %r" % (first, second))
-
- def failUnlessIn(self, first, second, msg=None):
- """Assert that C{first} is contained in C{second}."""
- if first not in second:
- raise self.failureException(msg or "%r not in %r" % (first, second))
-
- def failUnlessStartsWith(self, first, second, msg=None):
- """Assert that C{first} starts with C{second}."""
- if first[:len(second)] != second:
- raise self.failureException(msg or "%r doesn't start with %r" %
- (first, second))
-
- def failIfStartsWith(self, first, second, msg=None):
- """Assert that C{first} doesn't start with C{second}."""
- if first[:len(second)] == second:
- raise self.failureException(msg or "%r starts with %r" %
- (first, second))
-
- def failUnlessEndsWith(self, first, second, msg=None):
- """Assert that C{first} starts with C{second}."""
- if first[len(first)-len(second):] != second:
- raise self.failureException(msg or "%r doesn't end with %r" %
- (first, second))
-
- def failIfEndsWith(self, first, second, msg=None):
- """Assert that C{first} doesn't start with C{second}."""
- if first[len(first)-len(second):] == second:
- raise self.failureException(msg or "%r ends with %r" %
- (first, second))
-
- def failIfIn(self, first, second, msg=None):
- """Assert that C{first} is not contained in C{second}."""
- if first in second:
- raise self.failureException(msg or "%r in %r" % (first, second))
-
- def failUnlessApproximates(self, first, second, tolerance, msg=None):
- """Assert that C{first} is near C{second} by at most C{tolerance}."""
- if abs(first - second) > tolerance:
- raise self.failureException(msg or "abs(%r - %r) > %r" %
- (first, second, tolerance))
-
- def failIfApproximates(self, first, second, tolerance, msg=None):
- """Assert that C{first} is far from C{second} by at least C{tolerance}.
- """
- if abs(first - second) <= tolerance:
- raise self.failureException(msg or "abs(%r - %r) <= %r" %
- (first, second, tolerance))
-
- def failUnlessMethodsMatch(self, first, second):
- """Assert that public methods in C{first} are present in C{second}.
-
- This method asserts that all public methods found in C{first} are also
- present in C{second} and accept the same arguments. C{first} may
- have its own private methods, though, and may not have all methods
- found in C{second}. Note that if a private method in C{first} matches
- the name of one in C{second}, their specification is still compared.
-
- This is useful to verify if a fake or stub class have the same API as
- the real class being simulated.
- """
- first_methods = dict(inspect.getmembers(first, inspect.ismethod))
- second_methods = dict(inspect.getmembers(second, inspect.ismethod))
- for name, first_method in first_methods.items():
- first_argspec = inspect.getargspec(first_method)
- first_formatted = inspect.formatargspec(*first_argspec)
-
- second_method = second_methods.get(name)
- if second_method is None:
- if name[:1] == "_":
- continue # First may have its own private methods.
- raise self.failureException("%s.%s%s not present in %s" %
- (first.__name__, name, first_formatted, second.__name__))
-
- second_argspec = inspect.getargspec(second_method)
- if first_argspec != second_argspec:
- second_formatted = inspect.formatargspec(*second_argspec)
- raise self.failureException("%s.%s%s != %s.%s%s" %
- (first.__name__, name, first_formatted,
- second.__name__, name, second_formatted))
-
-
- assertIs = failUnlessIs
- assertIsNot = failIfIs
- assertIn = failUnlessIn
- assertNotIn = failIfIn
- assertStartsWith = failUnlessStartsWith
- assertNotStartsWith = failIfStartsWith
- assertEndsWith = failUnlessEndsWith
- assertNotEndsWith = failIfEndsWith
- assertApproximates = failUnlessApproximates
- assertNotApproximates = failIfApproximates
- assertMethodsMatch = failUnlessMethodsMatch
-
- # The following are missing in Python < 2.4.
- assertTrue = unittest.TestCase.failUnless
- assertFalse = unittest.TestCase.failIf
-
- # The following is provided for compatibility with Twisted's trial.
- assertIdentical = assertIs
- assertNotIdentical = assertIsNot
- failUnlessIdentical = failUnlessIs
- failIfIdentical = failIfIs
-
-
-# --------------------------------------------------------------------
-# Mocker.
-
-class classinstancemethod(object):
-
- def __init__(self, method):
- self.method = method
-
- def __get__(self, obj, cls=None):
- def bound_method(*args, **kwargs):
- return self.method(cls, obj, *args, **kwargs)
- return bound_method
-
-
-class MockerBase(object):
- """Controller of mock objects.
-
- A mocker instance is used to command recording and replay of
- expectations on any number of mock objects.
-
- Expectations should be expressed for the mock object while in
- record mode (the initial one) by using the mock object itself,
- and using the mocker (and/or C{expect()} as a helper) to define
- additional behavior for each event. For instance::
-
- mock = mocker.mock()
- mock.hello()
- mocker.result("Hi!")
- mocker.replay()
- assert mock.hello() == "Hi!"
- mock.restore()
- mock.verify()
-
- In this short excerpt a mock object is being created, then an
- expectation of a call to the C{hello()} method was recorded, and
- when called the method should return the value C{10}. Then, the
- mocker is put in replay mode, and the expectation is satisfied by
- calling the C{hello()} method, which indeed returns 10. Finally,
- a call to the L{restore()} method is performed to undo any needed
- changes made in the environment, and the L{verify()} method is
- called to ensure that all defined expectations were met.
-
- The same logic can be expressed more elegantly using the
- C{with mocker:} statement, as follows::
-
- mock = mocker.mock()
- mock.hello()
- mocker.result("Hi!")
- with mocker:
- assert mock.hello() == "Hi!"
-
- Also, the MockerTestCase class, which integrates the mocker on
- a unittest.TestCase subclass, may be used to reduce the overhead
- of controlling the mocker. A test could be written as follows::
-
- class SampleTest(MockerTestCase):
-
- def test_hello(self):
- mock = self.mocker.mock()
- mock.hello()
- self.mocker.result("Hi!")
- self.mocker.replay()
- self.assertEquals(mock.hello(), "Hi!")
- """
-
- _recorders = []
-
- # For convenience only.
- on = expect
-
- class __metaclass__(type):
- def __init__(self, name, bases, dict):
- # Make independent lists on each subclass, inheriting from parent.
- self._recorders = list(getattr(self, "_recorders", ()))
-
- def __init__(self):
- self._recorders = self._recorders[:]
- self._events = []
- self._recording = True
- self._ordering = False
- self._last_orderer = None
-
- def is_recording(self):
- """Return True if in recording mode, False if in replay mode.
-
- Recording is the initial state.
- """
- return self._recording
-
- def replay(self):
- """Change to replay mode, where recorded events are reproduced.
-
- If already in replay mode, the mocker will be restored, with all
- expectations reset, and then put again in replay mode.
-
- An alternative and more comfortable way to replay changes is
- using the 'with' statement, as follows::
-
- mocker = Mocker()
- <record events>
- with mocker:
- <reproduce events>
-
- The 'with' statement will automatically put mocker in replay
- mode, and will also verify if all events were correctly reproduced
- at the end (using L{verify()}), and also restore any changes done
- in the environment (with L{restore()}).
-
- Also check the MockerTestCase class, which integrates the
- unittest.TestCase class with mocker.
- """
- if not self._recording:
- for event in self._events:
- event.restore()
- else:
- self._recording = False
- for event in self._events:
- event.replay()
-
- def restore(self):
- """Restore changes in the environment, and return to recording mode.
-
- This should always be called after the test is complete (succeeding
- or not). There are ways to call this method automatically on
- completion (e.g. using a C{with mocker:} statement, or using the
- L{MockerTestCase} class.
- """
- if not self._recording:
- self._recording = True
- for event in self._events:
- event.restore()
-
- def reset(self):
- """Reset the mocker state.
-
- This will restore environment changes, if currently in replay
- mode, and then remove all events previously recorded.
- """
- if not self._recording:
- self.restore()
- self.unorder()
- del self._events[:]
-
- def get_events(self):
- """Return all recorded events."""
- return self._events[:]
-
- def add_event(self, event):
- """Add an event.
-
- This method is used internally by the implementation, and
- shouldn't be needed on normal mocker usage.
- """
- self._events.append(event)
- if self._ordering:
- orderer = event.add_task(Orderer(event.path))
- if self._last_orderer:
- orderer.add_dependency(self._last_orderer)
- self._last_orderer = orderer
- return event
-
- def verify(self):
- """Check if all expectations were met, and raise AssertionError if not.
-
- The exception message will include a nice description of which
- expectations were not met, and why.
- """
- errors = []
- for event in self._events:
- try:
- event.verify()
- except AssertionError, e:
- error = str(e)
- if not error:
- raise RuntimeError("Empty error message from %r"
- % event)
- errors.append(error)
- if errors:
- message = [ERROR_PREFIX + "Unmet expectations:", ""]
- for error in errors:
- lines = error.splitlines()
- message.append("=> " + lines.pop(0))
- message.extend([" " + line for line in lines])
- message.append("")
- raise AssertionError(os.linesep.join(message))
-
- def mock(self, spec_and_type=None, spec=None, type=None,
- name=None, count=True):
- """Return a new mock object.
-
- @param spec_and_type: Handy positional argument which sets both
- spec and type.
- @param spec: Method calls will be checked for correctness against
- the given class.
- @param type: If set, the Mock's __class__ attribute will return
- the given type. This will make C{isinstance()} calls
- on the object work.
- @param name: Name for the mock object, used in the representation of
- expressions. The name is rarely needed, as it's usually
- guessed correctly from the variable name used.
- @param count: If set to false, expressions may be executed any number
- of times, unless an expectation is explicitly set using
- the L{count()} method. By default, expressions are
- expected once.
- """
- if spec_and_type is not None:
- spec = type = spec_and_type
- return Mock(self, spec=spec, type=type, name=name, count=count)
-
- def proxy(self, object, spec=True, type=True, name=None, count=True,
- passthrough=True):
- """Return a new mock object which proxies to the given object.
-
- Proxies are useful when only part of the behavior of an object
- is to be mocked. Unknown expressions may be passed through to
- the real implementation implicitly (if the C{passthrough} argument
- is True), or explicitly (using the L{passthrough()} method
- on the event).
-
- @param object: Real object to be proxied, and replaced by the mock
- on replay mode. It may also be an "import path",
- such as C{"time.time"}, in which case the object
- will be the C{time} function from the C{time} module.
- @param spec: Method calls will be checked for correctness against
- the given object, which may be a class or an instance
- where attributes will be looked up. Defaults to the
- the C{object} parameter. May be set to None explicitly,
- in which case spec checking is disabled. Checks may
- also be disabled explicitly on a per-event basis with
- the L{nospec()} method.
- @param type: If set, the Mock's __class__ attribute will return
- the given type. This will make C{isinstance()} calls
- on the object work. Defaults to the type of the
- C{object} parameter. May be set to None explicitly.
- @param name: Name for the mock object, used in the representation of
- expressions. The name is rarely needed, as it's usually
- guessed correctly from the variable name used.
- @param count: If set to false, expressions may be executed any number
- of times, unless an expectation is explicitly set using
- the L{count()} method. By default, expressions are
- expected once.
- @param passthrough: If set to False, passthrough of actions on the
- proxy to the real object will only happen when
- explicitly requested via the L{passthrough()}
- method.
- """
- if isinstance(object, basestring):
- if name is None:
- name = object
- import_stack = object.split(".")
- attr_stack = []
- while import_stack:
- module_path = ".".join(import_stack)
- try:
- object = __import__(module_path, {}, {}, [""])
- except ImportError:
- attr_stack.insert(0, import_stack.pop())
- if not import_stack:
- raise
- continue
- else:
- for attr in attr_stack:
- object = getattr(object, attr)
- break
- if spec is True:
- spec = object
- if type is True:
- type = __builtin__.type(object)
- return Mock(self, spec=spec, type=type, object=object,
- name=name, count=count, passthrough=passthrough)
-
- def replace(self, object, spec=True, type=True, name=None, count=True,
- passthrough=True):
- """Create a proxy, and replace the original object with the mock.
-
- On replay, the original object will be replaced by the returned
- proxy in all dictionaries found in the running interpreter via
- the garbage collecting system. This should cover module
- namespaces, class namespaces, instance namespaces, and so on.
-
- @param object: Real object to be proxied, and replaced by the mock
- on replay mode. It may also be an "import path",
- such as C{"time.time"}, in which case the object
- will be the C{time} function from the C{time} module.
- @param spec: Method calls will be checked for correctness against
- the given object, which may be a class or an instance
- where attributes will be looked up. Defaults to the
- the C{object} parameter. May be set to None explicitly,
- in which case spec checking is disabled. Checks may
- also be disabled explicitly on a per-event basis with
- the L{nospec()} method.
- @param type: If set, the Mock's __class__ attribute will return
- the given type. This will make C{isinstance()} calls
- on the object work. Defaults to the type of the
- C{object} parameter. May be set to None explicitly.
- @param name: Name for the mock object, used in the representation of
- expressions. The name is rarely needed, as it's usually
- guessed correctly from the variable name used.
- @param passthrough: If set to False, passthrough of actions on the
- proxy to the real object will only happen when
- explicitly requested via the L{passthrough()}
- method.
- """
- mock = self.proxy(object, spec, type, name, count, passthrough)
- event = self._get_replay_restore_event()
- event.add_task(ProxyReplacer(mock))
- return mock
-
- def patch(self, object, spec=True):
- """Patch an existing object to reproduce recorded events.
-
- @param object: Class or instance to be patched.
- @param spec: Method calls will be checked for correctness against
- the given object, which may be a class or an instance
- where attributes will be looked up. Defaults to the
- the C{object} parameter. May be set to None explicitly,
- in which case spec checking is disabled. Checks may
- also be disabled explicitly on a per-event basis with
- the L{nospec()} method.
-
- The result of this method is still a mock object, which can be
- used like any other mock object to record events. The difference
- is that when the mocker is put on replay mode, the *real* object
- will be modified to behave according to recorded expectations.
-
- Patching works in individual instances, and also in classes.
- When an instance is patched, recorded events will only be
- considered on this specific instance, and other instances should
- behave normally. When a class is patched, the reproduction of
- events will be considered on any instance of this class once
- created (collectively).
-
- Observe that, unlike with proxies which catch only events done
- through the mock object, *all* accesses to recorded expectations
- will be considered; even these coming from the object itself
- (e.g. C{self.hello()} is considered if this method was patched).
- While this is a very powerful feature, and many times the reason
- to use patches in the first place, it's important to keep this
- behavior in mind.
-
- Patching of the original object only takes place when the mocker
- is put on replay mode, and the patched object will be restored
- to its original state once the L{restore()} method is called
- (explicitly, or implicitly with alternative conventions, such as
- a C{with mocker:} block, or a MockerTestCase class).
- """
- if spec is True:
- spec = object
- patcher = Patcher()
- event = self._get_replay_restore_event()
- event.add_task(patcher)
- mock = Mock(self, object=object, patcher=patcher,
- passthrough=True, spec=spec)
- object.__mocker_mock__ = mock
- return mock
-
- def act(self, path):
- """This is called by mock objects whenever something happens to them.
-
- This method is part of the implementation between the mocker
- and mock objects.
- """
- if self._recording:
- event = self.add_event(Event(path))
- for recorder in self._recorders:
- recorder(self, event)
- return Mock(self, path)
- else:
- # First run events that may run, then run unsatisfied events, then
- # ones not previously run. We put the index in the ordering tuple
- # instead of the actual event because we want a stable sort
- # (ordering between 2 events is undefined).
- events = self._events
- order = [(events[i].satisfied()*2 + events[i].has_run(), i)
- for i in range(len(events))]
- order.sort()
- postponed = None
- for weight, i in order:
- event = events[i]
- if event.matches(path):
- if event.may_run(path):
- return event.run(path)
- elif postponed is None:
- postponed = event
- if postponed is not None:
- return postponed.run(path)
- raise MatchError(ERROR_PREFIX + "Unexpected expression: %s" % path)
-
- def get_recorders(cls, self):
- """Return recorders associated with this mocker class or instance.
-
- This method may be called on mocker instances and also on mocker
- classes. See the L{add_recorder()} method for more information.
- """
- return (self or cls)._recorders[:]
- get_recorders = classinstancemethod(get_recorders)
-
- def add_recorder(cls, self, recorder):
- """Add a recorder to this mocker class or instance.
-
- @param recorder: Callable accepting C{(mocker, event)} as parameters.
-
- This is part of the implementation of mocker.
-
- All registered recorders are called for translating events that
- happen during recording into expectations to be met once the state
- is switched to replay mode.
-
- This method may be called on mocker instances and also on mocker
- classes. When called on a class, the recorder will be used by
- all instances, and also inherited on subclassing. When called on
- instances, the recorder is added only to the given instance.
- """
- (self or cls)._recorders.append(recorder)
- return recorder
- add_recorder = classinstancemethod(add_recorder)
-
- def remove_recorder(cls, self, recorder):
- """Remove the given recorder from this mocker class or instance.
-
- This method may be called on mocker classes and also on mocker
- instances. See the L{add_recorder()} method for more information.
- """
- (self or cls)._recorders.remove(recorder)
- remove_recorder = classinstancemethod(remove_recorder)
-
- def result(self, value):
- """Make the last recorded event return the given value on replay.
-
- @param value: Object to be returned when the event is replayed.
- """
- self.call(lambda *args, **kwargs: value)
-
- def generate(self, sequence):
- """Last recorded event will return a generator with the given sequence.
-
- @param sequence: Sequence of values to be generated.
- """
- def generate(*args, **kwargs):
- for value in sequence:
- yield value
- self.call(generate)
-
- def throw(self, exception):
- """Make the last recorded event raise the given exception on replay.
-
- @param exception: Class or instance of exception to be raised.
- """
- def raise_exception(*args, **kwargs):
- raise exception
- self.call(raise_exception)
-
- def call(self, func):
- """Make the last recorded event cause the given function to be called.
-
- @param func: Function to be called.
-
- The result of the function will be used as the event result.
- """
- self._events[-1].add_task(FunctionRunner(func))
-
- def count(self, min, max=False):
- """Last recorded event must be replayed between min and max times.
-
- @param min: Minimum number of times that the event must happen.
- @param max: Maximum number of times that the event must happen. If
- not given, it defaults to the same value of the C{min}
- parameter. If set to None, there is no upper limit, and
- the expectation is met as long as it happens at least
- C{min} times.
- """
- event = self._events[-1]
- for task in event.get_tasks():
- if isinstance(task, RunCounter):
- event.remove_task(task)
- event.add_task(RunCounter(min, max))
-
- def is_ordering(self):
- """Return true if all events are being ordered.
-
- See the L{order()} method.
- """
- return self._ordering
-
- def unorder(self):
- """Disable the ordered mode.
-
- See the L{order()} method for more information.
- """
- self._ordering = False
- self._last_orderer = None
-
- def order(self, *path_holders):
- """Create an expectation of order between two or more events.
-
- @param path_holders: Objects returned as the result of recorded events.
-
- By default, mocker won't force events to happen precisely in
- the order they were recorded. Calling this method will change
- this behavior so that events will only match if reproduced in
- the correct order.
-
- There are two ways in which this method may be used. Which one
- is used in a given occasion depends only on convenience.
-
- If no arguments are passed, the mocker will be put in a mode where
- all the recorded events following the method call will only be met
- if they happen in order. When that's used, the mocker may be put
- back in unordered mode by calling the L{unorder()} method, or by
- using a 'with' block, like so::
-
- with mocker.ordered():
- <record events>
-
- In this case, only expressions in <record events> will be ordered,
- and the mocker will be back in unordered mode after the 'with' block.
-
- The second way to use it is by specifying precisely which events
- should be ordered. As an example::
-
- mock = mocker.mock()
- expr1 = mock.hello()
- expr2 = mock.world
- expr3 = mock.x.y.z
- mocker.order(expr1, expr2, expr3)
-
- This method of ordering only works when the expression returns
- another object.
-
- Also check the L{after()} and L{before()} methods, which are
- alternative ways to perform this.
- """
- if not path_holders:
- self._ordering = True
- return OrderedContext(self)
-
- last_orderer = None
- for path_holder in path_holders:
- if type(path_holder) is Path:
- path = path_holder
- else:
- path = path_holder.__mocker_path__
- for event in self._events:
- if event.path is path:
- for task in event.get_tasks():
- if isinstance(task, Orderer):
- orderer = task
- break
- else:
- orderer = Orderer(path)
- event.add_task(orderer)
- if last_orderer:
- orderer.add_dependency(last_orderer)
- last_orderer = orderer
- break
-
- def after(self, *path_holders):
- """Last recorded event must happen after events referred to.
-
- @param path_holders: Objects returned as the result of recorded events
- which should happen before the last recorded event
-
- As an example, the idiom::
-
- expect(mock.x).after(mock.y, mock.z)
-
- is an alternative way to say::
-
- expr_x = mock.x
- expr_y = mock.y
- expr_z = mock.z
- mocker.order(expr_y, expr_x)
- mocker.order(expr_z, expr_x)
-
- See L{order()} for more information.
- """
- last_path = self._events[-1].path
- for path_holder in path_holders:
- self.order(path_holder, last_path)
-
- def before(self, *path_holders):
- """Last recorded event must happen before events referred to.
-
- @param path_holders: Objects returned as the result of recorded events
- which should happen after the last recorded event
-
- As an example, the idiom::
-
- expect(mock.x).before(mock.y, mock.z)
-
- is an alternative way to say::
-
- expr_x = mock.x
- expr_y = mock.y
- expr_z = mock.z
- mocker.order(expr_x, expr_y)
- mocker.order(expr_x, expr_z)
-
- See L{order()} for more information.
- """
- last_path = self._events[-1].path
- for path_holder in path_holders:
- self.order(last_path, path_holder)
-
- def nospec(self):
- """Don't check method specification of real object on last event.
-
- By default, when using a mock created as the result of a call to
- L{proxy()}, L{replace()}, and C{patch()}, or when passing the spec
- attribute to the L{mock()} method, method calls on the given object
- are checked for correctness against the specification of the real
- object (or the explicitly provided spec).
-
- This method will disable that check specifically for the last
- recorded event.
- """
- event = self._events[-1]
- for task in event.get_tasks():
- if isinstance(task, SpecChecker):
- event.remove_task(task)
-
- def passthrough(self, result_callback=None):
- """Make the last recorded event run on the real object once seen.
-
- @param result_callback: If given, this function will be called with
- the result of the *real* method call as the only argument.
-
- This can only be used on proxies, as returned by the L{proxy()}
- and L{replace()} methods, or on mocks representing patched objects,
- as returned by the L{patch()} method.
- """
- event = self._events[-1]
- if event.path.root_object is None:
- raise TypeError("Mock object isn't a proxy")
- event.add_task(PathExecuter(result_callback))
-
- def __enter__(self):
- """Enter in a 'with' context. This will run replay()."""
- self.replay()
- return self
-
- def __exit__(self, type, value, traceback):
- """Exit from a 'with' context.
-
- This will run restore() at all times, but will only run verify()
- if the 'with' block itself hasn't raised an exception. Exceptions
- in that block are never swallowed.
- """
- self.restore()
- if type is None:
- self.verify()
- return False
-
- def _get_replay_restore_event(self):
- """Return unique L{ReplayRestoreEvent}, creating if needed.
-
- Some tasks only want to replay/restore. When that's the case,
- they shouldn't act on other events during replay. Also, they
- can all be put in a single event when that's the case. Thus,
- we add a single L{ReplayRestoreEvent} as the first element of
- the list.
- """
- if not self._events or type(self._events[0]) != ReplayRestoreEvent:
- self._events.insert(0, ReplayRestoreEvent())
- return self._events[0]
-
-
-class OrderedContext(object):
-
- def __init__(self, mocker):
- self._mocker = mocker
-
- def __enter__(self):
- return None
-
- def __exit__(self, type, value, traceback):
- self._mocker.unorder()
-
-
-class Mocker(MockerBase):
- __doc__ = MockerBase.__doc__
-
-# Decorator to add recorders on the standard Mocker class.
-recorder = Mocker.add_recorder
-
-
-# --------------------------------------------------------------------
-# Mock object.
-
-class Mock(object):
-
- def __init__(self, mocker, path=None, name=None, spec=None, type=None,
- object=None, passthrough=False, patcher=None, count=True):
- self.__mocker__ = mocker
- self.__mocker_path__ = path or Path(self, object)
- self.__mocker_name__ = name
- self.__mocker_spec__ = spec
- self.__mocker_object__ = object
- self.__mocker_passthrough__ = passthrough
- self.__mocker_patcher__ = patcher
- self.__mocker_replace__ = False
- self.__mocker_type__ = type
- self.__mocker_count__ = count
-
- def __mocker_act__(self, kind, args=(), kwargs={}, object=None):
- if self.__mocker_name__ is None:
- self.__mocker_name__ = find_object_name(self, 2)
- action = Action(kind, args, kwargs, self.__mocker_path__)
- path = self.__mocker_path__ + action
- if object is not None:
- path.root_object = object
- try:
- return self.__mocker__.act(path)
- except MatchError, exception:
- root_mock = path.root_mock
- if (path.root_object is not None and
- root_mock.__mocker_passthrough__):
- return path.execute(path.root_object)
- # Reinstantiate to show raise statement on traceback, and
- # also to make the traceback shown shorter.
- raise MatchError(str(exception))
- except AssertionError, e:
- lines = str(e).splitlines()
- message = [ERROR_PREFIX + "Unmet expectation:", ""]
- message.append("=> " + lines.pop(0))
- message.extend([" " + line for line in lines])
- message.append("")
- raise AssertionError(os.linesep.join(message))
-
- def __getattribute__(self, name):
- if name.startswith("__mocker_"):
- return super(Mock, self).__getattribute__(name)
- if name == "__class__":
- if self.__mocker__.is_recording() or self.__mocker_type__ is None:
- return type(self)
- return self.__mocker_type__
- return self.__mocker_act__("getattr", (name,))
-
- def __setattr__(self, name, value):
- if name.startswith("__mocker_"):
- return super(Mock, self).__setattr__(name, value)
- return self.__mocker_act__("setattr", (name, value))
-
- def __delattr__(self, name):
- return self.__mocker_act__("delattr", (name,))
-
- def __call__(self, *args, **kwargs):
- return self.__mocker_act__("call", args, kwargs)
-
- def __contains__(self, value):
- return self.__mocker_act__("contains", (value,))
-
- def __getitem__(self, key):
- return self.__mocker_act__("getitem", (key,))
-
- def __setitem__(self, key, value):
- return self.__mocker_act__("setitem", (key, value))
-
- def __delitem__(self, key):
- return self.__mocker_act__("delitem", (key,))
-
- def __len__(self):
- # MatchError is turned on an AttributeError so that list() and
- # friends act properly when trying to get length hints on
- # something that doesn't offer them.
- try:
- result = self.__mocker_act__("len")
- except MatchError, e:
- raise AttributeError(str(e))
- if type(result) is Mock:
- return 0
- return result
-
- def __nonzero__(self):
- try:
- return self.__mocker_act__("nonzero")
- except MatchError, e:
- return True
-
- def __iter__(self):
- # XXX On py3k, when next() becomes __next__(), we'll be able
- # to return the mock itself because it will be considered
- # an iterator (we'll be mocking __next__ as well, which we
- # can't now).
- result = self.__mocker_act__("iter")
- if type(result) is Mock:
- return iter([])
- return result
-
- # When adding a new action kind here, also add support for it on
- # Action.execute() and Path.__str__().
-
-
-def find_object_name(obj, depth=0):
- """Try to detect how the object is named on a previous scope."""
- try:
- frame = sys._getframe(depth+1)
- except:
- return None
- for name, frame_obj in frame.f_locals.iteritems():
- if frame_obj is obj:
- return name
- self = frame.f_locals.get("self")
- if self is not None:
- try:
- items = list(self.__dict__.iteritems())
- except:
- pass
- else:
- for name, self_obj in items:
- if self_obj is obj:
- return name
- return None
-
-
-# --------------------------------------------------------------------
-# Action and path.
-
-class Action(object):
-
- def __init__(self, kind, args, kwargs, path=None):
- self.kind = kind
- self.args = args
- self.kwargs = kwargs
- self.path = path
- self._execute_cache = {}
-
- def __repr__(self):
- if self.path is None:
- return "Action(%r, %r, %r)" % (self.kind, self.args, self.kwargs)
- return "Action(%r, %r, %r, %r)" % \
- (self.kind, self.args, self.kwargs, self.path)
-
- def __eq__(self, other):
- return (self.kind == other.kind and
- self.args == other.args and
- self.kwargs == other.kwargs)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def matches(self, other):
- return (self.kind == other.kind and
- match_params(self.args, self.kwargs, other.args, other.kwargs))
-
- def execute(self, object):
- # This caching scheme may fail if the object gets deallocated before
- # the action, as the id might get reused. It's somewhat easy to fix
- # that with a weakref callback. For our uses, though, the object
- # should never get deallocated before the action itself, so we'll
- # just keep it simple.
- if id(object) in self._execute_cache:
- return self._execute_cache[id(object)]
- execute = getattr(object, "__mocker_execute__", None)
- if execute is not None:
- result = execute(self, object)
- else:
- kind = self.kind
- if kind == "getattr":
- result = getattr(object, self.args[0])
- elif kind == "setattr":
- result = setattr(object, self.args[0], self.args[1])
- elif kind == "delattr":
- result = delattr(object, self.args[0])
- elif kind == "call":
- result = object(*self.args, **self.kwargs)
- elif kind == "contains":
- result = self.args[0] in object
- elif kind == "getitem":
- result = object[self.args[0]]
- elif kind == "setitem":
- result = object[self.args[0]] = self.args[1]
- elif kind == "delitem":
- del object[self.args[0]]
- result = None
- elif kind == "len":
- result = len(object)
- elif kind == "nonzero":
- result = bool(object)
- elif kind == "iter":
- result = iter(object)
- else:
- raise RuntimeError("Don't know how to execute %r kind." % kind)
- self._execute_cache[id(object)] = result
- return result
-
-
-class Path(object):
-
- def __init__(self, root_mock, root_object=None, actions=()):
- self.root_mock = root_mock
- self.root_object = root_object
- self.actions = tuple(actions)
- self.__mocker_replace__ = False
-
- def parent_path(self):
- if not self.actions:
- return None
- return self.actions[-1].path
- parent_path = property(parent_path)
-
- def __add__(self, action):
- """Return a new path which includes the given action at the end."""
- return self.__class__(self.root_mock, self.root_object,
- self.actions + (action,))
-
- def __eq__(self, other):
- """Verify if the two paths are equal.
-
- Two paths are equal if they refer to the same mock object, and
- have the actions with equal kind, args and kwargs.
- """
- if (self.root_mock is not other.root_mock or
- self.root_object is not other.root_object or
- len(self.actions) != len(other.actions)):
- return False
- for action, other_action in zip(self.actions, other.actions):
- if action != other_action:
- return False
- return True
-
- def matches(self, other):
- """Verify if the two paths are equivalent.
-
- Two paths are equal if they refer to the same mock object, and
- have the same actions performed on them.
- """
- if (self.root_mock is not other.root_mock or
- len(self.actions) != len(other.actions)):
- return False
- for action, other_action in zip(self.actions, other.actions):
- if not action.matches(other_action):
- return False
- return True
-
- def execute(self, object):
- """Execute all actions sequentially on object, and return result.
- """
- for action in self.actions:
- object = action.execute(object)
- return object
-
- def __str__(self):
- """Transform the path into a nice string such as obj.x.y('z')."""
- result = self.root_mock.__mocker_name__ or "<mock>"
- for action in self.actions:
- if action.kind == "getattr":
- result = "%s.%s" % (result, action.args[0])
- elif action.kind == "setattr":
- result = "%s.%s = %r" % (result, action.args[0], action.args[1])
- elif action.kind == "delattr":
- result = "del %s.%s" % (result, action.args[0])
- elif action.kind == "call":
- args = [repr(x) for x in action.args]
- items = list(action.kwargs.iteritems())
- items.sort()
- for pair in items:
- args.append("%s=%r" % pair)
- result = "%s(%s)" % (result, ", ".join(args))
- elif action.kind == "contains":
- result = "%r in %s" % (action.args[0], result)
- elif action.kind == "getitem":
- result = "%s[%r]" % (result, action.args[0])
- elif action.kind == "setitem":
- result = "%s[%r] = %r" % (result, action.args[0],
- action.args[1])
- elif action.kind == "delitem":
- result = "del %s[%r]" % (result, action.args[0])
- elif action.kind == "len":
- result = "len(%s)" % result
- elif action.kind == "nonzero":
- result = "bool(%s)" % result
- elif action.kind == "iter":
- result = "iter(%s)" % result
- else:
- raise RuntimeError("Don't know how to format kind %r" %
- action.kind)
- return result
-
-
-class SpecialArgument(object):
- """Base for special arguments for matching parameters."""
-
- def __init__(self, object=None):
- self.object = object
-
- def __repr__(self):
- if self.object is None:
- return self.__class__.__name__
- else:
- return "%s(%r)" % (self.__class__.__name__, self.object)
-
- def matches(self, other):
- return True
-
- def __eq__(self, other):
- return type(other) == type(self) and self.object == other.object
-
-
-class ANY(SpecialArgument):
- """Matches any single argument."""
-
-ANY = ANY()
-
-
-class ARGS(SpecialArgument):
- """Matches zero or more positional arguments."""
-
-ARGS = ARGS()
-
-
-class KWARGS(SpecialArgument):
- """Matches zero or more keyword arguments."""
-
-KWARGS = KWARGS()
-
-
-class IS(SpecialArgument):
-
- def matches(self, other):
- return self.object is other
-
- def __eq__(self, other):
- return type(other) == type(self) and self.object is other.object
-
-
-class CONTAINS(SpecialArgument):
-
- def matches(self, other):
- try:
- other.__contains__
- except AttributeError:
- try:
- iter(other)
- except TypeError:
- # If an object can't be iterated, and has no __contains__
- # hook, it'd blow up on the test below. We test this in
- # advance to prevent catching more errors than we really
- # want.
- return False
- return self.object in other
-
-
-class IN(SpecialArgument):
-
- def matches(self, other):
- return other in self.object
-
-
-class MATCH(SpecialArgument):
-
- def matches(self, other):
- return bool(self.object(other))
-
- def __eq__(self, other):
- return type(other) == type(self) and self.object is other.object
-
-
-def match_params(args1, kwargs1, args2, kwargs2):
- """Match the two sets of parameters, considering special parameters."""
-
- has_args = ARGS in args1
- has_kwargs = KWARGS in args1
-
- if has_kwargs:
- args1 = [arg1 for arg1 in args1 if arg1 is not KWARGS]
- elif len(kwargs1) != len(kwargs2):
- return False
-
- if not has_args and len(args1) != len(args2):
- return False
-
- # Either we have the same number of kwargs, or unknown keywords are
- # accepted (KWARGS was used), so check just the ones in kwargs1.
- for key, arg1 in kwargs1.iteritems():
- if key not in kwargs2:
- return False
- arg2 = kwargs2[key]
- if isinstance(arg1, SpecialArgument):
- if not arg1.matches(arg2):
- return False
- elif arg1 != arg2:
- return False
-
- # Keywords match. Now either we have the same number of
- # arguments, or ARGS was used. If ARGS wasn't used, arguments
- # must match one-on-one necessarily.
- if not has_args:
- for arg1, arg2 in zip(args1, args2):
- if isinstance(arg1, SpecialArgument):
- if not arg1.matches(arg2):
- return False
- elif arg1 != arg2:
- return False
- return True
-
- # Easy choice. Keywords are matching, and anything on args is accepted.
- if (ARGS,) == args1:
- return True
-
- # We have something different there. If we don't have positional
- # arguments on the original call, it can't match.
- if not args2:
- # Unless we have just several ARGS (which is bizarre, but..).
- for arg1 in args1:
- if arg1 is not ARGS:
- return False
- return True
-
- # Ok, all bets are lost. We have to actually do the more expensive
- # matching. This is an algorithm based on the idea of the Levenshtein
- # Distance between two strings, but heavily hacked for this purpose.
- args2l = len(args2)
- if args1[0] is ARGS:
- args1 = args1[1:]
- array = [0]*args2l
- else:
- array = [1]*args2l
- for i in range(len(args1)):
- last = array[0]
- if args1[i] is ARGS:
- for j in range(1, args2l):
- last, array[j] = array[j], min(array[j-1], array[j], last)
- else:
- array[0] = i or int(args1[i] != args2[0])
- for j in range(1, args2l):
- last, array[j] = array[j], last or int(args1[i] != args2[j])
- if 0 not in array:
- return False
- if array[-1] != 0:
- return False
- return True
-
-
-# --------------------------------------------------------------------
-# Event and task base.
-
-class Event(object):
- """Aggregation of tasks that keep track of a recorded action.
-
- An event represents something that may or may not happen while the
- mocked environment is running, such as an attribute access, or a
- method call. The event is composed of several tasks that are
- orchestrated together to create a composed meaning for the event,
- including for which actions it should be run, what happens when it
- runs, and what's the expectations about the actions run.
- """
-
- def __init__(self, path=None):
- self.path = path
- self._tasks = []
- self._has_run = False
-
- def add_task(self, task):
- """Add a new task to this taks."""
- self._tasks.append(task)
- return task
-
- def remove_task(self, task):
- self._tasks.remove(task)
-
- def get_tasks(self):
- return self._tasks[:]
-
- def matches(self, path):
- """Return true if *all* tasks match the given path."""
- for task in self._tasks:
- if not task.matches(path):
- return False
- return bool(self._tasks)
-
- def has_run(self):
- return self._has_run
-
- def may_run(self, path):
- """Verify if any task would certainly raise an error if run.
-
- This will call the C{may_run()} method on each task and return
- false if any of them returns false.
- """
- for task in self._tasks:
- if not task.may_run(path):
- return False
- return True
-
- def run(self, path):
- """Run all tasks with the given action.
-
- @param path: The path of the expression run.
-
- Running an event means running all of its tasks individually and in
- order. An event should only ever be run if all of its tasks claim to
- match the given action.
-
- The result of this method will be the last result of a task
- which isn't None, or None if they're all None.
- """
- self._has_run = True
- result = None
- errors = []
- for task in self._tasks:
- try:
- task_result = task.run(path)
- except AssertionError, e:
- error = str(e)
- if not error:
- raise RuntimeError("Empty error message from %r" % task)
- errors.append(error)
- else:
- if task_result is not None:
- result = task_result
- if errors:
- message = [str(self.path)]
- if str(path) != message[0]:
- message.append("- Run: %s" % path)
- for error in errors:
- lines = error.splitlines()
- message.append("- " + lines.pop(0))
- message.extend([" " + line for line in lines])
- raise AssertionError(os.linesep.join(message))
- return result
-
- def satisfied(self):
- """Return true if all tasks are satisfied.
-
- Being satisfied means that there are no unmet expectations.
- """
- for task in self._tasks:
- try:
- task.verify()
- except AssertionError:
- return False
- return True
-
- def verify(self):
- """Run verify on all tasks.
-
- The verify method is supposed to raise an AssertionError if the
- task has unmet expectations, with a one-line explanation about
- why this item is unmet. This method should be safe to be called
- multiple times without side effects.
- """
- errors = []
- for task in self._tasks:
- try:
- task.verify()
- except AssertionError, e:
- error = str(e)
- if not error:
- raise RuntimeError("Empty error message from %r" % task)
- errors.append(error)
- if errors:
- message = [str(self.path)]
- for error in errors:
- lines = error.splitlines()
- message.append("- " + lines.pop(0))
- message.extend([" " + line for line in lines])
- raise AssertionError(os.linesep.join(message))
-
- def replay(self):
- """Put all tasks in replay mode."""
- self._has_run = False
- for task in self._tasks:
- task.replay()
-
- def restore(self):
- """Restore the state of all tasks."""
- for task in self._tasks:
- task.restore()
-
-
-class ReplayRestoreEvent(Event):
- """Helper event for tasks which need replay/restore but shouldn't match."""
-
- def matches(self, path):
- return False
-
-
-class Task(object):
- """Element used to track one specific aspect on an event.
-
- A task is responsible for adding any kind of logic to an event.
- Examples of that are counting the number of times the event was
- made, verifying parameters if any, and so on.
- """
-
- def matches(self, path):
- """Return true if the task is supposed to be run for the given path.
- """
- return True
-
- def may_run(self, path):
- """Return false if running this task would certainly raise an error."""
- return True
-
- def run(self, path):
- """Perform the task item, considering that the given action happened.
- """
-
- def verify(self):
- """Raise AssertionError if expectations for this item are unmet.
-
- The verify method is supposed to raise an AssertionError if the
- task has unmet expectations, with a one-line explanation about
- why this item is unmet. This method should be safe to be called
- multiple times without side effects.
- """
-
- def replay(self):
- """Put the task in replay mode.
-
- Any expectations of the task should be reset.
- """
-
- def restore(self):
- """Restore any environmental changes made by the task.
-
- Verify should continue to work after this is called.
- """
-
-
-# --------------------------------------------------------------------
-# Task implementations.
-
-class OnRestoreCaller(Task):
- """Call a given callback when restoring."""
-
- def __init__(self, callback):
- self._callback = callback
-
- def restore(self):
- self._callback()
-
-
-class PathMatcher(Task):
- """Match the action path against a given path."""
-
- def __init__(self, path):
- self.path = path
-
- def matches(self, path):
- return self.path.matches(path)
-
-def path_matcher_recorder(mocker, event):
- event.add_task(PathMatcher(event.path))
-
-Mocker.add_recorder(path_matcher_recorder)
-
-
-class RunCounter(Task):
- """Task which verifies if the number of runs are within given boundaries.
- """
-
- def __init__(self, min, max=False):
- self.min = min
- if max is None:
- self.max = sys.maxint
- elif max is False:
- self.max = min
- else:
- self.max = max
- self._runs = 0
-
- def replay(self):
- self._runs = 0
-
- def may_run(self, path):
- return self._runs < self.max
-
- def run(self, path):
- self._runs += 1
- if self._runs > self.max:
- self.verify()
-
- def verify(self):
- if not self.min <= self._runs <= self.max:
- if self._runs < self.min:
- raise AssertionError("Performed fewer times than expected.")
- raise AssertionError("Performed more times than expected.")
-
-
-class ImplicitRunCounter(RunCounter):
- """RunCounter inserted by default on any event.
-
- This is a way to differentiate explicitly added counters and
- implicit ones.
- """
-
-def run_counter_recorder(mocker, event):
- """Any event may be repeated once, unless disabled by default."""
- if event.path.root_mock.__mocker_count__:
- event.add_task(ImplicitRunCounter(1))
-
-Mocker.add_recorder(run_counter_recorder)
-
-def run_counter_removal_recorder(mocker, event):
- """
- Events created by getattr actions which lead to other events
- may be repeated any number of times. For that, we remove implicit
- run counters of any getattr actions leading to the current one.
- """
- parent_path = event.path.parent_path
- for event in mocker.get_events()[::-1]:
- if (event.path is parent_path and
- event.path.actions[-1].kind == "getattr"):
- for task in event.get_tasks():
- if type(task) is ImplicitRunCounter:
- event.remove_task(task)
-
-Mocker.add_recorder(run_counter_removal_recorder)
-
-
-class MockReturner(Task):
- """Return a mock based on the action path."""
-
- def __init__(self, mocker):
- self.mocker = mocker
-
- def run(self, path):
- return Mock(self.mocker, path)
-
-def mock_returner_recorder(mocker, event):
- """Events that lead to other events must return mock objects."""
- parent_path = event.path.parent_path
- for event in mocker.get_events():
- if event.path is parent_path:
- for task in event.get_tasks():
- if isinstance(task, MockReturner):
- break
- else:
- event.add_task(MockReturner(mocker))
- break
-
-Mocker.add_recorder(mock_returner_recorder)
-
-
-class FunctionRunner(Task):
- """Task that runs a function everything it's run.
-
- Arguments of the last action in the path are passed to the function,
- and the function result is also returned.
- """
-
- def __init__(self, func):
- self._func = func
-
- def run(self, path):
- action = path.actions[-1]
- return self._func(*action.args, **action.kwargs)
-
-
-class PathExecuter(Task):
- """Task that executes a path in the real object, and returns the result."""
-
- def __init__(self, result_callback=None):
- self._result_callback = result_callback
-
- def get_result_callback(self):
- return self._result_callback
-
- def run(self, path):
- result = path.execute(path.root_object)
- if self._result_callback is not None:
- self._result_callback(result)
- return result
-
-
-class Orderer(Task):
- """Task to establish an order relation between two events.
-
- An orderer task will only match once all its dependencies have
- been run.
- """
-
- def __init__(self, path):
- self.path = path
- self._run = False
- self._dependencies = []
-
- def replay(self):
- self._run = False
-
- def has_run(self):
- return self._run
-
- def may_run(self, path):
- for dependency in self._dependencies:
- if not dependency.has_run():
- return False
- return True
-
- def run(self, path):
- for dependency in self._dependencies:
- if not dependency.has_run():
- raise AssertionError("Should be after: %s" % dependency.path)
- self._run = True
-
- def add_dependency(self, orderer):
- self._dependencies.append(orderer)
-
- def get_dependencies(self):
- return self._dependencies
-
-
-class SpecChecker(Task):
- """Task to check if arguments of the last action conform to a real method.
- """
-
- def __init__(self, method):
- self._method = method
- self._unsupported = False
-
- if method:
- try:
- self._args, self._varargs, self._varkwargs, self._defaults = \
- inspect.getargspec(method)
- except TypeError:
- self._unsupported = True
- else:
- if self._defaults is None:
- self._defaults = ()
- if type(method) is type(self.run):
- self._args = self._args[1:]
-
- def get_method(self):
- return self._method
-
- def _raise(self, message):
- spec = inspect.formatargspec(self._args, self._varargs,
- self._varkwargs, self._defaults)
- raise AssertionError("Specification is %s%s: %s" %
- (self._method.__name__, spec, message))
-
- def verify(self):
- if not self._method:
- raise AssertionError("Method not found in real specification")
-
- def may_run(self, path):
- try:
- self.run(path)
- except AssertionError:
- return False
- return True
-
- def run(self, path):
- if not self._method:
- raise AssertionError("Method not found in real specification")
- if self._unsupported:
- return # Can't check it. Happens with builtin functions. :-(
- action = path.actions[-1]
- obtained_len = len(action.args)
- obtained_kwargs = action.kwargs.copy()
- nodefaults_len = len(self._args) - len(self._defaults)
- for i, name in enumerate(self._args):
- if i < obtained_len and name in action.kwargs:
- self._raise("%r provided twice" % name)
- if (i >= obtained_len and i < nodefaults_len and
- name not in action.kwargs):
- self._raise("%r not provided" % name)
- obtained_kwargs.pop(name, None)
- if obtained_len > len(self._args) and not self._varargs:
- self._raise("too many args provided")
- if obtained_kwargs and not self._varkwargs:
- self._raise("unknown kwargs: %s" % ", ".join(obtained_kwargs))
-
-def spec_checker_recorder(mocker, event):
- spec = event.path.root_mock.__mocker_spec__
- if spec:
- actions = event.path.actions
- if len(actions) == 1:
- if actions[0].kind == "call":
- method = getattr(spec, "__call__", None)
- event.add_task(SpecChecker(method))
- elif len(actions) == 2:
- if actions[0].kind == "getattr" and actions[1].kind == "call":
- method = getattr(spec, actions[0].args[0], None)
- event.add_task(SpecChecker(method))
-
-Mocker.add_recorder(spec_checker_recorder)
-
-
-class ProxyReplacer(Task):
- """Task which installs and deinstalls proxy mocks.
-
- This task will replace a real object by a mock in all dictionaries
- found in the running interpreter via the garbage collecting system.
- """
-
- def __init__(self, mock):
- self.mock = mock
- self.__mocker_replace__ = False
-
- def replay(self):
- global_replace(self.mock.__mocker_object__, self.mock)
-
- def restore(self):
- global_replace(self.mock, self.mock.__mocker_object__)
-
-
-def global_replace(remove, install):
- """Replace object 'remove' with object 'install' on all dictionaries."""
- for referrer in gc.get_referrers(remove):
- if (type(referrer) is dict and
- referrer.get("__mocker_replace__", True)):
- for key, value in referrer.items():
- if value is remove:
- referrer[key] = install
-
-
-class Undefined(object):
-
- def __repr__(self):
- return "Undefined"
-
-Undefined = Undefined()
-
-
-class Patcher(Task):
-
- def __init__(self):
- super(Patcher, self).__init__()
- self._monitored = {} # {kind: {id(object): object}}
- self._patched = {}
-
- def is_monitoring(self, obj, kind):
- monitored = self._monitored.get(kind)
- if monitored:
- if id(obj) in monitored:
- return True
- cls = type(obj)
- if issubclass(cls, type):
- cls = obj
- bases = set([id(base) for base in cls.__mro__])
- bases.intersection_update(monitored)
- return bool(bases)
- return False
-
- def monitor(self, obj, kind):
- if kind not in self._monitored:
- self._monitored[kind] = {}
- self._monitored[kind][id(obj)] = obj
-
- def patch_attr(self, obj, attr, value):
- original = obj.__dict__.get(attr, Undefined)
- self._patched[id(obj), attr] = obj, attr, original
- setattr(obj, attr, value)
-
- def get_unpatched_attr(self, obj, attr):
- cls = type(obj)
- if issubclass(cls, type):
- cls = obj
- result = Undefined
- for mro_cls in cls.__mro__:
- key = (id(mro_cls), attr)
- if key in self._patched:
- result = self._patched[key][2]
- if result is not Undefined:
- break
- elif attr in mro_cls.__dict__:
- result = mro_cls.__dict__.get(attr, Undefined)
- break
- if isinstance(result, object) and hasattr(type(result), "__get__"):
- if cls is obj:
- obj = None
- return result.__get__(obj, cls)
- return result
-
- def _get_kind_attr(self, kind):
- if kind == "getattr":
- return "__getattribute__"
- return "__%s__" % kind
-
- def replay(self):
- for kind in self._monitored:
- attr = self._get_kind_attr(kind)
- seen = set()
- for obj in self._monitored[kind].itervalues():
- cls = type(obj)
- if issubclass(cls, type):
- cls = obj
- if cls not in seen:
- seen.add(cls)
- unpatched = getattr(cls, attr, Undefined)
- self.patch_attr(cls, attr,
- PatchedMethod(kind, unpatched,
- self.is_monitoring))
- self.patch_attr(cls, "__mocker_execute__",
- self.execute)
-
- def restore(self):
- for obj, attr, original in self._patched.itervalues():
- if original is Undefined:
- delattr(obj, attr)
- else:
- setattr(obj, attr, original)
- self._patched.clear()
-
- def execute(self, action, object):
- attr = self._get_kind_attr(action.kind)
- unpatched = self.get_unpatched_attr(object, attr)
- try:
- return unpatched(*action.args, **action.kwargs)
- except AttributeError:
- if action.kind == "getattr":
- # The normal behavior of Python is to try __getattribute__,
- # and if it raises AttributeError, try __getattr__. We've
- # tried the unpatched __getattribute__ above, and we'll now
- # try __getattr__.
- try:
- __getattr__ = unpatched("__getattr__")
- except AttributeError:
- pass
- else:
- return __getattr__(*action.args, **action.kwargs)
- raise
-
-
-class PatchedMethod(object):
-
- def __init__(self, kind, unpatched, is_monitoring):
- self._kind = kind
- self._unpatched = unpatched
- self._is_monitoring = is_monitoring
-
- def __get__(self, obj, cls=None):
- object = obj or cls
- if not self._is_monitoring(object, self._kind):
- return self._unpatched.__get__(obj, cls)
- def method(*args, **kwargs):
- if self._kind == "getattr" and args[0].startswith("__mocker_"):
- return self._unpatched.__get__(obj, cls)(args[0])
- mock = object.__mocker_mock__
- return mock.__mocker_act__(self._kind, args, kwargs, object)
- return method
-
- def __call__(self, obj, *args, **kwargs):
- # At least with __getattribute__, Python seems to use *both* the
- # descriptor API and also call the class attribute directly. It
- # looks like an interpreter bug, or at least an undocumented
- # inconsistency.
- return self.__get__(obj)(*args, **kwargs)
-
-
-def patcher_recorder(mocker, event):
- mock = event.path.root_mock
- if mock.__mocker_patcher__ and len(event.path.actions) == 1:
- patcher = mock.__mocker_patcher__
- patcher.monitor(mock.__mocker_object__, event.path.actions[0].kind)
-
-Mocker.add_recorder(patcher_recorder)
diff --git a/thirdparty/rietveld/COPYING b/thirdparty/rietveld/COPYING
deleted file mode 100644
index d645695..0000000
--- a/thirdparty/rietveld/COPYING
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/thirdparty/rietveld/upload.py b/thirdparty/rietveld/upload.py
deleted file mode 100755
index f6f45ff..0000000
--- a/thirdparty/rietveld/upload.py
+++ /dev/null
@@ -1,1339 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2007 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Tool for uploading diffs from a version control system to the codereview app.
-
-Usage summary: upload.py [options] [-- diff_options]
-
-Diff options are passed to the diff command of the underlying system.
-
-Supported version control systems:
- Git
- Mercurial
- Subversion
-
-It is important for Git/Mercurial users to specify a tree/node/branch to diff
-against by using the '--rev' option.
-"""
-# This code is derived from appcfg.py in the App Engine SDK (open source),
-# and from ASPN recipe #146306.
-
-import cookielib
-import getpass
-import logging
-import md5
-import mimetypes
-import optparse
-import os
-import re
-import socket
-import subprocess
-import sys
-import urllib
-import urllib2
-import urlparse
-
-try:
- import readline
-except ImportError:
- pass
-
-# The logging verbosity:
-# 0: Errors only.
-# 1: Status messages.
-# 2: Info logs.
-# 3: Debug logs.
-verbosity = 1
-
-# Max size of patch or base file.
-MAX_UPLOAD_SIZE = 900 * 1024
-
-
-def StatusUpdate(msg):
- """Print a status message to stdout.
-
- If 'verbosity' is greater than 0, print the message.
-
- Args:
- msg: The string to print.
- """
- if verbosity > 0:
- print msg
-
-
-def ErrorExit(msg):
- """Print an error message to stderr and exit."""
- print >>sys.stderr, msg
- sys.exit(1)
-
-
-class ClientLoginError(urllib2.HTTPError):
- """Raised to indicate there was an error authenticating with ClientLogin."""
-
- def __init__(self, url, code, msg, headers, args):
- urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
- self.args = args
- self.reason = args["Error"]
-
-
-class AbstractRpcServer(object):
- """Provides a common interface for a simple RPC server."""
-
- def __init__(self, host, auth_function, host_override=None, extra_headers={},
- save_cookies=False):
- """Creates a new HttpRpcServer.
-
- Args:
- host: The host to send requests to.
- auth_function: A function that takes no arguments and returns an
- (email, password) tuple when called. Will be called if authentication
- is required.
- host_override: The host header to send to the server (defaults to host).
- extra_headers: A dict of extra headers to append to every request.
- save_cookies: If True, save the authentication cookies to local disk.
- If False, use an in-memory cookiejar instead. Subclasses must
- implement this functionality. Defaults to False.
- """
- self.host = host
- self.host_override = host_override
- self.auth_function = auth_function
- self.authenticated = False
- self.extra_headers = extra_headers
- self.save_cookies = save_cookies
- self.opener = self._GetOpener()
- if self.host_override:
- logging.info("Server: %s; Host: %s", self.host, self.host_override)
- else:
- logging.info("Server: %s", self.host)
-
- def _GetOpener(self):
- """Returns an OpenerDirector for making HTTP requests.
-
- Returns:
- A urllib2.OpenerDirector object.
- """
- raise NotImplementedError()
-
- def _CreateRequest(self, url, data=None):
- """Creates a new urllib request."""
- logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
- req = urllib2.Request(url, data=data)
- if self.host_override:
- req.add_header("Host", self.host_override)
- for key, value in self.extra_headers.iteritems():
- req.add_header(key, value)
- return req
-
- def _GetAuthToken(self, email, password):
- """Uses ClientLogin to authenticate the user, returning an auth token.
-
- Args:
- email: The user's email address
- password: The user's password
-
- Raises:
- ClientLoginError: If there was an error authenticating with ClientLogin.
- HTTPError: If there was some other form of HTTP error.
-
- Returns:
- The authentication token returned by ClientLogin.
- """
- account_type = "HOSTED_OR_GOOGLE"
- if self.host.endswith(".google.com"):
- # Needed for use inside Google.
- account_type = "HOSTED"
- req = self._CreateRequest(
- url="https://www.google.com/accounts/ClientLogin",
- data=urllib.urlencode({
- "Email": email,
- "Passwd": password,
- "service": "ah",
- "source": "rietveld-codereview-upload",
- "accountType": account_type,
- }),
- )
- try:
- response = self.opener.open(req)
- response_body = response.read()
- response_dict = dict(x.split("=")
- for x in response_body.split("\n") if x)
- return response_dict["Auth"]
- except urllib2.HTTPError, e:
- if e.code == 403:
- body = e.read()
- response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
- raise ClientLoginError(req.get_full_url(), e.code, e.msg,
- e.headers, response_dict)
- else:
- raise
-
- def _GetAuthCookie(self, auth_token):
- """Fetches authentication cookies for an authentication token.
-
- Args:
- auth_token: The authentication token returned by ClientLogin.
-
- Raises:
- HTTPError: If there was an error fetching the authentication cookies.
- """
- # This is a dummy value to allow us to identify when we're successful.
- continue_location = "http://localhost/"
- args = {"continue": continue_location, "auth": auth_token}
- req = self._CreateRequest("http://%s/_ah/login?%s" %
- (self.host, urllib.urlencode(args)))
- try:
- response = self.opener.open(req)
- except urllib2.HTTPError, e:
- response = e
- if (response.code != 302 or
- response.info()["location"] != continue_location):
- raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
- response.headers, response.fp)
- self.authenticated = True
-
- def _Authenticate(self):
- """Authenticates the user.
-
- The authentication process works as follows:
- 1) We get a username and password from the user
- 2) We use ClientLogin to obtain an AUTH token for the user
- (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
- 3) We pass the auth token to /_ah/login on the server to obtain an
- authentication cookie. If login was successful, it tries to redirect
- us to the URL we provided.
-
- If we attempt to access the upload API without first obtaining an
- authentication cookie, it returns a 401 response and directs us to
- authenticate ourselves with ClientLogin.
- """
- for i in range(3):
- credentials = self.auth_function()
- try:
- auth_token = self._GetAuthToken(credentials[0], credentials[1])
- except ClientLoginError, e:
- if e.reason == "BadAuthentication":
- print >>sys.stderr, "Invalid username or password."
- continue
- if e.reason == "CaptchaRequired":
- print >>sys.stderr, (
- "Please go to\n"
- "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
- "and verify you are a human. Then try again.")
- break
- if e.reason == "NotVerified":
- print >>sys.stderr, "Account not verified."
- break
- if e.reason == "TermsNotAgreed":
- print >>sys.stderr, "User has not agreed to TOS."
- break
- if e.reason == "AccountDeleted":
- print >>sys.stderr, "The user account has been deleted."
- break
- if e.reason == "AccountDisabled":
- print >>sys.stderr, "The user account has been disabled."
- break
- if e.reason == "ServiceDisabled":
- print >>sys.stderr, ("The user's access to the service has been "
- "disabled.")
- break
- if e.reason == "ServiceUnavailable":
- print >>sys.stderr, "The service is not available; try again later."
- break
- raise
- self._GetAuthCookie(auth_token)
- return
-
- def Send(self, request_path, payload=None,
- content_type="application/octet-stream",
- timeout=None,
- **kwargs):
- """Sends an RPC and returns the response.
-
- Args:
- request_path: The path to send the request to, eg /api/appversion/create.
- payload: The body of the request, or None to send an empty request.
- content_type: The Content-Type header to use.
- timeout: timeout in seconds; default None i.e. no timeout.
- (Note: for large requests on OS X, the timeout doesn't work right.)
- kwargs: Any keyword arguments are converted into query string parameters.
-
- Returns:
- The response body, as a string.
- """
- # TODO: Don't require authentication. Let the server say
- # whether it is necessary.
- if not self.authenticated:
- self._Authenticate()
-
- old_timeout = socket.getdefaulttimeout()
- socket.setdefaulttimeout(timeout)
- try:
- tries = 0
- while True:
- tries += 1
- args = dict(kwargs)
- url = "http://%s%s" % (self.host, request_path)
- if args:
- url += "?" + urllib.urlencode(args)
- req = self._CreateRequest(url=url, data=payload)
- req.add_header("Content-Type", content_type)
- try:
- f = self.opener.open(req)
- response = f.read()
- f.close()
- return response
- except urllib2.HTTPError, e:
- if tries > 3:
- raise
- elif e.code == 401:
- self._Authenticate()
-## elif e.code >= 500 and e.code < 600:
-## # Server Error - try again.
-## continue
- else:
- raise
- finally:
- socket.setdefaulttimeout(old_timeout)
-
-
-class HttpRpcServer(AbstractRpcServer):
- """Provides a simplified RPC-style interface for HTTP requests."""
-
- def _Authenticate(self):
- """Save the cookie jar after authentication."""
- super(HttpRpcServer, self)._Authenticate()
- if self.save_cookies:
- StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
- self.cookie_jar.save()
-
- def _GetOpener(self):
- """Returns an OpenerDirector that supports cookies and ignores redirects.
-
- Returns:
- A urllib2.OpenerDirector object.
- """
- opener = urllib2.OpenerDirector()
- opener.add_handler(urllib2.ProxyHandler())
- opener.add_handler(urllib2.UnknownHandler())
- opener.add_handler(urllib2.HTTPHandler())
- opener.add_handler(urllib2.HTTPDefaultErrorHandler())
- opener.add_handler(urllib2.HTTPSHandler())
- opener.add_handler(urllib2.HTTPErrorProcessor())
- if self.save_cookies:
- self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
- if os.path.exists(self.cookie_file):
- try:
- self.cookie_jar.load()
- self.authenticated = True
- StatusUpdate("Loaded authentication cookies from %s" %
- self.cookie_file)
- except (cookielib.LoadError, IOError):
- # Failed to load cookies - just ignore them.
- pass
- else:
- # Create an empty cookie file with mode 600
- fd = os.open(self.cookie_file, os.O_CREAT, 0600)
- os.close(fd)
- # Always chmod the cookie file
- os.chmod(self.cookie_file, 0600)
- else:
- # Don't save cookies across runs of update.py.
- self.cookie_jar = cookielib.CookieJar()
- opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
- return opener
-
-
-parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
-parser.add_option("-y", "--assume_yes", action="store_true",
- dest="assume_yes", default=False,
- help="Assume that the answer to yes/no questions is 'yes'.")
-# Logging
-group = parser.add_option_group("Logging options")
-group.add_option("-q", "--quiet", action="store_const", const=0,
- dest="verbose", help="Print errors only.")
-group.add_option("-v", "--verbose", action="store_const", const=2,
- dest="verbose", default=1,
- help="Print info level logs (default).")
-group.add_option("--noisy", action="store_const", const=3,
- dest="verbose", help="Print all logs.")
-# Review server
-group = parser.add_option_group("Review server options")
-group.add_option("-s", "--server", action="store", dest="server",
- default="codereviews.googleopensourceprograms.com",
- metavar="SERVER",
- help=("The server to upload to. The format is host[:port]. "
- "Defaults to '%default'."))
-group.add_option("-e", "--email", action="store", dest="email",
- metavar="EMAIL", default=None,
- help="The username to use. Will prompt if omitted.")
-group.add_option("-H", "--host", action="store", dest="host",
- metavar="HOST", default=None,
- help="Overrides the Host header sent with all RPCs.")
-group.add_option("--no_cookies", action="store_false",
- dest="save_cookies", default=True,
- help="Do not save authentication cookies to local disk.")
-# Issue
-group = parser.add_option_group("Issue options")
-group.add_option("-d", "--description", action="store", dest="description",
- metavar="DESCRIPTION", default=None,
- help="Optional description when creating an issue.")
-group.add_option("-f", "--description_file", action="store",
- dest="description_file", metavar="DESCRIPTION_FILE",
- default=None,
- help="Optional path of a file that contains "
- "the description when creating an issue.")
-group.add_option("-r", "--reviewers", action="store", dest="reviewers",
- metavar="REVIEWERS", default=None,
- help="Add reviewers (comma separated email addresses).")
-group.add_option("--cc", action="store", dest="cc",
- metavar="CC", default=None,
- help="Add CC (comma separated email addresses).")
-# Upload options
-group = parser.add_option_group("Patch options")
-group.add_option("-m", "--message", action="store", dest="message",
- metavar="MESSAGE", default=None,
- help="A message to identify the patch. "
- "Will prompt if omitted.")
-group.add_option("-i", "--issue", type="int", action="store",
- metavar="ISSUE", default=None,
- help="Issue number to which to add. Defaults to new issue.")
-group.add_option("--download_base", action="store_true",
- dest="download_base", default=False,
- help="Base files will be downloaded by the server "
- "(side-by-side diffs may not work on files with CRs).")
-group.add_option("--rev", action="store", dest="revision",
- metavar="REV", default=None,
- help="Branch/tree/revision to diff against (used by DVCS).")
-group.add_option("--send_mail", action="store_true",
- dest="send_mail", default=False,
- help="Send notification email to reviewers.")
-
-
-def GetRpcServer(options):
- """Returns an instance of an AbstractRpcServer.
-
- Returns:
- A new AbstractRpcServer, on which RPC calls can be made.
- """
-
- rpc_server_class = HttpRpcServer
-
- def GetUserCredentials():
- """Prompts the user for a username and password."""
- email = options.email
- if email is None:
- prompt = "Email (login for uploading to %s): " % options.server
- email = raw_input(prompt).strip()
- password = getpass.getpass("Password for %s: " % email)
- return (email, password)
-
- # If this is the dev_appserver, use fake authentication.
- host = (options.host or options.server).lower()
- if host == "localhost" or host.startswith("localhost:"):
- email = options.email
- if email is None:
- email = "test@example.com"
- logging.info("Using debug user %s. Override with --email" % email)
- server = rpc_server_class(
- options.server,
- lambda: (email, "password"),
- host_override=options.host,
- extra_headers={"Cookie":
- 'dev_appserver_login="%s:False"' % email},
- save_cookies=options.save_cookies)
- # Don't try to talk to ClientLogin.
- server.authenticated = True
- return server
-
- return rpc_server_class(options.server, GetUserCredentials,
- host_override=options.host,
- save_cookies=options.save_cookies)
-
-
-def EncodeMultipartFormData(fields, files):
- """Encode form fields for multipart/form-data.
-
- Args:
- fields: A sequence of (name, value) elements for regular form fields.
- files: A sequence of (name, filename, value) elements for data to be
- uploaded as files.
- Returns:
- (content_type, body) ready for httplib.HTTP instance.
-
- Source:
- http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
- """
- BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
- CRLF = '\r\n'
- lines = []
- for (key, value) in fields:
- lines.append('--' + BOUNDARY)
- lines.append('Content-Disposition: form-data; name="%s"' % key)
- lines.append('')
- lines.append(value)
- for (key, filename, value) in files:
- lines.append('--' + BOUNDARY)
- lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
- (key, filename))
- lines.append('Content-Type: %s' % GetContentType(filename))
- lines.append('')
- lines.append(value)
- lines.append('--' + BOUNDARY + '--')
- lines.append('')
- body = CRLF.join(lines)
- content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
- return content_type, body
-
-
-def GetContentType(filename):
- """Helper to guess the content-type from the filename."""
- return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
-
-
-# Use a shell for subcommands on Windows to get a PATH search.
-use_shell = sys.platform.startswith("win")
-
-def RunShellWithReturnCode(command, print_output=False,
- universal_newlines=True):
- """Executes a command and returns the output from stdout and the return code.
-
- Args:
- command: Command to execute.
- print_output: If True, the output is printed to stdout.
- If False, both stdout and stderr are ignored.
- universal_newlines: Use universal_newlines flag (default: True).
-
- Returns:
- Tuple (output, return code)
- """
- logging.info("Running %s", command)
- p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- shell=use_shell, universal_newlines=universal_newlines)
- if print_output:
- output_array = []
- while True:
- line = p.stdout.readline()
- if not line:
- break
- print line.strip("\n")
- output_array.append(line)
- output = "".join(output_array)
- else:
- output = p.stdout.read()
- p.wait()
- errout = p.stderr.read()
- if print_output and errout:
- print >>sys.stderr, errout
- p.stdout.close()
- p.stderr.close()
- return output, p.returncode
-
-
-def RunShell(command, silent_ok=False, universal_newlines=True,
- print_output=False):
- data, retcode = RunShellWithReturnCode(command, print_output,
- universal_newlines)
- if retcode:
- ErrorExit("Got error status from %s:\n%s" % (command, data))
- if not silent_ok and not data:
- ErrorExit("No output from %s" % command)
- return data
-
-
-class VersionControlSystem(object):
- """Abstract base class providing an interface to the VCS."""
-
- def __init__(self, options):
- """Constructor.
-
- Args:
- options: Command line options.
- """
- self.options = options
-
- def GenerateDiff(self, args):
- """Return the current diff as a string.
-
- Args:
- args: Extra arguments to pass to the diff command.
- """
- raise NotImplementedError(
- "abstract method -- subclass %s must override" % self.__class__)
-
- def GetUnknownFiles(self):
- """Return a list of files unknown to the VCS."""
- raise NotImplementedError(
- "abstract method -- subclass %s must override" % self.__class__)
-
- def CheckForUnknownFiles(self):
- """Show an "are you sure?" prompt if there are unknown files."""
- unknown_files = self.GetUnknownFiles()
- if unknown_files:
- print "The following files are not added to version control:"
- for line in unknown_files:
- print line
- prompt = "Are you sure to continue?(y/N) "
- answer = raw_input(prompt).strip()
- if answer != "y":
- ErrorExit("User aborted")
-
- def GetBaseFile(self, filename):
- """Get the content of the upstream version of a file.
-
- Returns:
- A tuple (base_content, new_content, is_binary, status)
- base_content: The contents of the base file.
- new_content: For text files, this is empty. For binary files, this is
- the contents of the new file, since the diff output won't contain
- information to reconstruct the current file.
- is_binary: True iff the file is binary.
- status: The status of the file.
- """
-
- raise NotImplementedError(
- "abstract method -- subclass %s must override" % self.__class__)
-
-
- def GetBaseFiles(self, diff):
- """Helper that calls GetBase file for each file in the patch.
-
- Returns:
- A dictionary that maps from filename to GetBaseFile's tuple. Filenames
- are retrieved based on lines that start with "Index:" or
- "Property changes on:".
- """
- files = {}
- for line in diff.splitlines(True):
- if line.startswith('Index:') or line.startswith('Property changes on:'):
- unused, filename = line.split(':', 1)
- # On Windows if a file has property changes its filename uses '\'
- # instead of '/'.
- filename = filename.strip().replace('\\', '/')
- files[filename] = self.GetBaseFile(filename)
- return files
-
-
- def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
- files):
- """Uploads the base files (and if necessary, the current ones as well)."""
-
- def UploadFile(filename, file_id, content, is_binary, status, is_base):
- """Uploads a file to the server."""
- file_too_large = False
- if is_base:
- type = "base"
- else:
- type = "current"
- if len(content) > MAX_UPLOAD_SIZE:
- print ("Not uploading the %s file for %s because it's too large." %
- (type, filename))
- file_too_large = True
- content = ""
- checksum = md5.new(content).hexdigest()
- if options.verbose > 0 and not file_too_large:
- print "Uploading %s file for %s" % (type, filename)
- url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
- form_fields = [("filename", filename),
- ("status", status),
- ("checksum", checksum),
- ("is_binary", str(is_binary)),
- ("is_current", str(not is_base)),
- ]
- if file_too_large:
- form_fields.append(("file_too_large", "1"))
- if options.email:
- form_fields.append(("user", options.email))
- ctype, body = EncodeMultipartFormData(form_fields,
- [("data", filename, content)])
- response_body = rpc_server.Send(url, body,
- content_type=ctype)
- if not response_body.startswith("OK"):
- StatusUpdate(" --> %s" % response_body)
- sys.exit(1)
-
- patches = dict()
- [patches.setdefault(v, k) for k, v in patch_list]
- for filename in patches.keys():
- base_content, new_content, is_binary, status = files[filename]
- file_id_str = patches.get(filename)
- if file_id_str.find("nobase") != -1:
- base_content = None
- file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
- file_id = int(file_id_str)
- if base_content != None:
- UploadFile(filename, file_id, base_content, is_binary, status, True)
- if new_content != None:
- UploadFile(filename, file_id, new_content, is_binary, status, False)
-
- def IsImage(self, filename):
- """Returns true if the filename has an image extension."""
- mimetype = mimetypes.guess_type(filename)[0]
- if not mimetype:
- return False
- return mimetype.startswith("image/")
-
-
-class SubversionVCS(VersionControlSystem):
- """Implementation of the VersionControlSystem interface for Subversion."""
-
- def __init__(self, options):
- super(SubversionVCS, self).__init__(options)
- if self.options.revision:
- match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
- if not match:
- ErrorExit("Invalid Subversion revision %s." % self.options.revision)
- self.rev_start = match.group(1)
- self.rev_end = match.group(3)
- else:
- self.rev_start = self.rev_end = None
- # Cache output from "svn list -r REVNO dirname".
- # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
- self.svnls_cache = {}
- # SVN base URL is required to fetch files deleted in an older revision.
- # Result is cached to not guess it over and over again in GetBaseFile().
- required = self.options.download_base or self.options.revision is not None
- self.svn_base = self._GuessBase(required)
-
- def GuessBase(self, required):
- """Wrapper for _GuessBase."""
- return self.svn_base
-
- def _GuessBase(self, required):
- """Returns the SVN base URL.
-
- Args:
- required: If true, exits if the url can't be guessed, otherwise None is
- returned.
- """
- info = RunShell(["svn", "info"])
- for line in info.splitlines():
- words = line.split()
- if len(words) == 2 and words[0] == "URL:":
- url = words[1]
- scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
- username, netloc = urllib.splituser(netloc)
- if username:
- logging.info("Removed username from base URL")
- if netloc.endswith("svn.python.org"):
- if netloc == "svn.python.org":
- if path.startswith("/projects/"):
- path = path[9:]
- elif netloc != "pythondev@svn.python.org":
- ErrorExit("Unrecognized Python URL: %s" % url)
- base = "http://svn.python.org/view/*checkout*%s/" % path
- logging.info("Guessed Python base = %s", base)
- elif netloc.endswith("svn.collab.net"):
- if path.startswith("/repos/"):
- path = path[6:]
- base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
- logging.info("Guessed CollabNet base = %s", base)
- elif netloc.endswith(".googlecode.com"):
- path = path + "/"
- base = urlparse.urlunparse(("http", netloc, path, params,
- query, fragment))
- logging.info("Guessed Google Code base = %s", base)
- else:
- path = path + "/"
- base = urlparse.urlunparse((scheme, netloc, path, params,
- query, fragment))
- logging.info("Guessed base = %s", base)
- return base
- if required:
- ErrorExit("Can't find URL in output from svn info")
- return None
-
- def GenerateDiff(self, args):
- cmd = ["svn", "diff"]
- if self.options.revision:
- cmd += ["-r", self.options.revision]
- cmd.extend(args)
- data = RunShell(cmd)
- count = 0
- for line in data.splitlines():
- if line.startswith("Index:") or line.startswith("Property changes on:"):
- count += 1
- logging.info(line)
- if not count:
- ErrorExit("No valid patches found in output from svn diff")
- return data
-
- def _CollapseKeywords(self, content, keyword_str):
- """Collapses SVN keywords."""
- # svn cat translates keywords but svn diff doesn't. As a result of this
- # behavior patching.PatchChunks() fails with a chunk mismatch error.
- # This part was originally written by the Review Board development team
- # who had the same problem (http://reviews.review-board.org/r/276/).
- # Mapping of keywords to known aliases
- svn_keywords = {
- # Standard keywords
- 'Date': ['Date', 'LastChangedDate'],
- 'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
- 'Author': ['Author', 'LastChangedBy'],
- 'HeadURL': ['HeadURL', 'URL'],
- 'Id': ['Id'],
-
- # Aliases
- 'LastChangedDate': ['LastChangedDate', 'Date'],
- 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
- 'LastChangedBy': ['LastChangedBy', 'Author'],
- 'URL': ['URL', 'HeadURL'],
- }
-
- def repl(m):
- if m.group(2):
- return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
- return "$%s$" % m.group(1)
- keywords = [keyword
- for name in keyword_str.split(" ")
- for keyword in svn_keywords.get(name, [])]
- return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
-
- def GetUnknownFiles(self):
- status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
- unknown_files = []
- for line in status.split("\n"):
- if line and line[0] == "?":
- unknown_files.append(line)
- return unknown_files
-
- def ReadFile(self, filename):
- """Returns the contents of a file."""
- file = open(filename, 'rb')
- result = ""
- try:
- result = file.read()
- finally:
- file.close()
- return result
-
- def GetStatus(self, filename):
- """Returns the status of a file."""
- if not self.options.revision:
- status = RunShell(["svn", "status", "--ignore-externals", filename])
- if not status:
- ErrorExit("svn status returned no output for %s" % filename)
- status_lines = status.splitlines()
- # If file is in a cl, the output will begin with
- # "\n--- Changelist 'cl_name':\n". See
- # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
- if (len(status_lines) == 3 and
- not status_lines[0] and
- status_lines[1].startswith("--- Changelist")):
- status = status_lines[2]
- else:
- status = status_lines[0]
- # If we have a revision to diff against we need to run "svn list"
- # for the old and the new revision and compare the results to get
- # the correct status for a file.
- else:
- dirname, relfilename = os.path.split(filename)
- if dirname not in self.svnls_cache:
- cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
- out, returncode = RunShellWithReturnCode(cmd)
- if returncode:
- ErrorExit("Failed to get status for %s." % filename)
- old_files = out.splitlines()
- args = ["svn", "list"]
- if self.rev_end:
- args += ["-r", self.rev_end]
- cmd = args + [dirname or "."]
- out, returncode = RunShellWithReturnCode(cmd)
- if returncode:
- ErrorExit("Failed to run command %s" % cmd)
- self.svnls_cache[dirname] = (old_files, out.splitlines())
- old_files, new_files = self.svnls_cache[dirname]
- if relfilename in old_files and relfilename not in new_files:
- status = "D "
- elif relfilename in old_files and relfilename in new_files:
- status = "M "
- else:
- status = "A "
- return status
-
- def GetBaseFile(self, filename):
- status = self.GetStatus(filename)
- base_content = None
- new_content = None
-
- # If a file is copied its status will be "A +", which signifies
- # "addition-with-history". See "svn st" for more information. We need to
- # upload the original file or else diff parsing will fail if the file was
- # edited.
- if status[0] == "A" and status[3] != "+":
- # We'll need to upload the new content if we're adding a binary file
- # since diff's output won't contain it.
- mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
- silent_ok=True)
- base_content = ""
- is_binary = mimetype and not mimetype.startswith("text/")
- if is_binary and self.IsImage(filename):
- new_content = self.ReadFile(filename)
- elif (status[0] in ("M", "D", "R") or
- (status[0] == "A" and status[3] == "+") or # Copied file.
- (status[0] == " " and status[1] == "M")): # Property change.
- args = []
- if self.options.revision:
- url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
- else:
- # Don't change filename, it's needed later.
- url = filename
- args += ["-r", "BASE"]
- cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
- mimetype, returncode = RunShellWithReturnCode(cmd)
- if returncode:
- # File does not exist in the requested revision.
- # Reset mimetype, it contains an error message.
- mimetype = ""
- get_base = False
- is_binary = mimetype and not mimetype.startswith("text/")
- if status[0] == " ":
- # Empty base content just to force an upload.
- base_content = ""
- elif is_binary:
- if self.IsImage(filename):
- get_base = True
- if status[0] == "M":
- if not self.rev_end:
- new_content = self.ReadFile(filename)
- else:
- url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
- new_content = RunShell(["svn", "cat", url],
- universal_newlines=True)
- else:
- base_content = ""
- else:
- get_base = True
-
- if get_base:
- if is_binary:
- universal_newlines = False
- else:
- universal_newlines = True
- if self.rev_start:
- # "svn cat -r REV delete_file.txt" doesn't work. cat requires
- # the full URL with "@REV" appended instead of using "-r" option.
- url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
- base_content = RunShell(["svn", "cat", url],
- universal_newlines=universal_newlines)
- else:
- base_content = RunShell(["svn", "cat", filename],
- universal_newlines=universal_newlines)
- if not is_binary:
- args = []
- if self.rev_start:
- url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
- else:
- url = filename
- args += ["-r", "BASE"]
- cmd = ["svn"] + args + ["propget", "svn:keywords", url]
- keywords, returncode = RunShellWithReturnCode(cmd)
- if keywords and not returncode:
- base_content = self._CollapseKeywords(base_content, keywords)
- else:
- StatusUpdate("svn status returned unexpected output: %s" % status)
- sys.exit(1)
- return base_content, new_content, is_binary, status[0:5]
-
-
-class GitVCS(VersionControlSystem):
- """Implementation of the VersionControlSystem interface for Git."""
-
- def __init__(self, options):
- super(GitVCS, self).__init__(options)
- # Map of filename -> hash of base file.
- self.base_hashes = {}
-
- def GenerateDiff(self, extra_args):
- # This is more complicated than svn's GenerateDiff because we must convert
- # the diff output to include an svn-style "Index:" line as well as record
- # the hashes of the base files, so we can upload them along with our diff.
- if self.options.revision:
- extra_args = [self.options.revision] + extra_args
- gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
- svndiff = []
- filecount = 0
- filename = None
- for line in gitdiff.splitlines():
- match = re.match(r"diff --git a/(.*) b/.*$", line)
- if match:
- filecount += 1
- filename = match.group(1)
- svndiff.append("Index: %s\n" % filename)
- else:
- # The "index" line in a git diff looks like this (long hashes elided):
- # index 82c0d44..b2cee3f 100755
- # We want to save the left hash, as that identifies the base file.
- match = re.match(r"index (\w+)\.\.", line)
- if match:
- self.base_hashes[filename] = match.group(1)
- svndiff.append(line + "\n")
- if not filecount:
- ErrorExit("No valid patches found in output from git diff")
- return "".join(svndiff)
-
- def GetUnknownFiles(self):
- status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
- silent_ok=True)
- return status.splitlines()
-
- def GetBaseFile(self, filename):
- hash = self.base_hashes[filename]
- base_content = None
- new_content = None
- is_binary = False
- if hash == "0" * 40: # All-zero hash indicates no base file.
- status = "A"
- base_content = ""
- else:
- status = "M"
- base_content = RunShell(["git", "show", hash])
- return (base_content, new_content, is_binary, status)
-
-
-class MercurialVCS(VersionControlSystem):
- """Implementation of the VersionControlSystem interface for Mercurial."""
-
- def __init__(self, options, repo_dir):
- super(MercurialVCS, self).__init__(options)
- # Absolute path to repository (we can be in a subdir)
- self.repo_dir = os.path.normpath(repo_dir)
- # Compute the subdir
- cwd = os.path.normpath(os.getcwd())
- assert cwd.startswith(self.repo_dir)
- self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
- if self.options.revision:
- self.base_rev = self.options.revision
- else:
- self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
-
- def _GetRelPath(self, filename):
- """Get relative path of a file according to the current directory,
- given its logical path in the repo."""
- assert filename.startswith(self.subdir), filename
- return filename[len(self.subdir):].lstrip(r"\/")
-
- def GenerateDiff(self, extra_args):
- # If no file specified, restrict to the current subdir
- extra_args = extra_args or ["."]
- cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
- data = RunShell(cmd, silent_ok=True)
- svndiff = []
- filecount = 0
- for line in data.splitlines():
- m = re.match("diff --git a/(\S+) b/(\S+)", line)
- if m:
- # Modify line to make it look like as it comes from svn diff.
- # With this modification no changes on the server side are required
- # to make upload.py work with Mercurial repos.
- # NOTE: for proper handling of moved/copied files, we have to use
- # the second filename.
- filename = m.group(2)
- svndiff.append("Index: %s" % filename)
- svndiff.append("=" * 67)
- filecount += 1
- logging.info(line)
- else:
- svndiff.append(line)
- if not filecount:
- ErrorExit("No valid patches found in output from hg diff")
- return "\n".join(svndiff) + "\n"
-
- def GetUnknownFiles(self):
- """Return a list of files unknown to the VCS."""
- args = []
- status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
- silent_ok=True)
- unknown_files = []
- for line in status.splitlines():
- st, fn = line.split(" ", 1)
- if st == "?":
- unknown_files.append(fn)
- return unknown_files
-
- def GetBaseFile(self, filename):
- # "hg status" and "hg cat" both take a path relative to the current subdir
- # rather than to the repo root, but "hg diff" has given us the full path
- # to the repo root.
- base_content = ""
- new_content = None
- is_binary = False
- oldrelpath = relpath = self._GetRelPath(filename)
- # "hg status -C" returns two lines for moved/copied files, one otherwise
- out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
- out = out.splitlines()
- # HACK: strip error message about missing file/directory if it isn't in
- # the working copy
- if out[0].startswith('%s: ' % relpath):
- out = out[1:]
- if len(out) > 1:
- # Moved/copied => considered as modified, use old filename to
- # retrieve base contents
- oldrelpath = out[1].strip()
- status = "M"
- else:
- status, _ = out[0].split(' ', 1)
- if status != "A":
- base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
- silent_ok=True)
- is_binary = "\0" in base_content # Mercurial's heuristic
- if status != "R":
- new_content = open(relpath, "rb").read()
- is_binary = is_binary or "\0" in new_content
- if is_binary and base_content:
- # Fetch again without converting newlines
- base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
- silent_ok=True, universal_newlines=False)
- if not is_binary or not self.IsImage(relpath):
- new_content = None
- return base_content, new_content, is_binary, status
-
-
-# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
-def SplitPatch(data):
- """Splits a patch into separate pieces for each file.
-
- Args:
- data: A string containing the output of svn diff.
-
- Returns:
- A list of 2-tuple (filename, text) where text is the svn diff output
- pertaining to filename.
- """
- patches = []
- filename = None
- diff = []
- for line in data.splitlines(True):
- new_filename = None
- if line.startswith('Index:'):
- unused, new_filename = line.split(':', 1)
- new_filename = new_filename.strip()
- elif line.startswith('Property changes on:'):
- unused, temp_filename = line.split(':', 1)
- # When a file is modified, paths use '/' between directories, however
- # when a property is modified '\' is used on Windows. Make them the same
- # otherwise the file shows up twice.
- temp_filename = temp_filename.strip().replace('\\', '/')
- if temp_filename != filename:
- # File has property changes but no modifications, create a new diff.
- new_filename = temp_filename
- if new_filename:
- if filename and diff:
- patches.append((filename, ''.join(diff)))
- filename = new_filename
- diff = [line]
- continue
- if diff is not None:
- diff.append(line)
- if filename and diff:
- patches.append((filename, ''.join(diff)))
- return patches
-
-
-def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
- """Uploads a separate patch for each file in the diff output.
-
- Returns a list of [patch_key, filename] for each file.
- """
- patches = SplitPatch(data)
- rv = []
- for patch in patches:
- if len(patch[1]) > MAX_UPLOAD_SIZE:
- print ("Not uploading the patch for " + patch[0] +
- " because the file is too large.")
- continue
- form_fields = [("filename", patch[0])]
- if not options.download_base:
- form_fields.append(("content_upload", "1"))
- files = [("data", "data.diff", patch[1])]
- ctype, body = EncodeMultipartFormData(form_fields, files)
- url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
- print "Uploading patch for " + patch[0]
- response_body = rpc_server.Send(url, body, content_type=ctype)
- lines = response_body.splitlines()
- if not lines or lines[0] != "OK":
- StatusUpdate(" --> %s" % response_body)
- sys.exit(1)
- rv.append([lines[1], patch[0]])
- return rv
-
-
-def GuessVCS(options):
- """Helper to guess the version control system.
-
- This examines the current directory, guesses which VersionControlSystem
- we're using, and returns an instance of the appropriate class. Exit with an
- error if we can't figure it out.
-
- Returns:
- A VersionControlSystem instance. Exits if the VCS can't be guessed.
- """
- # Mercurial has a command to get the base directory of a repository
- # Try running it, but don't die if we don't have hg installed.
- # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
- try:
- out, returncode = RunShellWithReturnCode(["hg", "root"])
- if returncode == 0:
- return MercurialVCS(options, out.strip())
- except OSError, (errno, message):
- if errno != 2: # ENOENT -- they don't have hg installed.
- raise
-
- # Subversion has a .svn in all working directories.
- if os.path.isdir('.svn'):
- logging.info("Guessed VCS = Subversion")
- return SubversionVCS(options)
-
- # Git has a command to test if you're in a git tree.
- # Try running it, but don't die if we don't have git installed.
- try:
- out, returncode = RunShellWithReturnCode(["git", "rev-parse",
- "--is-inside-work-tree"])
- if returncode == 0:
- return GitVCS(options)
- except OSError, (errno, message):
- if errno != 2: # ENOENT -- they don't have git installed.
- raise
-
- ErrorExit(("Could not guess version control system. "
- "Are you in a working copy directory?"))
-
-
-def RealMain(argv, data=None):
- logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
- "%(lineno)s %(message)s "))
- os.environ['LC_ALL'] = 'C'
- options, args = parser.parse_args(argv[1:])
- global verbosity
- verbosity = options.verbose
- if verbosity >= 3:
- logging.getLogger().setLevel(logging.DEBUG)
- elif verbosity >= 2:
- logging.getLogger().setLevel(logging.INFO)
- vcs = GuessVCS(options)
- if isinstance(vcs, SubversionVCS):
- # base field is only allowed for Subversion.
- # Note: Fetching base files may become deprecated in future releases.
- base = vcs.GuessBase(options.download_base)
- else:
- base = None
- if not base and options.download_base:
- options.download_base = True
- logging.info("Enabled upload of base file")
- if not options.assume_yes:
- vcs.CheckForUnknownFiles()
- if data is None:
- data = vcs.GenerateDiff(args)
- files = vcs.GetBaseFiles(data)
- if verbosity >= 1:
- print "Upload server:", options.server, "(change with -s/--server)"
- if options.issue:
- prompt = "Message describing this patch set: "
- else:
- prompt = "New issue subject: "
- message = options.message or raw_input(prompt).strip()
- if not message:
- ErrorExit("A non-empty message is required")
- rpc_server = GetRpcServer(options)
- form_fields = [("subject", message)]
- if base:
- form_fields.append(("base", base))
- if options.issue:
- form_fields.append(("issue", str(options.issue)))
- if options.email:
- form_fields.append(("user", options.email))
- if options.reviewers:
- for reviewer in options.reviewers.split(','):
- if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
- ErrorExit("Invalid email address: %s" % reviewer)
- form_fields.append(("reviewers", options.reviewers))
- if options.cc:
- for cc in options.cc.split(','):
- if "@" in cc and not cc.split("@")[1].count(".") == 1:
- ErrorExit("Invalid email address: %s" % cc)
- form_fields.append(("cc", options.cc))
- description = options.description
- if options.description_file:
- if options.description:
- ErrorExit("Can't specify description and description_file")
- file = open(options.description_file, 'r')
- description = file.read()
- file.close()
- if description:
- form_fields.append(("description", description))
- # Send a hash of all the base file so the server can determine if a copy
- # already exists in an earlier patchset.
- base_hashes = ""
- for file, info in files.iteritems():
- if not info[0] is None:
- checksum = md5.new(info[0]).hexdigest()
- if base_hashes:
- base_hashes += "|"
- base_hashes += checksum + ":" + file
- form_fields.append(("base_hashes", base_hashes))
- # If we're uploading base files, don't send the email before the uploads, so
- # that it contains the file status.
- if options.send_mail and options.download_base:
- form_fields.append(("send_mail", "1"))
- if not options.download_base:
- form_fields.append(("content_upload", "1"))
- if len(data) > MAX_UPLOAD_SIZE:
- print "Patch is large, so uploading file patches separately."
- uploaded_diff_file = []
- form_fields.append(("separate_patches", "1"))
- else:
- uploaded_diff_file = [("data", "data.diff", data)]
- ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
- response_body = rpc_server.Send("/upload", body, content_type=ctype)
- if not options.download_base or not uploaded_diff_file:
- lines = response_body.splitlines()
- if len(lines) >= 2:
- msg = lines[0]
- patchset = lines[1].strip()
- patches = [x.split(" ", 1) for x in lines[2:]]
- else:
- msg = response_body
- else:
- msg = response_body
- StatusUpdate(msg)
- if not response_body.startswith("Issue created.") and \
- not response_body.startswith("Issue updated."):
- sys.exit(0)
- issue = msg[msg.rfind("/")+1:]
-
- if not uploaded_diff_file:
- result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
- if not options.download_base:
- patches = result
-
- if not options.download_base:
- vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
- if options.send_mail:
- rpc_server.Send("/" + issue + "/mail", payload="")
- return issue
-
-
-def main():
- try:
- RealMain(sys.argv)
- except KeyboardInterrupt:
- print
- StatusUpdate("Interrupted.")
- sys.exit(1)
-
-
-if __name__ == "__main__":
- main()
diff --git a/thirdparty/svn/svn_load_dirs.pl b/thirdparty/svn/svn_load_dirs.pl
deleted file mode 100755
index 388ce68..0000000
--- a/thirdparty/svn/svn_load_dirs.pl
+++ /dev/null
@@ -1,2043 +0,0 @@
-#!/usr/bin/perl -w
-
-# $HeadURL$
-# $LastChangedDate$
-# $LastChangedBy$
-# $LastChangedRevision$
-
-$| = 1;
-
-use strict;
-use Carp;
-use Cwd;
-use Digest::MD5 2.20;
-use File::Copy 2.03;
-use File::Find;
-use File::Path 1.0404;
-use File::Temp 0.12 qw(tempdir tempfile);
-use Getopt::Long 2.25;
-use Text::Wrap;
-use URI 1.17;
-use English;
-
-$Text::Wrap::columns = 72;
-
-# Specify the location of the svn command.
-my $svn = 'svn';
-
-# Process the command line options.
-
-# The base URL for the portion of the repository to work in. Note
-# that this does not have to be the root of the subversion repository,
-# it can point to a subdirectory in the repository.
-my $repos_base_url;
-
-# The relative path from the repository base URL to work in to the
-# directory to load the input directories into.
-my $repos_load_rel_path;
-
-# To specify where tags, which are simply copies of the imported
-# directory, should be placed relative to the repository base URL, use
-# the -t command line option. This value must contain regular
-# expressions that match portions of the input directory names to
-# create an unique tag for each input directory. The regular
-# expressions are surrounded by a specified character to distinguish
-# the regular expression from the normal directory path.
-my $opt_import_tag_location;
-
-# Do not ask for any user input. Just go ahead and do everything.
-my $opt_no_user_input;
-
-# Do not automatically set the svn:executable property based on the
-# file's exe bit.
-my $opt_no_auto_exe;
-
-# Username to use for commits.
-my $opt_svn_username;
-
-# Password to use for commits.
-my $opt_svn_password;
-
-# Verbosity level.
-my $opt_verbose;
-
-# Path to already checked-out working copy.
-my $opt_existing_wc_dir;
-
-# List of filename patterns to ignore (as in .subversion/config's
-# "global-ignores" option).
-my $opt_glob_ignores;
-
-# This is the character used to separate regular expressions occuring
-# in the tag directory path from the path itself.
-my $REGEX_SEP_CHAR = '@';
-
-# This specifies a configuration file that contains a list of regular
-# expressions to check against a file and the properties to set on
-# matching files.
-my $property_config_filename;
-
-GetOptions('no_user_input' => \$opt_no_user_input,
- 'no_auto_exe' => \$opt_no_auto_exe,
- 'property_cfg_filename=s' => \$property_config_filename,
- 'svn_password=s' => \$opt_svn_password,
- 'svn_username=s' => \$opt_svn_username,
- 'tag_location=s' => \$opt_import_tag_location,
- 'verbose+' => \$opt_verbose,
- 'wc=s' => \$opt_existing_wc_dir,
- 'glob_ignores=s' => \$opt_glob_ignores)
- or &usage;
-&usage("$0: too few arguments") if @ARGV < 2;
-
-$repos_base_url = shift;
-$repos_load_rel_path = shift;
-
-# Check that the repository base URL and the import directories do not
-# contain any ..'s.
-if ($repos_base_url =~ /\.{2}/)
- {
- die "$0: repos base URL $repos_base_url cannot contain ..'s.\n";
- }
-if ($repos_load_rel_path =~ /\.{2}/)
- {
- die "$0: repos import relative directory path $repos_load_rel_path ",
- "cannot contain ..'s.\n";
- }
-
-# If there are no directories listed on the command line, then the
-# directories are read from standard input. In this case, the
-# -no_user_input command line option must be specified.
-if (!@ARGV and !$opt_no_user_input)
- {
- &usage("$0: must use -no_user_input if no dirs listed on command line.");
- }
-
-# The tag option cannot be used when directories are read from
-# standard input because tags may collide and no user input can be
-# taken to verify that the input is ok.
-if (!@ARGV and $opt_import_tag_location)
- {
- &usage("$0: cannot use -tag_location when dirs are read from stdin.");
- }
-
-# If the tag directory is set, then the import directory cannot be '.'.
-if (defined $opt_import_tag_location and $repos_load_rel_path eq '.')
- {
- &usage("$0: cannot set import_dir to '.' and use -t command line option.");
- }
-
-# Set the svn command line options that are used anytime svn connects
-# to the repository.
-my @svn_use_repos_cmd_opts;
-&set_svn_use_repos_cmd_opts($opt_svn_username, $opt_svn_password);
-
-# Check that the tag directories do not contain any ..'s. Also, the
-# import and tag directories cannot be absolute.
-if (defined $opt_import_tag_location and $opt_import_tag_location =~ /\.{2}/)
- {
- die "$0: repos tag relative directory path $opt_import_tag_location ",
- "cannot contain ..'s.\n";
- }
-if ($repos_load_rel_path =~ m|^/|)
- {
- die "$0: repos import relative directory path $repos_load_rel_path ",
- "cannot start with /.\n";
- }
-if (defined $opt_import_tag_location and $opt_import_tag_location =~ m|^/|)
- {
- die "$0: repos tagrelative directory path $opt_import_tag_location ",
- "cannot start with /.\n";
- }
-
-if (defined $opt_existing_wc_dir)
- {
- unless (-e $opt_existing_wc_dir)
- {
- die "$0: working copy '$opt_existing_wc_dir' does not exist.\n";
- }
-
- unless (-d _)
- {
- die "$0: working copy '$opt_existing_wc_dir' is not a directory.\n";
- }
-
- unless (-d "$opt_existing_wc_dir/.svn")
- {
- die "$0: working copy '$opt_existing_wc_dir' does not have .svn ",
- "directory.\n";
- }
-
- $opt_existing_wc_dir = Cwd::abs_path($opt_existing_wc_dir)
- }
-
-# If no glob_ignores specified, try to deduce from config file,
-# or use the default below.
-my $ignores_str =
- '*.o *.lo *.la #*# .*.rej *.rej .*~ *~ .#* .DS_Store';
-
-if ( defined $opt_glob_ignores)
- {
- $ignores_str = $opt_glob_ignores;
- }
-elsif ( -f "$ENV{HOME}/.subversion/config" )
- {
- open my $conf, "$ENV{HOME}/.subversion/config";
- while (<$conf>)
- {
- if ( /^global-ignores\s*=\s*(.*?)\s*$/ )
- {
- $ignores_str = $1;
- last;
- }
- }
- }
-
-my @glob_ignores = map
- {
- s/\./\\\./g; s/\*/\.\*/g; "^$_\$";
- } split(/\s+/, $ignores_str);
-unshift @glob_ignores, '\.svn$';
-
-# Convert the string URL into a URI object.
-$repos_base_url =~ s|/*$||;
-my $repos_base_uri = URI->new($repos_base_url);
-
-# Check that $repos_load_rel_path is not a directory here implying
-# that a command line option was forgotten.
-if ($repos_load_rel_path ne '.' and -d $repos_load_rel_path)
- {
- die "$0: import_dir '$repos_load_rel_path' is a directory.\n";
- }
-
-# The remaining command line arguments should be directories. Check
-# that they all exist and that there are no duplicates.
-if (@ARGV)
- {
- my %dirs;
- foreach my $dir (@ARGV)
- {
- unless (-e $dir)
- {
- die "$0: directory '$dir' does not exist.\n";
- }
-
- unless (-d _)
- {
- die "$0: directory '$dir' is not a directory.\n";
- }
-
- if ($dirs{$dir})
- {
- die "$0: directory '$dir' is listed more than once on command ",
- "line.\n";
- }
- $dirs{$dir} = 1;
- }
- }
-
-# Create the tag locations and print them for the user to review.
-# Check that there are no duplicate tags.
-my %load_tags;
-if (@ARGV and defined $opt_import_tag_location)
- {
- my %seen_tags;
-
- foreach my $load_dir (@ARGV)
- {
- my $load_tag = &get_tag_dir($load_dir);
-
- print "Directory $load_dir will be tagged as $load_tag\n";
-
- if ($seen_tags{$load_tag})
- {
- die "$0: duplicate tag generated.\n";
- }
- $seen_tags{$load_tag} = 1;
-
- $load_tags{$load_dir} = $load_tag;
- }
-
- exit 0 unless &get_answer("Please examine identified tags. Are they " .
- "acceptable? (Y/n) ", 'ny', 1);
- print "\n";
- }
-
-# Load the property configuration filename, if one was specified, into
-# an array of hashes, where each hash contains a regular expression
-# and a property to apply to the file if the regular expression
-# matches.
-my @property_settings;
-if (defined $property_config_filename and length $property_config_filename)
- {
- open(CFG, $property_config_filename)
- or die "$0: cannot open '$property_config_filename' for reading: $!\n";
-
- my $ok = 1;
-
- while (my $line = <CFG>)
- {
- next if $line =~ /^\s*$/;
- next if $line =~ /^\s*#/;
-
- # Split the input line into words taking into account that
- # single or double quotes may define a single word with
- # whitespace in it. The format for the file is
- # regex control property_name property_value
- my @line = &split_line($line);
- next if @line == 0;
-
- unless (@line == 2 or @line == 4)
- {
- warn "$0: line $. of '$property_config_filename' has to have 2 ",
- "or 4 columns.\n";
- $ok = 0;
- next;
- }
- my ($regex, $control, $property_name, $property_value) = @line;
-
- unless ($control eq 'break' or $control eq 'cont')
- {
- warn "$0: line $. of '$property_config_filename' has illegal ",
- "value for column 3 '$control', must be 'break' or 'cont'.\n";
- $ok = 0;
- next;
- }
-
- # Compile the regular expression.
- my $re;
- eval { $re = qr/$regex/i };
- if ($@)
- {
- warn "$0: line $. of '$property_config_filename' regex '$regex' ",
- "does not compile:\n$@\n";
- $ok = 0;
- next;
- }
-
- push(@property_settings, {name => $property_name,
- value => $property_value,
- control => $control,
- re => $re});
- }
- close(CFG)
- or warn "$0: error in closing '$property_config_filename' for ",
- "reading: $!\n";
-
- exit 1 unless $ok;
- }
-
-# Check that the svn base URL works by running svn log on it. Only
-# get the HEAD revision log message; there's no need to waste
-# bandwidth seeing all of the log messages.
-print "Checking that the base URL is a Subversion repository.\n";
-read_from_process($svn, 'log', '-r', 'HEAD',
- @svn_use_repos_cmd_opts, $repos_base_uri);
-print "\n";
-
-my $orig_cwd = cwd;
-
-# The first step is to determine the root of the svn repository. Do
-# this with the svn log command. Take the svn_url hostname and port
-# as the initial url and append to it successive portions of the final
-# path until svn log succeeds.
-print "Finding the root URL of the Subversion repository.\n";
-my $repos_root_uri;
-my $repos_root_uri_path;
-my $repos_base_path_segment;
-{
- my $r = $repos_base_uri->clone;
- my @path_segments = grep { length($_) } $r->path_segments;
- my @repos_base_path_segments = @path_segments;
- unshift(@path_segments, '');
- $r->path('');
- my @r_path_segments;
-
- while (@path_segments)
- {
- $repos_root_uri_path = shift @path_segments;
- push(@r_path_segments, $repos_root_uri_path);
- $r->path_segments(@r_path_segments);
- if (safe_read_from_pipe($svn, 'log', '-r', 'HEAD',
- @svn_use_repos_cmd_opts, $r) == 0)
- {
- $repos_root_uri = $r;
- last;
- }
- shift @repos_base_path_segments;
- }
- $repos_base_path_segment = join('/', @repos_base_path_segments);
-}
-
-if ($repos_root_uri)
- {
- print "Determined that the svn root URL is $repos_root_uri.\n\n";
- }
-else
- {
- die "$0: cannot determine root svn URL.\n";
- }
-
-# Create a temporary directory for svn to work in.
-my $temp_dir = tempdir( "svn_load_dirs_XXXXXXXXXX", TMPDIR => 1 );
-
-# Put in a signal handler to clean up any temporary directories.
-sub catch_signal {
- my $signal = shift;
- warn "$0: caught signal $signal. Quitting now.\n";
- exit 1;
-}
-
-$SIG{HUP} = \&catch_signal;
-$SIG{INT} = \&catch_signal;
-$SIG{TERM} = \&catch_signal;
-$SIG{PIPE} = \&catch_signal;
-
-# Create an object that when DESTROY'ed will delete the temporary
-# directory. The CLEANUP flag to tempdir should do this, but they
-# call rmtree with 1 as the last argument which takes extra security
-# measures that do not clean up the .svn directories.
-my $temp_dir_cleanup = Temp::Delete->new;
-
-# Determine the native end of line style for this system. Do this the
-# most portable way, by writing a file with a single \n in non-binary
-# mode and then reading the file in binary mode.
-my $native_eol = &determine_native_eol;
-
-# Check if all the directories exist to load the directories into the
-# repository. If not, ask if they should be created. For tags, do
-# not create the tag directory itself, that is done on the svn cp.
-{
- print "Finding if any directories need to be created in repository.\n";
-
- my @dirs_to_create;
- my @urls_to_create;
- my %seen_dir;
- my @load_tags_without_last_segment;
-
- # Assume that the last portion of the tag directory contains the
- # version number and remove it from the directories to create,
- # because the tag directory will be created by svn cp.
- foreach my $load_tag (sort values %load_tags)
- {
- # Skip this tag if there is only one segment in its name.
- my $index = rindex($load_tag, '/');
- next if $index == -1;
-
- # Trim off the last segment and record the result.
- push(@load_tags_without_last_segment, substr($load_tag, 0, $index));
- }
-
- foreach my $dir ($repos_load_rel_path, @load_tags_without_last_segment)
- {
- next unless length $dir;
- my $d = '';
- foreach my $segment (split('/', $dir))
- {
- $d = length $d ? "$d/$segment" : $segment;
- my $url = "$repos_base_url/$d";
- unless ($seen_dir{$d})
- {
- $seen_dir{$d} = 1;
- if (safe_read_from_pipe($svn, 'log', '-r', 'HEAD',
- @svn_use_repos_cmd_opts, $url) != 0)
- {
- push(@dirs_to_create, $d);
- push(@urls_to_create, $url);
- }
- }
- }
- }
-
- if (@dirs_to_create)
- {
- print "The following directories do not exist and need to exist:\n";
- foreach my $dir (@dirs_to_create)
- {
- print " $dir\n";
- }
- exit 0 unless &get_answer("You must add them now to load the " .
- "directories. Continue (Y/n)? ", 'ny', 1);
-
- my $message = "Create directories to load project into.\n\n";
-
- foreach my $dir (@dirs_to_create)
- {
- if (length $repos_base_path_segment)
- {
- $message .= "* $repos_base_path_segment/$dir: New directory.\n";
- }
- else
- {
- $message .= "* $dir: New directory.\n";
- }
- }
- $message = wrap('', ' ', $message);
-
- read_from_process($svn, 'mkdir', @svn_use_repos_cmd_opts,
- '-m', $message, @urls_to_create);
- }
- else
- {
- print "No directories need to be created to prepare repository.\n";
- }
-}
-
-# Either checkout a new working copy from the repository or use an
-# existing working copy.
-if (defined $opt_existing_wc_dir)
- {
- # Update an already existing working copy.
- print "Not checking out anything; using existing working directory at\n";
- print "$opt_existing_wc_dir\n";
-
- chdir($opt_existing_wc_dir)
- or die "$0: cannot chdir '$opt_existing_wc_dir': $!\n";
-
- read_from_process($svn, 'update', @svn_use_repos_cmd_opts);
- }
-else
- {
- # Check out the svn repository starting at the svn URL into a
- # fixed directory name.
- my $checkout_dir_name = 'my_import_wc';
-
- # Check out only the directory being imported to, otherwise the
- # checkout of the entire base URL can be very huge, if it contains
- # a large number of tags.
- my $checkout_url;
- if ($repos_load_rel_path eq '.')
- {
- $checkout_url = $repos_base_url;
- }
- else
- {
- $checkout_url = "$repos_base_url/$repos_load_rel_path";
- }
-
- print "Checking out $checkout_url into $temp_dir/$checkout_dir_name\n";
-
- chdir($temp_dir)
- or die "$0: cannot chdir '$temp_dir': $!\n";
-
- read_from_process($svn, 'checkout',
- @svn_use_repos_cmd_opts,
- $checkout_url, $checkout_dir_name);
-
- chdir($checkout_dir_name)
- or die "$0: cannot chdir '$checkout_dir_name': $!\n";
- }
-
-# At this point, the current working directory is the top level
-# directory of the working copy. Record the absolute path to this
-# location because the script will chdir back here later on.
-my $wc_import_dir_cwd = cwd;
-
-# Set up the names for the path to the import and tag directories.
-my $repos_load_abs_path;
-if ($repos_load_rel_path eq '.')
- {
- $repos_load_abs_path = length($repos_base_path_segment) ?
- $repos_base_path_segment : "/";
- }
-else
- {
- $repos_load_abs_path = length($repos_base_path_segment) ?
- "$repos_base_path_segment/$repos_load_rel_path" :
- $repos_load_rel_path;
- }
-
-# Now go through each source directory and copy each file from the
-# source directory to the target directory. For new target files, add
-# them to svn. For files that no longer exist, delete them.
-my $print_rename_message = 1;
-my @load_dirs = @ARGV;
-while (defined (my $load_dir = &get_next_load_dir))
- {
- my $load_tag = $load_tags{$load_dir};
-
- if (defined $load_tag)
- {
- print "\nLoading $load_dir and will save in tag $load_tag.\n";
- }
- else
- {
- print "\nLoading $load_dir.\n";
- }
-
- # The first hash is keyed by the old name in a rename and the
- # second by the new name. The last variable contains a list of
- # old and new filenames in a rename.
- my %rename_from_files;
- my %rename_to_files;
- my @renamed_filenames;
-
- unless ($opt_no_user_input)
- {
- my $repeat_loop;
- do
- {
- $repeat_loop = 0;
-
- my %add_files;
- my %del_files;
-
- # Get the list of files and directories in the repository
- # working copy. This hash is called %del_files because
- # each file or directory will be deleted from the hash
- # using the list of files and directories in the source
- # directory, leaving the files and directories that need
- # to be deleted.
- %del_files = &recursive_ls_and_hash($wc_import_dir_cwd);
-
- # This anonymous subroutine finds all the files and
- # directories in the directory to load. It notes the file
- # type and for each file found, it deletes it from
- # %del_files.
- my $wanted = sub
- {
- s#^\./##;
- return if $_ eq '.';
-
- my $source_path = $_;
- my $dest_path = "$wc_import_dir_cwd/$_";
-
- my ($source_type) = &file_info($source_path);
- my ($dest_type) = &file_info($dest_path);
-
- # Fail if the destination type exists but is of a
- # different type of file than the source type.
- if ($dest_type ne '0' and $source_type ne $dest_type)
- {
- die "$0: does not handle changing source and destination ",
- "type for '$source_path'.\n";
- }
-
- if ($source_type ne 'd' and
- $source_type ne 'f' and
- $source_type ne 'l')
- {
- warn "$0: skipping loading file '$source_path' of type ",
- "'$source_type'.\n";
- unless ($opt_no_user_input)
- {
- print STDERR "Press return to continue: ";
- <STDIN>;
- }
- return;
- }
-
- unless (defined delete $del_files{$source_path})
- {
- $add_files{$source_path}{type} = $source_type;
- }
- };
-
- # Now change into the directory containing the files to
- # load. First change to the original directory where this
- # script was run so that if the specified directory is a
- # relative directory path, then the script can change into
- # it.
- chdir($orig_cwd)
- or die "$0: cannot chdir '$orig_cwd': $!\n";
- chdir($load_dir)
- or die "$0: cannot chdir '$load_dir': $!\n";
-
- find({no_chdir => 1,
- preprocess => sub { sort { $b cmp $a }
- grep { $_ !~ /^[._]svn$/ } @_ },
- wanted => $wanted
- }, '.');
-
- # At this point %add_files contains the list of new files
- # and directories to be created in the working copy tree
- # and %del_files contains the files and directories that
- # need to be deleted. Because there may be renames that
- # have taken place, give the user the opportunity to
- # rename any deleted files and directories to ones being
- # added.
- my @add_files = sort keys %add_files;
- my @del_files = sort keys %del_files;
-
- # Because the source code management system may keep the
- # original renamed file or directory in the working copy
- # until a commit, remove them from the list of deleted
- # files or directories.
- &filter_renamed_files(\@del_files, \%rename_from_files);
-
- # Now change into the working copy directory in case any
- # renames need to be performed.
- chdir($wc_import_dir_cwd)
- or die "$0: cannot chdir '$wc_import_dir_cwd': $!\n";
-
- # Only do renames if there are both added and deleted
- # files and directories.
- if (@add_files and @del_files)
- {
- my $max = @add_files > @del_files ? @add_files : @del_files;
-
- # Print the files that have been added and deleted.
- # Find the deleted file with the longest name and use
- # that for the width of the filename column. Add one
- # to the filename width to let the directory /
- # character be appended to a directory name.
- my $line_number_width = 4;
- my $filename_width = 0;
- foreach my $f (@del_files)
- {
- my $l = length($f);
- $filename_width = $l if $l > $filename_width;
- }
- ++$filename_width;
- my $printf_format = "%${line_number_width}d";
-
- if ($print_rename_message)
- {
- $print_rename_message = 0;
- print "\n",
- "The following table lists files and directories that\n",
- "exist in either the Subversion repository or the\n",
- "directory to be imported but not both. You now have\n",
- "the opportunity to match them up as renames instead\n",
- "of deletes and adds. This is a Good Thing as it'll\n",
- "make the repository take less space.\n\n",
- "The left column lists files and directories that\n",
- "exist in the Subversion repository and do not exist\n",
- "in the directory being imported. The right column\n",
- "lists files and directories that exist in the\n",
- "directory being imported. Match up a deleted item\n",
- "from the left column with an added item from the\n",
- "right column. Note the line numbers on the left\n",
- "which you type into this script to have a rename\n",
- "performed.\n";
- }
-
- # Sort the added and deleted files and directories by
- # the lowercase versions of their basenames instead of
- # their complete path, which makes finding files that
- # were moved into different directories easier to
- # match up.
- @add_files = map { $_->[0] }
- sort { $a->[1] cmp $b->[1] }
- map { [$_->[0], lc($_->[1])] }
- map { [$_, m#([^/]+)$#] }
- @add_files;
- @del_files = map { $_->[0] }
- sort { $a->[1] cmp $b->[1] }
- map { [$_->[0], lc($_->[1])] }
- map { [$_, m#([^/]+)$#] }
- @del_files;
-
- RELIST:
-
- for (my $i=0; $i<$max; ++$i)
- {
- my $add_filename = '';
- my $del_filename = '';
- if ($i < @add_files)
- {
- $add_filename = $add_files[$i];
- if ($add_files{$add_filename}{type} eq 'd')
- {
- $add_filename .= '/';
- }
- }
- if ($i < @del_files)
- {
- $del_filename = $del_files[$i];
- if ($del_files{$del_filename}{type} eq 'd')
- {
- $del_filename .= '/';
- }
- }
-
- if ($i % 22 == 0)
- {
- print
- "\n",
- " " x $line_number_width,
- " ",
- "Deleted", " " x ($filename_width-length("Deleted")),
- " ",
- "Added\n";
- }
-
- printf $printf_format, $i;
- print " ", $del_filename,
- "_" x ($filename_width - length($del_filename)),
- " ", $add_filename, "\n";
-
- if (($i+1) % 22 == 0)
- {
- unless (&get_answer("Continue printing (Y/n)? ",
- 'ny', 1))
- {
- last;
- }
- }
- }
-
- # Get the feedback from the user.
- my $line;
- my $add_filename;
- my $add_index;
- my $del_filename;
- my $del_index;
- my $got_line = 0;
- do {
- print "Enter two indexes for each column to rename, ",
- "(R)elist, or (F)inish: ";
- $line = <STDIN>;
- $line = '' unless defined $line;
- if ($line =~ /^R$/i )
- {
- goto RELIST;
- }
-
- if ($line =~ /^F$/i)
- {
- $got_line = 1;
- }
- elsif ($line =~ /^(\d+)\s+(\d+)$/)
- {
- print "\n";
-
- $del_index = $1;
- $add_index = $2;
- if ($del_index >= @del_files)
- {
- print "Delete index $del_index is larger than ",
- "maximum index of ", scalar @del_files - 1,
- ".\n";
- $del_index = undef;
- }
- if ($add_index > @add_files)
- {
- print "Add index $add_index is larger than maximum ",
- "index of ", scalar @add_files - 1, ".\n";
- $add_index = undef;
- }
- $got_line = defined $del_index && defined $add_index;
-
- # Check that the file or directory to be renamed
- # has the same file type.
- if ($got_line)
- {
- $add_filename = $add_files[$add_index];
- $del_filename = $del_files[$del_index];
- if ($add_files{$add_filename}{type} ne
- $del_files{$del_filename}{type})
- {
- print "File types for $del_filename and ",
- "$add_filename differ.\n";
- $got_line = undef;
- }
- }
- }
- } until ($got_line);
-
- if ($line !~ /^F$/i)
- {
- print "Renaming $del_filename to $add_filename.\n";
-
- $repeat_loop = 1;
-
- # Because subversion cannot rename the same file
- # or directory twice, which includes doing a
- # rename of a file in a directory that was
- # previously renamed, a commit has to be
- # performed. Check if the file or directory being
- # renamed now would cause such a problem and
- # commit if so.
- my $do_commit_now = 0;
- foreach my $rename_to_filename (keys %rename_to_files)
- {
- if (contained_in($del_filename,
- $rename_to_filename,
- $rename_to_files{$rename_to_filename}{type}))
- {
- $do_commit_now = 1;
- last;
- }
- }
-
- if ($do_commit_now)
- {
- print "Now committing previously run renames.\n";
- &commit_renames($load_dir,
- \@renamed_filenames,
- \%rename_from_files,
- \%rename_to_files);
- }
-
- push(@renamed_filenames, $del_filename, $add_filename);
- {
- my $d = $del_files{$del_filename};
- $rename_from_files{$del_filename} = $d;
- $rename_to_files{$add_filename} = $d;
- }
-
- # Check that any required directories to do the
- # rename exist.
- my @add_segments = split('/', $add_filename);
- pop(@add_segments);
- my $add_dir = '';
- my @add_dirs;
- foreach my $segment (@add_segments)
- {
- $add_dir = length($add_dir) ? "$add_dir/$segment" :
- $segment;
- unless (-d $add_dir)
- {
- push(@add_dirs, $add_dir);
- }
- }
-
- if (@add_dirs)
- {
- read_from_process($svn, 'mkdir', @add_dirs);
- }
-
- read_from_process($svn, 'mv',
- $del_filename, $add_filename);
- }
- }
- } while ($repeat_loop);
- }
-
- # If there are any renames that have not been committed, then do
- # that now.
- if (@renamed_filenames)
- {
- &commit_renames($load_dir,
- \@renamed_filenames,
- \%rename_from_files,
- \%rename_to_files);
- }
-
- # At this point all renames have been performed. Now get the
- # final list of files and directories in the working copy
- # directory. The %add_files hash will contain the list of files
- # and directories to add to the working copy and %del_files starts
- # with all the files already in the working copy and gets files
- # removed that are in the imported directory, which results in a
- # list of files that should be deleted. %upd_files holds the list
- # of files that have been updated.
- my %add_files;
- my %del_files = &recursive_ls_and_hash($wc_import_dir_cwd);
- my %upd_files;
-
- # This anonymous subroutine copies files from the source directory
- # to the working copy directory.
- my $wanted = sub
- {
- s#^\./##;
- return if $_ eq '.';
-
- my $source_path = $_;
- my $dest_path = "$wc_import_dir_cwd/$_";
-
- my ($source_type, $source_is_exe) = &file_info($source_path);
- my ($dest_type) = &file_info($dest_path);
-
- return if ($source_type ne 'd' and
- $source_type ne 'f' and
- $source_type ne 'l');
-
- # Fail if the destination type exists but is of a different
- # type of file than the source type.
- if ($dest_type ne '0' and $source_type ne $dest_type)
- {
- die "$0: does not handle changing source and destination type ",
- "for '$source_path'.\n";
- }
-
- # Determine if the file is being added or is an update to an
- # already existing file using the file's digest.
- my $del_info = delete $del_files{$source_path};
- if (defined $del_info)
- {
- if (defined (my $del_digest = $del_info->{digest}))
- {
- my $new_digest = &digest_hash_file($source_path);
- if ($new_digest ne $del_digest)
- {
- print "U $source_path\n";
- $upd_files{$source_path} = $del_info;
- }
- }
- }
- else
- {
- print "A $source_path\n";
- $add_files{$source_path}{type} = $source_type;
-
- # Create an array reference to hold the list of properties
- # to apply to this object.
- unless (defined $add_files{$source_path}{properties})
- {
- $add_files{$source_path}{properties} = [];
- }
-
- # Go through the list of properties for a match on this
- # file or directory and if there is a match, then apply
- # the property to it.
- foreach my $property (@property_settings)
- {
- my $re = $property->{re};
- if ($source_path =~ $re)
- {
- my $property_name = $property->{name};
- my $property_value = $property->{value};
-
- # The property value may not be set in the
- # configuration file, since the user may just want
- # to set the control flag.
- if (defined $property_name and defined $property_value)
- {
- # Ignore properties that do not apply to
- # directories.
- if ($source_type eq 'd')
- {
- if ($property_name eq 'svn:eol-style' or
- $property_name eq 'svn:executable' or
- $property_name eq 'svn:keywords' or
- $property_name eq 'svn:mime-type')
- {
- next;
- }
- }
-
- # Ignore properties that do not apply to
- # files.
- if ($source_type eq 'f')
- {
- if ($property_name eq 'svn:externals' or
- $property_name eq 'svn:ignore')
- {
- next;
- }
- }
-
- print "Adding to '$source_path' property ",
- "'$property_name' with value ",
- "'$property_value'.\n";
-
- push(@{$add_files{$source_path}{properties}},
- $property);
- }
-
- last if $property->{control} eq 'break';
- }
- }
- }
-
- # Add svn:executable to files that have their executable bit
- # set.
- if ($source_is_exe and !$opt_no_auto_exe)
- {
- print "Adding to '$source_path' property 'svn:executable' with ",
- "value '*'.\n";
- my $property = {name => 'svn:executable', value => '*'};
- push (@{$add_files{$source_path}{properties}},
- $property);
- }
-
- # Now make sure the file or directory in the source directory
- # exists in the repository.
- if ($source_type eq 'd')
- {
- if ($dest_type eq '0')
- {
- mkdir($dest_path)
- or die "$0: cannot mkdir '$dest_path': $!\n";
- }
- }
- elsif
- ($source_type eq 'l') {
- my $link_target = readlink($source_path)
- or die "$0: cannot readlink '$source_path': $!\n";
- if ($dest_type eq 'l')
- {
- my $old_target = readlink($dest_path)
- or die "$0: cannot readlink '$dest_path': $!\n";
- return if ($old_target eq $link_target);
- unlink($dest_path)
- or die "$0: unlink '$dest_path' failed: $!\n";
- }
- symlink($link_target, $dest_path)
- or die "$0: cannot symlink '$dest_path' to '$link_target': $!\n";
- }
- elsif
- ($source_type eq 'f') {
- # Only copy the file if the digests do not match.
- if ($add_files{$source_path} or $upd_files{$source_path})
- {
- copy($source_path, $dest_path)
- or die "$0: copy '$source_path' to '$dest_path': $!\n";
- }
- }
- else
- {
- die "$0: does not handle copying files of type '$source_type'.\n";
- }
- };
-
- # Now change into the directory containing the files to load.
- # First change to the original directory where this script was run
- # so that if the specified directory is a relative directory path,
- # then the script can change into it.
- chdir($orig_cwd)
- or die "$0: cannot chdir '$orig_cwd': $!\n";
- chdir($load_dir)
- or die "$0: cannot chdir '$load_dir': $!\n";
-
- find({no_chdir => 1,
- preprocess => sub { sort { $b cmp $a }
- grep { $_ !~ /^[._]svn$/ } @_ },
- wanted => $wanted
- }, '.');
-
- # The files and directories that are in %del_files are the files
- # and directories that need to be deleted. Because svn will
- # return an error if a file or directory is deleted in a directory
- # that subsequently is deleted, first find all directories and
- # remove from the list any files and directories inside those
- # directories from this list. Work through the list repeatedly
- # working from short to long names so that directories containing
- # other files and directories will be deleted first.
- my $repeat_loop;
- do
- {
- $repeat_loop = 0;
- my @del_files = sort {length($a) <=> length($b) || $a cmp $b}
- keys %del_files;
- &filter_renamed_files(\@del_files, \%rename_from_files);
- foreach my $file (@del_files)
- {
- if ($del_files{$file}{type} eq 'd')
- {
- my $dir = "$file/";
- my $dir_length = length($dir);
- foreach my $f (@del_files)
- {
- next if $file eq $f;
- if (length($f) >= $dir_length and
- substr($f, 0, $dir_length) eq $dir)
- {
- print "d $f\n";
- delete $del_files{$f};
- $repeat_loop = 1;
- }
- }
-
- # If there were any deletions of files and/or
- # directories inside a directory that will be deleted,
- # then restart the entire loop again, because one or
- # more keys have been deleted from %del_files.
- # Equally important is not to stop this loop if no
- # deletions have been done, otherwise later
- # directories that may contain files and directories
- # to be deleted will not be deleted.
- last if $repeat_loop;
- }
- }
- } while ($repeat_loop);
-
- # What is left are files that are not in any directories to be
- # deleted and directories to be deleted. To delete the files,
- # deeper files and directories must be deleted first. Because we
- # have a hash keyed by remaining files and directories to be
- # deleted, instead of trying to figure out which directories and
- # files are contained in other directories, just reverse sort by
- # the path length and then alphabetically.
- my @del_files = sort {length($b) <=> length($a) || $a cmp $b }
- keys %del_files;
- &filter_renamed_files(\@del_files, \%rename_from_files);
- foreach my $file (@del_files)
- {
- print "D $file\n";
- }
-
- # Now change back to the trunk directory and run the svn commands.
- chdir($wc_import_dir_cwd)
- or die "$0: cannot chdir '$wc_import_dir_cwd': $!\n";
-
- # If any of the added files have the svn:eol-style property set,
- # then pass -b to diff, otherwise diff may fail because the end of
- # lines have changed and the source file and file in the
- # repository will not be identical.
- my @diff_ignore_space_changes;
-
- if (keys %add_files)
- {
- my @add_files = sort {length($a) <=> length($b) || $a cmp $b}
- keys %add_files;
- my $target_filename = &make_targets_file(@add_files);
- read_from_process($svn, 'add', '-N', '--targets', $target_filename);
- unlink($target_filename);
-
- # Add properties on the added files.
- foreach my $add_file (@add_files)
- {
- foreach my $property (@{$add_files{$add_file}{properties}})
- {
- my $property_name = $property->{name};
- my $property_value = $property->{value};
-
- if ($property_name eq 'svn:eol-style')
- {
- @diff_ignore_space_changes = ('-b');
- }
-
- # Write the value to a temporary file in case it's multi-line
- my ($handle, $tmpfile) = tempfile(DIR => $temp_dir);
- print $handle $property_value;
- close($handle);
-
- read_from_process($svn,
- 'propset',
- $property_name,
- '--file',
- $tmpfile,
- $add_file);
- }
- }
- }
- if (@del_files)
- {
- my $target_filename = &make_targets_file(@del_files);
- read_from_process($svn, 'rm', '--targets', $target_filename);
- unlink($target_filename);
- }
-
- # Go through the list of updated files and check the svn:eol-style
- # property. If it is set to native, then convert all CR, CRLF and
- # LF's in the file to the native end of line characters. Also,
- # modify diff's command line so that it will ignore the change in
- # end of line style.
- if (keys %upd_files)
- {
- my @upd_files = sort {length($a) <=> length($b) || $a cmp $b}
- keys %upd_files;
- foreach my $upd_file (@upd_files)
- {
- # Always append @BASE to a filename in case they contain a
- # @ character, in which case the Subversion command line
- # client will attempt to parse the characters after the @
- # as a revision and most likely fail, or if the characters
- # after the @ are a valid revision, then it'll possibly
- # get the incorrect information. So always append @BASE
- # and any preceding @'s will be treated normally and the
- # correct information will be retrieved.
- my @command = ($svn,
- 'propget',
- 'svn:eol-style',
- "$upd_file\@BASE");
- my @lines = read_from_process(@command);
- next unless @lines;
- if (@lines > 1)
- {
- warn "$0: '@command' returned more than one line of output: ",
- "'@lines'.\n";
- next;
- }
-
- my $eol_style = $lines[0];
- if ($eol_style eq 'native')
- {
- @diff_ignore_space_changes = ('-b');
- if (&convert_file_to_native_eol($upd_file))
- {
- print "Native eol-style conversion modified $upd_file.\n";
- }
- }
- }
- }
-
- my $message = wrap('', '', "Load $load_dir into $repos_load_abs_path.\n");
- read_from_process($svn, 'commit',
- @svn_use_repos_cmd_opts,
- '-m', $message);
-
- # If an update is not run now after a commit, then some file and
- # directory paths will have an older revisions associated with
- # them and any future commits will fail because they are out of
- # date.
- read_from_process($svn, 'update', @svn_use_repos_cmd_opts);
-
- # Now remove any files and directories to be deleted in the
- # repository.
- if (@del_files)
- {
- rmtree(\@del_files, 1, 0);
- }
-
- # Now make the tag by doing a copy in the svn repository itself.
- if (defined $load_tag)
- {
- my $repos_tag_abs_path = length($repos_base_path_segment) ?
- "$repos_base_path_segment/$load_tag" :
- $load_tag;
-
- my $from_url = $repos_load_rel_path eq '.' ?
- $repos_load_rel_path :
- "$repos_base_url/$repos_load_rel_path";
- my $to_url = "$repos_base_url/$load_tag";
-
- $message = wrap("",
- "",
- "Tag $repos_load_abs_path as " .
- "$repos_tag_abs_path.\n");
- read_from_process($svn, 'cp', @svn_use_repos_cmd_opts,
- '-m', $message, $from_url, $to_url);
-
- # Now check out the tag and run a recursive diff between the
- # original source directory and the tag for a consistency
- # check.
- my $checkout_dir_name = "my_tag_wc_named_$load_tag";
- print "Checking out $to_url into $temp_dir/$checkout_dir_name\n";
-
- chdir($temp_dir)
- or die "$0: cannot chdir '$temp_dir': $!\n";
-
- read_from_process($svn, 'checkout',
- @svn_use_repos_cmd_opts,
- $to_url, $checkout_dir_name);
-
- chdir($checkout_dir_name)
- or die "$0: cannot chdir '$checkout_dir_name': $!\n";
-
- chdir($orig_cwd)
- or die "$0: cannot chdir '$orig_cwd': $!\n";
- read_from_process('diff', '-u', @diff_ignore_space_changes,
- '-x', '.svn',
- '-r', $load_dir, "$temp_dir/$checkout_dir_name");
- }
- }
-
-exit 0;
-
-sub usage
-{
- warn "@_\n" if @_;
- die "usage: $0 [options] svn_url svn_import_dir [dir_v1 [dir_v2 [..]]]\n",
- " svn_url is the file:// or http:// URL of the svn repository\n",
- " svn_import_dir is the path relative to svn_url where to load dirs\n",
- " dir_v1 .. list dirs to import otherwise read from stdin\n",
- "options are\n",
- " -no_user_input don't ask yes/no questions and assume yes answer\n",
- " -no_auto_exe don't set svn:executable for executable files\n",
- " -p filename table listing properties to apply to matching files\n",
- " -svn_username username to perform commits as\n",
- " -svn_password password to supply to svn commit\n",
- " -t tag_dir create a tag copy in tag_dir, relative to svn_url\n",
- " -v increase program verbosity, multiple -v's allowed\n",
- " -wc path use the already checked-out working copy at path\n",
- " instead of checkout out a fresh working copy\n",
- " -glob_ignores List of filename patterns to ignore (as in svn's\n",
- " global-ignores config option)\n";
-}
-
-# Get the next directory to load, either from the command line or from
-# standard input.
-my $get_next_load_dir_init = 0;
-my @get_next_load_dirs;
-sub get_next_load_dir
-{
- if (@ARGV)
- {
- unless ($get_next_load_dir_init)
- {
- $get_next_load_dir_init = 1;
- @get_next_load_dirs = @ARGV;
- }
- return shift @get_next_load_dirs;
- }
-
- if ($opt_verbose)
- {
- print "Waiting for next directory to import on standard input:\n";
- }
- my $line = <STDIN>;
-
- print "\n" if $opt_verbose;
-
- chomp $line;
- if ($line =~ m|(\S+)\s+(\S+)|)
- {
- $line = $1;
- set_svn_use_repos_cmd_opts($2, $opt_svn_password);
- }
- $line;
-}
-
-# This constant stores the commonly used string to indicate that a
-# subroutine has been passed an incorrect number of arguments.
-use vars qw($INCORRECT_NUMBER_OF_ARGS);
-$INCORRECT_NUMBER_OF_ARGS = "passed incorrect number of arguments.\n";
-
-# Creates a temporary file in the temporary directory and stores the
-# arguments in it for use by the svn --targets command line option.
-# If any part of the file creation failed, exit the program, as
-# there's no workaround. Use a unique number as a counter to the
-# files.
-my $make_targets_file_counter;
-sub make_targets_file
-{
- unless (@_)
- {
- confess "$0: make_targets_file $INCORRECT_NUMBER_OF_ARGS";
- }
-
- $make_targets_file_counter = 1 unless defined $make_targets_file_counter;
-
- my $filename = sprintf "%s/targets.%05d",
- $temp_dir,
- $make_targets_file_counter;
- ++$make_targets_file_counter;
-
- open(TARGETS, ">$filename")
- or die "$0: cannot open '$filename' for writing: $!\n";
-
- foreach my $file (@_)
- {
- print TARGETS "$file\n";
- }
-
- close(TARGETS)
- or die "$0: error in closing '$filename' for writing: $!\n";
-
- $filename;
-}
-
-# Set the svn command line options that are used anytime svn connects
-# to the repository.
-sub set_svn_use_repos_cmd_opts
-{
- unless (@_ == 2)
- {
- confess "$0: set_svn_use_repos_cmd_opts $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $username = shift;
- my $password = shift;
-
- @svn_use_repos_cmd_opts = ('--non-interactive');
- if (defined $username and length $username)
- {
- push(@svn_use_repos_cmd_opts, '--username', $username);
- }
- if (defined $password)
- {
- push(@svn_use_repos_cmd_opts, '--password', $password);
- }
-}
-
-sub get_tag_dir
-{
- unless (@_ == 1)
- {
- confess "$0: get_tag_dir $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $load_dir = shift;
-
- # Take the tag relative directory, search for pairs of
- # REGEX_SEP_CHAR's and use the regular expression inside the pair to
- # put in the tag directory name.
- my $tag_location = $opt_import_tag_location;
- my $load_tag = '';
- while ((my $i = index($tag_location, $REGEX_SEP_CHAR)) >= 0)
- {
- $load_tag .= substr($tag_location, 0, $i, '');
- substr($tag_location, 0, 1, '');
- my $j = index($tag_location, $REGEX_SEP_CHAR);
- if ($j < 0)
- {
- die "$0: -t value '$opt_import_tag_location' does not have ",
- "matching $REGEX_SEP_CHAR.\n";
- }
- my $regex = substr($tag_location, 0, $j, '');
- $regex = "($regex)" unless ($regex =~ /\(.+\)/);
- substr($tag_location, 0, 1, '');
- my @results = $load_dir =~ m/$regex/;
- $load_tag .= join('', @results);
- }
- $load_tag .= $tag_location;
-
- $load_tag;
-}
-
-# Return a two element array. The first element is a single character
-# that represents the type of object the path points to. The second
-# is a boolean (1 for true, '' for false) if the path points to a file
-# and if the file is executable.
-sub file_info
-{
- lstat(shift) or return ('0', '');
- -b _ and return ('b', '');
- -c _ and return ('c', '');
- -d _ and return ('d', '');
- -f _ and return ('f', -x _);
- -l _ and return ('l', '');
- -p _ and return ('p', '');
- -S _ and return ('S', '');
- return '?';
-}
-
-# Start a child process safely without using /bin/sh.
-sub safe_read_from_pipe
-{
- unless (@_)
- {
- croak "$0: safe_read_from_pipe $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $openfork_available = "MSWin32" ne $OSNAME;
- if ($openfork_available)
- {
- print "Running @_\n";
- my $pid = open(SAFE_READ, "-|");
- unless (defined $pid)
- {
- die "$0: cannot fork: $!\n";
- }
- unless ($pid)
- {
- # child
- open(STDERR, ">&STDOUT")
- or die "$0: cannot dup STDOUT: $!\n";
- exec(@_)
- or die "$0: cannot exec '@_': $!\n";
- }
- }
- else
- {
- # Redirect the comment into a temp file and use that to work around
- # Windoze's (non-)handling of multi-line commands.
- my @commandline = ();
- my $command;
- my $comment;
-
- while ($command = shift)
- {
- if ("-m" eq $command)
- {
- my $comment = shift;
- my ($handle, $tmpfile) = tempfile(DIR => $temp_dir);
- print $handle $comment;
- close($handle);
-
- push(@commandline, "--file");
- push(@commandline, $tmpfile);
- }
- else
- {
- # Munge the command to protect it from the command line
- $command =~ s/\"/\\\"/g;
- if ($command =~ m"\s") { $command = "\"$command\""; }
- if ($command eq "") { $command = "\"\""; }
- if ($command =~ m"\n")
- {
- warn "$0: carriage return detected in command - may not work\n";
- }
- push(@commandline, $command);
- }
- }
-
- print "Running @commandline\n";
- if ( $comment ) { print $comment; }
-
- # Now do the pipe.
- open(SAFE_READ, "@commandline |")
- or die "$0: cannot pipe to command: $!\n";
- }
-
- # parent
- my @output;
- while (<SAFE_READ>)
- {
- chomp;
- push(@output, $_);
- }
- close(SAFE_READ);
- my $result = $?;
- my $exit = $result >> 8;
- my $signal = $result & 127;
- my $cd = $result & 128 ? "with core dump" : "";
- if ($signal or $cd)
- {
- warn "$0: pipe from '@_' failed $cd: exit=$exit signal=$signal\n";
- }
- if (wantarray)
- {
- return ($result, @output);
- }
- else
- {
- return $result;
- }
-}
-
-# Use safe_read_from_pipe to start a child process safely and exit the
-# script if the child failed for whatever reason.
-sub read_from_process
-{
- unless (@_)
- {
- croak "$0: read_from_process $INCORRECT_NUMBER_OF_ARGS";
- }
- my ($status, @output) = &safe_read_from_pipe(@_);
- if ($status)
- {
- print STDERR "$0: @_ failed with this output:\n", join("\n", @output),
- "\n";
- unless ($opt_no_user_input)
- {
- print STDERR
- "Press return to quit and clean up svn working directory: ";
- <STDIN>;
- }
- exit 1;
- }
- else
- {
- return @output;
- }
-}
-
-# Get a list of all the files and directories in the specified
-# directory, the type of file and a digest hash of file types.
-sub recursive_ls_and_hash
-{
- unless (@_ == 1)
- {
- croak "$0: recursive_ls_and_hash $INCORRECT_NUMBER_OF_ARGS";
- }
-
- # This is the directory to change into.
- my $dir = shift;
-
- # Get the current directory so that the script can change into the
- # current working directory after changing into the specified
- # directory.
- my $return_cwd = cwd;
-
- chdir($dir)
- or die "$0: cannot chdir '$dir': $!\n";
-
- my %files;
-
- my $wanted = sub
- {
- s#^\./##;
- return if $_ eq '.';
- my ($file_type) = &file_info($_);
- my $file_digest;
- if ($file_type eq 'f' or ($file_type eq 'l' and stat($_) and -f _))
- {
- $file_digest = &digest_hash_file($_);
- }
- $files{$_} = {type => $file_type,
- digest => $file_digest};
- };
- find({no_chdir => 1,
- preprocess => sub
- {
- grep
- {
- my $ok=1;
- foreach my $x (@glob_ignores)
- {
- if ( $_ =~ /$x/ ) {$ok=0;last;}
- }
- $ok
- } @_
- },
- wanted => $wanted
- }, '.');
-
- chdir($return_cwd)
- or die "$0: cannot chdir '$return_cwd': $!\n";
-
- %files;
-}
-
-# Given a list of files and directories which have been renamed but
-# not commtited, commit them with a proper log message.
-sub commit_renames
-{
- unless (@_ == 4)
- {
- croak "$0: commit_renames $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $load_dir = shift;
- my $renamed_filenames = shift;
- my $rename_from_files = shift;
- my $rename_to_files = shift;
-
- my $number_renames = @$renamed_filenames/2;
-
- my $message = "To prepare to load $load_dir into $repos_load_abs_path, " .
- "perform $number_renames rename" .
- ($number_renames > 1 ? "s" : "") . ".\n";
-
- # Text::Wrap::wrap appears to replace multiple consecutive \n's with
- # one \n, so wrap the text and then append the second \n.
- $message = wrap("", "", $message) . "\n";
- while (@$renamed_filenames)
- {
- my $from = "$repos_load_abs_path/" . shift @$renamed_filenames;
- my $to = "$repos_load_abs_path/" . shift @$renamed_filenames;
- $message .= wrap("", " ", "* $to: Renamed from $from.\n");
- }
-
- # Change to the top of the working copy so that any
- # directories will also be updated.
- my $cwd = cwd;
- chdir($wc_import_dir_cwd)
- or die "$0: cannot chdir '$wc_import_dir_cwd': $!\n";
- read_from_process($svn, 'commit', @svn_use_repos_cmd_opts, '-m', $message);
- read_from_process($svn, 'update', @svn_use_repos_cmd_opts);
- chdir($cwd)
- or die "$0: cannot chdir '$cwd': $!\n";
-
- # Some versions of subversion have a bug where renamed files
- # or directories are not deleted after a commit, so do that
- # here.
- my @del_files = sort {length($b) <=> length($a) || $a cmp $b }
- keys %$rename_from_files;
- rmtree(\@del_files, 1, 0);
-
- # Empty the list of old and new renamed names.
- undef %$rename_from_files;
- undef %$rename_to_files;
-}
-
-# Take a one file or directory and see if its name is equal to a
-# second or is contained in the second if the second file's file type
-# is a directory.
-sub contained_in
-{
- unless (@_ == 3)
- {
- croak "$0: contain_in $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $contained = shift;
- my $container = shift;
- my $container_type = shift;
-
- if ($container eq $contained)
- {
- return 1;
- }
-
- if ($container_type eq 'd')
- {
- my $dirname = "$container/";
- my $dirname_length = length($dirname);
-
- if ($dirname_length <= length($contained) and
- $dirname eq substr($contained, 0, $dirname_length))
- {
- return 1;
- }
- }
-
- return 0;
-}
-
-# Take an array reference containing a list of files and directories
-# and take a hash reference and remove from the array reference any
-# files and directories and the files the directory contains listed in
-# the hash.
-sub filter_renamed_files
-{
- unless (@_ == 2)
- {
- croak "$0: filter_renamed_files $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $array_ref = shift;
- my $hash_ref = shift;
-
- foreach my $remove_filename (keys %$hash_ref)
- {
- my $remove_file_type = $hash_ref->{$remove_filename}{type};
- for (my $i=0; $i<@$array_ref;)
- {
- if (contained_in($array_ref->[$i],
- $remove_filename,
- $remove_file_type))
- {
- splice(@$array_ref, $i, 1);
- next;
- }
- ++$i;
- }
- }
-}
-
-# Get a digest hash of the specified filename.
-sub digest_hash_file
-{
- unless (@_ == 1)
- {
- croak "$0: digest_hash_file $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $filename = shift;
-
- my $ctx = Digest::MD5->new;
- if (open(READ, $filename))
- {
- binmode READ;
- $ctx->addfile(*READ);
- close(READ);
- }
- else
- {
- die "$0: cannot open '$filename' for reading: $!\n";
- }
- $ctx->digest;
-}
-
-# Read standard input until a line contains the required input or an
-# empty line to signify the default answer.
-sub get_answer
-{
- unless (@_ == 3)
- {
- croak "$0: get_answer $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $message = shift;
- my $answers = shift;
- my $def_ans = shift;
-
- return $def_ans if $opt_no_user_input;
-
- my $char;
- do
- {
- print $message;
- $char = '';
- my $line = <STDIN>;
- if (defined $line and length $line)
- {
- $char = substr($line, 0, 1);
- $char = '' if $char eq "\n";
- }
- } until $char eq '' or $answers =~ /$char/ig;
-
- return $def_ans if $char eq '';
- return pos($answers) - 1;
-}
-
-# Determine the native end of line on this system by writing a \n in
-# non-binary mode to an empty file and reading the same file back in
-# binary mode.
-sub determine_native_eol
-{
- my $filename = "$temp_dir/svn_load_dirs_eol_test.$$";
- if (-e $filename)
- {
- unlink($filename)
- or die "$0: cannot unlink '$filename': $!\n";
- }
-
- # Write the \n in non-binary mode.
- open(NL_TEST, ">$filename")
- or die "$0: cannot open '$filename' for writing: $!\n";
- print NL_TEST "\n";
- close(NL_TEST)
- or die "$0: error in closing '$filename' for writing: $!\n";
-
- # Read the \n in binary mode.
- open(NL_TEST, $filename)
- or die "$0: cannot open '$filename' for reading: $!\n";
- binmode NL_TEST;
- local $/;
- undef $/;
- my $eol = <NL_TEST>;
- close(NL_TEST)
- or die "$0: cannot close '$filename' for reading: $!\n";
- unlink($filename)
- or die "$0: cannot unlink '$filename': $!\n";
-
- my $eol_length = length($eol);
- unless ($eol_length)
- {
- die "$0: native eol length on this system is 0.\n";
- }
-
- print "Native EOL on this system is ";
- for (my $i=0; $i<$eol_length; ++$i)
- {
- printf "\\%03o", ord(substr($eol, $i, 1));
- }
- print ".\n\n";
-
- $eol;
-}
-
-# Take a filename, open the file and replace all CR, CRLF and LF's
-# with the native end of line style for this system.
-sub convert_file_to_native_eol
-{
- unless (@_ == 1)
- {
- croak "$0: convert_file_to_native_eol $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $filename = shift;
- open(FILE, $filename)
- or die "$0: cannot open '$filename' for reading: $!\n";
- binmode FILE;
- local $/;
- undef $/;
- my $in = <FILE>;
- close(FILE)
- or die "$0: error in closing '$filename' for reading: $!\n";
- my $out = '';
-
- # Go through the file and transform it byte by byte.
- my $i = 0;
- while ($i < length($in))
- {
- my $cc = substr($in, $i, 2);
- if ($cc eq "\015\012")
- {
- $out .= $native_eol;
- $i += 2;
- next;
- }
-
- my $c = substr($cc, 0, 1);
- if ($c eq "\012" or $c eq "\015")
- {
- $out .= $native_eol;
- }
- else
- {
- $out .= $c;
- }
- ++$i;
- }
-
- return 0 if $in eq $out;
-
- my $tmp_filename = ".svn/tmp/svn_load_dirs.$$";
- open(FILE, ">$tmp_filename")
- or die "$0: cannot open '$tmp_filename' for writing: $!\n";
- binmode FILE;
- print FILE $out;
- close(FILE)
- or die "$0: cannot close '$tmp_filename' for writing: $!\n";
- rename($tmp_filename, $filename)
- or die "$0: cannot rename '$tmp_filename' to '$filename': $!\n";
-
- return 1;
-}
-
-# Split the input line into words taking into account that single or
-# double quotes may define a single word with whitespace in it.
-sub split_line
-{
- unless (@_ == 1)
- {
- croak "$0: split_line $INCORRECT_NUMBER_OF_ARGS";
- }
-
- my $line = shift;
-
- # Strip leading whitespace. Do not strip trailing whitespace which
- # may be part of quoted text that was never closed.
- $line =~ s/^\s+//;
-
- my $line_length = length $line;
- my @words = ();
- my $current_word = '';
- my $in_quote = '';
- my $in_protect = '';
- my $in_space = '';
- my $i = 0;
-
- while ($i < $line_length)
- {
- my $c = substr($line, $i, 1);
- ++$i;
-
- if ($in_protect)
- {
- if ($c eq $in_quote)
- {
- $current_word .= $c;
- }
- elsif ($c eq '"' or $c eq "'")
- {
- $current_word .= $c;
- }
- else
- {
- $current_word .= "$in_protect$c";
- }
- $in_protect = '';
- }
- elsif ($c eq '\\')
- {
- $in_protect = $c;
- }
- elsif ($in_quote)
- {
- if ($c eq $in_quote)
- {
- $in_quote = '';
- }
- else
- {
- $current_word .= $c;
- }
- }
- elsif ($c eq '"' or $c eq "'")
- {
- $in_quote = $c;
- }
- elsif ($c =~ m/^\s$/)
- {
- unless ($in_space)
- {
- push(@words, $current_word);
- $current_word = '';
- }
- }
- else
- {
- $current_word .= $c;
- }
-
- $in_space = $c =~ m/^\s$/;
- }
-
- # Handle any leftovers.
- $current_word .= $in_protect if $in_protect;
- push(@words, $current_word) if length $current_word;
-
- @words;
-}
-
-# This package exists just to delete the temporary directory.
-package Temp::Delete;
-
-sub new
-{
- bless {}, shift;
-}
-
-sub DESTROY
-{
- print "Cleaning up $temp_dir\n";
- File::Path::rmtree([$temp_dir], 0, 0);
-}
diff --git a/thirdparty/svnmerge/COPYING b/thirdparty/svnmerge/COPYING
deleted file mode 100644
index d511905..0000000
--- a/thirdparty/svnmerge/COPYING
+++ /dev/null
@@ -1,339 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 2, June 1991
-
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-License is intended to guarantee your freedom to share and change free
-software--to make sure the software is free for all its users. This
-General Public License applies to most of the Free Software
-Foundation's software and to any other program whose authors commit to
-using it. (Some other Free Software Foundation software is covered by
-the GNU Lesser General Public License instead.) You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-this service if you wish), that you receive source code or can get it
-if you want it, that you can change the software or use pieces of it
-in new free programs; and that you know you can do these things.
-
- To protect your rights, we need to make restrictions that forbid
-anyone to deny you these rights or to ask you to surrender the rights.
-These restrictions translate to certain responsibilities for you if you
-distribute copies of the software, or if you modify it.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must give the recipients all the rights that
-you have. You must make sure that they, too, receive or can get the
-source code. And you must show them these terms so they know their
-rights.
-
- We protect your rights with two steps: (1) copyright the software, and
-(2) offer you this license which gives you legal permission to copy,
-distribute and/or modify the software.
-
- Also, for each author's protection and ours, we want to make certain
-that everyone understands that there is no warranty for this free
-software. If the software is modified by someone else and passed on, we
-want its recipients to know that what they have is not the original, so
-that any problems introduced by others will not reflect on the original
-authors' reputations.
-
- Finally, any free program is threatened constantly by software
-patents. We wish to avoid the danger that redistributors of a free
-program will individually obtain patent licenses, in effect making the
-program proprietary. To prevent this, we have made it clear that any
-patent must be licensed for everyone's free use or not licensed at all.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- GNU GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License applies to any program or other work which contains
-a notice placed by the copyright holder saying it may be distributed
-under the terms of this General Public License. The "Program", below,
-refers to any such program or work, and a "work based on the Program"
-means either the Program or any derivative work under copyright law:
-that is to say, a work containing the Program or a portion of it,
-either verbatim or with modifications and/or translated into another
-language. (Hereinafter, translation is included without limitation in
-the term "modification".) Each licensee is addressed as "you".
-
-Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running the Program is not restricted, and the output from the Program
-is covered only if its contents constitute a work based on the
-Program (independent of having been made by running the Program).
-Whether that is true depends on what the Program does.
-
- 1. You may copy and distribute verbatim copies of the Program's
-source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate
-copyright notice and disclaimer of warranty; keep intact all the
-notices that refer to this License and to the absence of any warranty;
-and give any other recipients of the Program a copy of this License
-along with the Program.
-
-You may charge a fee for the physical act of transferring a copy, and
-you may at your option offer warranty protection in exchange for a fee.
-
- 2. You may modify your copy or copies of the Program or any portion
-of it, thus forming a work based on the Program, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) You must cause the modified files to carry prominent notices
- stating that you changed the files and the date of any change.
-
- b) You must cause any work that you distribute or publish, that in
- whole or in part contains or is derived from the Program or any
- part thereof, to be licensed as a whole at no charge to all third
- parties under the terms of this License.
-
- c) If the modified program normally reads commands interactively
- when run, you must cause it, when started running for such
- interactive use in the most ordinary way, to print or display an
- announcement including an appropriate copyright notice and a
- notice that there is no warranty (or else, saying that you provide
- a warranty) and that users may redistribute the program under
- these conditions, and telling the user how to view a copy of this
- License. (Exception: if the Program itself is interactive but
- does not normally print such an announcement, your work based on
- the Program is not required to print an announcement.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Program,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Program, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Program.
-
-In addition, mere aggregation of another work not based on the Program
-with the Program (or with a work based on the Program) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may copy and distribute the Program (or a work based on it,
-under Section 2) in object code or executable form under the terms of
-Sections 1 and 2 above provided that you also do one of the following:
-
- a) Accompany it with the complete corresponding machine-readable
- source code, which must be distributed under the terms of Sections
- 1 and 2 above on a medium customarily used for software interchange; or,
-
- b) Accompany it with a written offer, valid for at least three
- years, to give any third party, for a charge no more than your
- cost of physically performing source distribution, a complete
- machine-readable copy of the corresponding source code, to be
- distributed under the terms of Sections 1 and 2 above on a medium
- customarily used for software interchange; or,
-
- c) Accompany it with the information you received as to the offer
- to distribute corresponding source code. (This alternative is
- allowed only for noncommercial distribution and only if you
- received the program in object code or executable form with such
- an offer, in accord with Subsection b above.)
-
-The source code for a work means the preferred form of the work for
-making modifications to it. For an executable work, complete source
-code means all the source code for all modules it contains, plus any
-associated interface definition files, plus the scripts used to
-control compilation and installation of the executable. However, as a
-special exception, the source code distributed need not include
-anything that is normally distributed (in either source or binary
-form) with the major components (compiler, kernel, and so on) of the
-operating system on which the executable runs, unless that component
-itself accompanies the executable.
-
-If distribution of executable or object code is made by offering
-access to copy from a designated place, then offering equivalent
-access to copy the source code from the same place counts as
-distribution of the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 4. You may not copy, modify, sublicense, or distribute the Program
-except as expressly provided under this License. Any attempt
-otherwise to copy, modify, sublicense or distribute the Program is
-void, and will automatically terminate your rights under this License.
-However, parties who have received copies, or rights, from you under
-this License will not have their licenses terminated so long as such
-parties remain in full compliance.
-
- 5. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Program or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Program (or any work based on the
-Program), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Program or works based on it.
-
- 6. Each time you redistribute the Program (or any work based on the
-Program), the recipient automatically receives a license from the
-original licensor to copy, distribute or modify the Program subject to
-these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties to
-this License.
-
- 7. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Program at all. For example, if a patent
-license would not permit royalty-free redistribution of the Program by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Program.
-
-If any portion of this section is held invalid or unenforceable under
-any particular circumstance, the balance of the section is intended to
-apply and the section as a whole is intended to apply in other
-circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system, which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 8. If the distribution and/or use of the Program is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Program under this License
-may add an explicit geographical distribution limitation excluding
-those countries, so that distribution is permitted only in or among
-countries not thus excluded. In such case, this License incorporates
-the limitation as if written in the body of this License.
-
- 9. The Free Software Foundation may publish revised and/or new versions
-of the General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Program
-specifies a version number of this License which applies to it and "any
-later version", you have the option of following the terms and conditions
-either of that version or of any later version published by the Free
-Software Foundation. If the Program does not specify a version number of
-this License, you may choose any version ever published by the Free Software
-Foundation.
-
- 10. If you wish to incorporate parts of the Program into other free
-programs whose distribution conditions are different, write to the author
-to ask for permission. For software which is copyrighted by the Free
-Software Foundation, write to the Free Software Foundation; we sometimes
-make exceptions for this. Our decision will be guided by the two goals
-of preserving the free status of all derivatives of our free software and
-of promoting the sharing and reuse of software generally.
-
- NO WARRANTY
-
- 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-REPAIR OR CORRECTION.
-
- 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
- Gnomovision version 69, Copyright (C) year name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the program
- `Gnomovision' (which makes passes at compilers) written by James Hacker.
-
- <signature of Ty Coon>, 1 April 1989
- Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.
diff --git a/thirdparty/svnmerge/svnmerge.py b/thirdparty/svnmerge/svnmerge.py
deleted file mode 100755
index b1cd9da..0000000
--- a/thirdparty/svnmerge/svnmerge.py
+++ /dev/null
@@ -1,2170 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2005, Giovanni Bajo
-# Copyright (c) 2004-2005, Awarix, Inc.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-#
-# Author: Archie Cobbs <archie at awarix dot com>
-# Rewritten in Python by: Giovanni Bajo <rasky at develer dot com>
-#
-# Acknowledgments:
-# John Belmonte <john at neggie dot net> - metadata and usability
-# improvements
-# Blair Zajac <blair at orcaware dot com> - random improvements
-# Raman Gupta <rocketraman at fastmail dot fm> - bidirectional and transitive
-# merging support
-#
-# $HeadURL$
-# $LastChangedDate$
-# $LastChangedBy$
-# $LastChangedRevision$
-#
-# Requisites:
-# svnmerge.py has been tested with all SVN major versions since 1.1 (both
-# client and server). It is unknown if it works with previous versions.
-#
-# Differences from svnmerge.sh:
-# - More portable: tested as working in FreeBSD and OS/2.
-# - Add double-verbose mode, which shows every svn command executed (-v -v).
-# - "svnmerge avail" now only shows commits in source, not also commits in
-# other parts of the repository.
-# - Add "svnmerge block" to flag some revisions as blocked, so that
-# they will not show up anymore in the available list. Added also
-# the complementary "svnmerge unblock".
-# - "svnmerge avail" has grown two new options:
-# -B to display a list of the blocked revisions
-# -A to display both the blocked and the available revisions.
-# - Improved generated commit message to make it machine parsable even when
-# merging commits which are themselves merges.
-# - Add --force option to skip working copy check
-# - Add --record-only option to "svnmerge merge" to avoid performing
-# an actual merge, yet record that a merge happened.
-#
-# TODO:
-# - Add "svnmerge avail -R": show logs in reverse order
-#
-# Information for Hackers:
-#
-# Identifiers for branches:
-# A branch is identified in three ways within this source:
-# - as a working copy (variable name usually includes 'dir')
-# - as a fully qualified URL
-# - as a path identifier (an opaque string indicating a particular path
-# in a particular repository; variable name includes 'pathid')
-# A "target" is generally user-specified, and may be a working copy or
-# a URL.
-
-import sys, os, getopt, re, types, tempfile, time, popen2, locale
-from bisect import bisect
-from xml.dom import pulldom
-
-NAME = "svnmerge"
-if not hasattr(sys, "version_info") or sys.version_info < (2, 0):
- error("requires Python 2.0 or newer")
-
-# Set up the separator used to separate individual log messages from
-# each revision merged into the target location. Also, create a
-# regular expression that will find this same separator in already
-# committed log messages, so that the separator used for this run of
-# svnmerge.py will have one more LOG_SEPARATOR appended to the longest
-# separator found in all the commits.
-LOG_SEPARATOR = 8 * '.'
-LOG_SEPARATOR_RE = re.compile('^((%s)+)' % re.escape(LOG_SEPARATOR),
- re.MULTILINE)
-
-# Each line of the embedded log messages will be prefixed by LOG_LINE_PREFIX.
-LOG_LINE_PREFIX = 2 * ' '
-
-# Set python to the default locale as per environment settings, same as svn
-# TODO we should really parse config and if log-encoding is specified, set
-# the locale to match that encoding
-locale.setlocale(locale.LC_ALL, '')
-
-# We want the svn output (such as svn info) to be non-localized
-# Using LC_MESSAGES should not affect localized output of svn log, for example
-if os.environ.has_key("LC_ALL"):
- del os.environ["LC_ALL"]
-os.environ["LC_MESSAGES"] = "C"
-
-###############################################################################
-# Support for older Python versions
-###############################################################################
-
-# True/False constants are Python 2.2+
-try:
- True, False
-except NameError:
- True, False = 1, 0
-
-def lstrip(s, ch):
- """Replacement for str.lstrip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- i = 0
- try:
- while s[i] == ch:
- i = i+1
- return s[i:]
- except IndexError:
- return ""
-
-def rstrip(s, ch):
- """Replacement for str.rstrip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- try:
- if s[-1] != ch:
- return s
- i = -2
- while s[i] == ch:
- i = i-1
- return s[:i+1]
- except IndexError:
- return ""
-
-def strip(s, ch):
- """Replacement for str.strip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- return lstrip(rstrip(s, ch), ch)
-
-def rsplit(s, sep, maxsplits=0):
- """Like str.rsplit, which is Python 2.4+ only."""
- L = s.split(sep)
- if not 0 < maxsplits <= len(L):
- return L
- return [sep.join(L[0:-maxsplits])] + L[-maxsplits:]
-
-###############################################################################
-
-def kwextract(s):
- """Extract info from a svn keyword string."""
- try:
- return strip(s, "$").strip().split(": ")[1]
- except IndexError:
- return "<unknown>"
-
-__revision__ = kwextract('$Rev$')
-__date__ = kwextract('$Date$')
-
-# Additional options, not (yet?) mapped to command line flags
-default_opts = {
- "svn": "svn",
- "prop": NAME + "-integrated",
- "block-prop": NAME + "-blocked",
- "commit-verbose": True,
-}
-logs = {}
-
-def console_width():
- """Get the width of the console screen (if any)."""
- try:
- return int(os.environ["COLUMNS"])
- except (KeyError, ValueError):
- pass
-
- try:
- # Call the Windows API (requires ctypes library)
- from ctypes import windll, create_string_buffer
- h = windll.kernel32.GetStdHandle(-11)
- csbi = create_string_buffer(22)
- res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
- if res:
- import struct
- (bufx, bufy,
- curx, cury, wattr,
- left, top, right, bottom,
- maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
- return right - left + 1
- except ImportError:
- pass
-
- # Parse the output of stty -a
- out = os.popen("stty -a").read()
- m = re.search(r"columns (\d+);", out)
- if m:
- return int(m.group(1))
-
- # sensible default
- return 80
-
-def error(s):
- """Subroutine to output an error and bail."""
- print >> sys.stderr, "%s: %s" % (NAME, s)
- sys.exit(1)
-
-def report(s):
- """Subroutine to output progress message, unless in quiet mode."""
- if opts["verbose"]:
- print "%s: %s" % (NAME, s)
-
-def prefix_lines(prefix, lines):
- """Given a string representing one or more lines of text, insert the
- specified prefix at the beginning of each line, and return the result.
- The input must be terminated by a newline."""
- assert lines[-1] == "\n"
- return prefix + lines[:-1].replace("\n", "\n"+prefix) + "\n"
-
-def recode_stdout_to_file(s):
- if locale.getdefaultlocale()[1] is None or not hasattr(sys.stdout, "encoding") \
- or sys.stdout.encoding is None:
- return s
- u = s.decode(sys.stdout.encoding)
- return u.encode(locale.getdefaultlocale()[1])
-
-class LaunchError(Exception):
- """Signal a failure in execution of an external command. Parameters are the
- exit code of the process, the original command line, and the output of the
- command."""
-
-try:
- """Launch a sub-process. Return its output (both stdout and stderr),
- optionally split by lines (if split_lines is True). Raise a LaunchError
- exception if the exit code of the process is non-zero (failure).
-
- This function has two implementations, one based on subprocess (preferred),
- and one based on popen (for compatibility).
- """
- import subprocess
- import shlex
-
- def launch(cmd, split_lines=True):
- # Requiring python 2.4 or higher, on some platforms we get
- # much faster performance from the subprocess module (where python
- # doesn't try to close an exhorbitant number of file descriptors)
- stdout = ""
- stderr = ""
- try:
- if os.name == 'nt':
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, \
- close_fds=False, stderr=subprocess.PIPE)
- else:
- # Use shlex to break up the parameters intelligently,
- # respecting quotes. shlex can't handle unicode.
- args = shlex.split(cmd.encode('ascii'))
- p = subprocess.Popen(args, stdout=subprocess.PIPE, \
- close_fds=False, stderr=subprocess.PIPE)
- stdoutAndErr = p.communicate()
- stdout = stdoutAndErr[0]
- stderr = stdoutAndErr[1]
- except OSError, inst:
- # Using 1 as failure code; should get actual number somehow? For
- # examples see svnmerge_test.py's TestCase_launch.test_failure and
- # TestCase_launch.test_failurecode.
- raise LaunchError(1, cmd, stdout + " " + stderr + ": " + str(inst))
-
- if p.returncode == 0:
- if split_lines:
- # Setting keepends=True for compatibility with previous logic
- # (where file.readlines() preserves newlines)
- return stdout.splitlines(True)
- else:
- return stdout
- else:
- raise LaunchError(p.returncode, cmd, stdout + stderr)
-except ImportError:
- # support versions of python before 2.4 (slower on some systems)
- def launch(cmd, split_lines=True):
- if os.name not in ['nt', 'os2']:
- p = popen2.Popen4(cmd)
- p.tochild.close()
- if split_lines:
- out = p.fromchild.readlines()
- else:
- out = p.fromchild.read()
- ret = p.wait()
- if ret == 0:
- ret = None
- else:
- ret >>= 8
- else:
- i,k = os.popen4(cmd)
- i.close()
- if split_lines:
- out = k.readlines()
- else:
- out = k.read()
- ret = k.close()
-
- if ret is None:
- return out
- raise LaunchError(ret, cmd, out)
-
-def launchsvn(s, show=False, pretend=False, **kwargs):
- """Launch SVN and grab its output."""
- username = opts.get("username", None)
- password = opts.get("password", None)
- if username:
- username = " --username=" + username
- else:
- username = ""
- if password:
- password = " --password=" + password
- else:
- password = ""
- cmd = opts["svn"] + " --non-interactive" + username + password + " " + s
- if show or opts["verbose"] >= 2:
- print cmd
- if pretend:
- return None
- return launch(cmd, **kwargs)
-
-def svn_command(s):
- """Do (or pretend to do) an SVN command."""
- out = launchsvn(s, show=opts["show-changes"] or opts["dry-run"],
- pretend=opts["dry-run"],
- split_lines=False)
- if not opts["dry-run"]:
- print out
-
-def check_dir_clean(dir):
- """Check the current status of dir for local mods."""
- if opts["force"]:
- report('skipping status check because of --force')
- return
- report('checking status of "%s"' % dir)
-
- # Checking with -q does not show unversioned files or external
- # directories. Though it displays a debug message for external
- # directories, after a blank line. So, practically, the first line
- # matters: if it's non-empty there is a modification.
- out = launchsvn("status -q %s" % dir)
- if out and out[0].strip():
- error('"%s" has local modifications; it must be clean' % dir)
-
-class RevisionLog:
- """
- A log of the revisions which affected a given URL between two
- revisions.
- """
-
- def __init__(self, url, begin, end, find_propchanges=False):
- """
- Create a new RevisionLog object, which stores, in self.revs, a list
- of the revisions which affected the specified URL between begin and
- end. If find_propchanges is True, self.propchange_revs will contain a
- list of the revisions which changed properties directly on the
- specified URL. URL must be the URL for a directory in the repository.
- """
- self.url = url
-
- # Setup the log options (--quiet, so we don't show log messages)
- log_opts = '--xml --quiet -r%s:%s "%s"' % (begin, end, url)
- if find_propchanges:
- # The --verbose flag lets us grab merge tracking information
- # by looking at propchanges
- log_opts = "--verbose " + log_opts
-
- # Read the log to look for revision numbers and merge-tracking info
- self.revs = []
- self.propchange_revs = []
- repos_pathid = target_to_pathid(url)
- for chg in SvnLogParser(launchsvn("log %s" % log_opts,
- split_lines=False)):
- self.revs.append(chg.revision())
- for p in chg.paths():
- if p.action() == 'M' and p.pathid() == repos_pathid:
- self.propchange_revs.append(chg.revision())
-
- # Save the range of the log
- self.begin = int(begin)
- if end == "HEAD":
- # If end is not provided, we do not know which is the latest
- # revision in the repository. So we set 'end' to the latest
- # known revision.
- self.end = self.revs[-1]
- else:
- self.end = int(end)
-
- self._merges = None
- self._blocks = None
-
- def merge_metadata(self):
- """
- Return a VersionedProperty object, with a cached view of the merge
- metadata in the range of this log.
- """
-
- # Load merge metadata if necessary
- if not self._merges:
- self._merges = VersionedProperty(self.url, opts["prop"])
- self._merges.load(self)
-
- return self._merges
-
- def block_metadata(self):
- if not self._blocks:
- self._blocks = VersionedProperty(self.url, opts["block-prop"])
- self._blocks.load(self)
-
- return self._blocks
-
-
-class VersionedProperty:
- """
- A read-only, cached view of a versioned property.
-
- self.revs contains a list of the revisions in which the property changes.
- self.values stores the new values at each corresponding revision. If the
- value of the property is unknown, it is set to None.
-
- Initially, we set self.revs to [0] and self.values to [None]. This
- indicates that, as of revision zero, we know nothing about the value of
- the property.
-
- Later, if you run self.load(log), we cache the value of this property over
- the entire range of the log by noting each revision in which the property
- was changed. At the end of the range of the log, we invalidate our cache
- by adding the value "None" to our cache for any revisions which fall out
- of the range of our log.
-
- Once self.revs and self.values are filled, we can find the value of the
- property at any arbitrary revision using a binary search on self.revs.
- Once we find the last revision during which the property was changed,
- we can lookup the associated value in self.values. (If the associated
- value is None, the associated value was not cached and we have to do
- a full propget.)
-
- An example: We know that the 'svnmerge' property was added in r10, and
- changed in r21. We gathered log info up until r40.
-
- revs = [0, 10, 21, 40]
- values = [None, "val1", "val2", None]
-
- What these values say:
- - From r0 to r9, we know nothing about the property.
- - In r10, the property was set to "val1". This property stayed the same
- until r21, when it was changed to "val2".
- - We don't know what happened after r40.
- """
-
- def __init__(self, url, name):
- """View the history of a versioned property at URL with name"""
- self.url = url
- self.name = name
-
- # We know nothing about the value of the property. Setup revs
- # and values to indicate as such.
- self.revs = [0]
- self.values = [None]
-
- # We don't have any revisions cached
- self._initial_value = None
- self._changed_revs = []
- self._changed_values = []
-
- def load(self, log):
- """
- Load the history of property changes from the specified
- RevisionLog object.
- """
-
- # Get the property value before the range of the log
- if log.begin > 1:
- self.revs.append(log.begin-1)
- try:
- self._initial_value = self.raw_get(log.begin-1)
- except LaunchError:
- # The specified URL might not exist before the
- # range of the log. If so, we can safely assume
- # that the property was empty at that time.
- self._initial_value = { }
- self.values.append(self._initial_value)
- else:
- self._initial_value = { }
- self.values[0] = self._initial_value
-
- # Cache the property values in the log range
- old_value = self._initial_value
- for rev in log.propchange_revs:
- new_value = self.raw_get(rev)
- if new_value != old_value:
- self._changed_revs.append(rev)
- self._changed_values.append(new_value)
- self.revs.append(rev)
- self.values.append(new_value)
- old_value = new_value
-
- # Indicate that we know nothing about the value of the property
- # after the range of the log.
- if log.revs:
- self.revs.append(log.end+1)
- self.values.append(None)
-
- def raw_get(self, rev=None):
- """
- Get the property at revision REV. If rev is not specified, get
- the property at revision HEAD.
- """
- return get_revlist_prop(self.url, self.name, rev)
-
- def get(self, rev=None):
- """
- Get the property at revision REV. If rev is not specified, get
- the property at revision HEAD.
- """
-
- if rev is not None:
-
- # Find the index using a binary search
- i = bisect(self.revs, rev) - 1
-
- # Return the value of the property, if it was cached
- if self.values[i] is not None:
- return self.values[i]
-
- # Get the current value of the property
- return self.raw_get(rev)
-
- def changed_revs(self, key=None):
- """
- Get a list of the revisions in which the specified dictionary
- key was changed in this property. If key is not specified,
- return a list of revisions in which any key was changed.
- """
- if key is None:
- return self._changed_revs
- else:
- changed_revs = []
- old_val = self._initial_value
- for rev, val in zip(self._changed_revs, self._changed_values):
- if val.get(key) != old_val.get(key):
- changed_revs.append(rev)
- old_val = val
- return changed_revs
-
- def initialized_revs(self):
- """
- Get a list of the revisions in which keys were added or
- removed in this property.
- """
- initialized_revs = []
- old_len = len(self._initial_value)
- for rev, val in zip(self._changed_revs, self._changed_values):
- if len(val) != old_len:
- initialized_revs.append(rev)
- old_len = len(val)
- return initialized_revs
-
-class RevisionSet:
- """
- A set of revisions, held in dictionary form for easy manipulation. If we
- were to rewrite this script for Python 2.3+, we would subclass this from
- set (or UserSet). As this class does not include branch
- information, it's assumed that one instance will be used per
- branch.
- """
- def __init__(self, parm):
- """Constructs a RevisionSet from a string in property form, or from
- a dictionary whose keys are the revisions. Raises ValueError if the
- input string is invalid."""
-
- self._revs = {}
-
- revision_range_split_re = re.compile('[-:]')
-
- if isinstance(parm, types.DictType):
- self._revs = parm.copy()
- elif isinstance(parm, types.ListType):
- for R in parm:
- self._revs[int(R)] = 1
- else:
- parm = parm.strip()
- if parm:
- for R in parm.split(","):
- rev_or_revs = re.split(revision_range_split_re, R)
- if len(rev_or_revs) == 1:
- self._revs[int(rev_or_revs[0])] = 1
- elif len(rev_or_revs) == 2:
- for rev in range(int(rev_or_revs[0]),
- int(rev_or_revs[1])+1):
- self._revs[rev] = 1
- else:
- raise ValueError, 'Ill formatted revision range: ' + R
-
- def sorted(self):
- revnums = self._revs.keys()
- revnums.sort()
- return revnums
-
- def normalized(self):
- """Returns a normalized version of the revision set, which is an
- ordered list of couples (start,end), with the minimum number of
- intervals."""
- revnums = self.sorted()
- revnums.reverse()
- ret = []
- while revnums:
- s = e = revnums.pop()
- while revnums and revnums[-1] in (e, e+1):
- e = revnums.pop()
- ret.append((s, e))
- return ret
-
- def __str__(self):
- """Convert the revision set to a string, using its normalized form."""
- L = []
- for s,e in self.normalized():
- if s == e:
- L.append(str(s))
- else:
- L.append(str(s) + "-" + str(e))
- return ",".join(L)
-
- def __contains__(self, rev):
- return self._revs.has_key(rev)
-
- def __sub__(self, rs):
- """Compute subtraction as in sets."""
- revs = {}
- for r in self._revs.keys():
- if r not in rs:
- revs[r] = 1
- return RevisionSet(revs)
-
- def __and__(self, rs):
- """Compute intersections as in sets."""
- revs = {}
- for r in self._revs.keys():
- if r in rs:
- revs[r] = 1
- return RevisionSet(revs)
-
- def __nonzero__(self):
- return len(self._revs) != 0
-
- def __len__(self):
- """Return the number of revisions in the set."""
- return len(self._revs)
-
- def __iter__(self):
- return iter(self.sorted())
-
- def __or__(self, rs):
- """Compute set union."""
- revs = self._revs.copy()
- revs.update(rs._revs)
- return RevisionSet(revs)
-
-def merge_props_to_revision_set(merge_props, pathid):
- """A converter which returns a RevisionSet instance containing the
- revisions from PATH as known to BRANCH_PROPS. BRANCH_PROPS is a
- dictionary of pathid -> revision set branch integration information
- (as returned by get_merge_props())."""
- if not merge_props.has_key(pathid):
- error('no integration info available for path "%s"' % pathid)
- return RevisionSet(merge_props[pathid])
-
-def dict_from_revlist_prop(propvalue):
- """Given a property value as a string containing per-source revision
- lists, return a dictionary whose key is a source path identifier
- and whose value is the revisions for that source."""
- prop = {}
-
- # Multiple sources are separated by any whitespace.
- for L in propvalue.split():
- # We use rsplit to play safe and allow colons in pathids.
- source, revs = rsplit(L.strip(), ":", 1)
- prop[source] = revs
- return prop
-
-def get_revlist_prop(url_or_dir, propname, rev=None):
- """Given a repository URL or working copy path and a property
- name, extract the values of the property which store per-source
- revision lists and return a dictionary whose key is a source path
- identifier, and whose value is the revisions for that source."""
-
- # Note that propget does not return an error if the property does
- # not exist, it simply does not output anything. So we do not need
- # to check for LaunchError here.
- args = '--strict "%s" "%s"' % (propname, url_or_dir)
- if rev:
- args = '-r %s %s' % (rev, args)
- out = launchsvn('propget %s' % args, split_lines=False)
-
- return dict_from_revlist_prop(out)
-
-def get_merge_props(dir):
- """Extract the merged revisions."""
- return get_revlist_prop(dir, opts["prop"])
-
-def get_block_props(dir):
- """Extract the blocked revisions."""
- return get_revlist_prop(dir, opts["block-prop"])
-
-def get_blocked_revs(dir, source_pathid):
- p = get_block_props(dir)
- if p.has_key(source_pathid):
- return RevisionSet(p[source_pathid])
- return RevisionSet("")
-
-def format_merge_props(props, sep=" "):
- """Formats the hash PROPS as a string suitable for use as a
- Subversion property value."""
- assert sep in ["\t", "\n", " "] # must be a whitespace
- props = props.items()
- props.sort()
- L = []
- for h, r in props:
- L.append(h + ":" + r)
- return sep.join(L)
-
-def _run_propset(dir, prop, value):
- """Set the property 'prop' of directory 'dir' to value 'value'. We go
- through a temporary file to not run into command line length limits."""
- try:
- fd, fname = tempfile.mkstemp()
- f = os.fdopen(fd, "wb")
- except AttributeError:
- # Fallback for Python <= 2.3 which does not have mkstemp (mktemp
- # suffers from race conditions. Not that we care...)
- fname = tempfile.mktemp()
- f = open(fname, "wb")
-
- try:
- f.write(value)
- f.close()
- report("property data written to temp file: %s" % value)
- svn_command('propset "%s" -F "%s" "%s"' % (prop, fname, dir))
- finally:
- os.remove(fname)
-
-def set_props(dir, name, props):
- props = format_merge_props(props)
- if props:
- _run_propset(dir, name, props)
- else:
- svn_command('propdel "%s" "%s"' % (name, dir))
-
-def set_merge_props(dir, props):
- set_props(dir, opts["prop"], props)
-
-def set_block_props(dir, props):
- set_props(dir, opts["block-prop"], props)
-
-def set_blocked_revs(dir, source_pathid, revs):
- props = get_block_props(dir)
- if revs:
- props[source_pathid] = str(revs)
- elif props.has_key(source_pathid):
- del props[source_pathid]
- set_block_props(dir, props)
-
-def is_url(url):
- """Check if url is a valid url."""
- return re.search(r"^[a-zA-Z][-+\.\w]*://[^\s]+$", url) is not None
-
-def is_wc(dir):
- """Check if a directory is a working copy."""
- return os.path.isdir(os.path.join(dir, ".svn")) or \
- os.path.isdir(os.path.join(dir, "_svn"))
-
-_cache_svninfo = {}
-def get_svninfo(target):
- """Extract the subversion information for a target (through 'svn info').
- This function uses an internal cache to let clients query information
- many times."""
- if _cache_svninfo.has_key(target):
- return _cache_svninfo[target]
- info = {}
- for L in launchsvn('info "%s"' % target):
- L = L.strip()
- if not L:
- continue
- key, value = L.split(": ", 1)
- info[key] = value.strip()
- _cache_svninfo[target] = info
- return info
-
-def target_to_url(target):
- """Convert working copy path or repos URL to a repos URL."""
- if is_wc(target):
- info = get_svninfo(target)
- return info["URL"]
- return target
-
-_cache_reporoot = {}
-def get_repo_root(target):
- """Compute the root repos URL given a working-copy path, or a URL."""
- # Try using "svn info WCDIR". This works only on SVN clients >= 1.3
- if not is_url(target):
- try:
- info = get_svninfo(target)
- root = info["Repository Root"]
- _cache_reporoot[root] = None
- return root
- except KeyError:
- pass
- url = target_to_url(target)
- assert url[-1] != '/'
- else:
- url = target
-
- # Go through the cache of the repository roots. This avoids extra
- # server round-trips if we are asking the root of different URLs
- # in the same repository (the cache in get_svninfo() cannot detect
- # that of course and would issue a remote command).
- assert is_url(url)
- for r in _cache_reporoot:
- if url.startswith(r):
- return r
-
- # Try using "svn info URL". This works only on SVN clients >= 1.2
- try:
- info = get_svninfo(url)
- root = info["Repository Root"]
- _cache_reporoot[root] = None
- return root
- except LaunchError:
- pass
-
- # Constrained to older svn clients, we are stuck with this ugly
- # trial-and-error implementation. It could be made faster with a
- # binary search.
- while url:
- temp = os.path.dirname(url)
- try:
- launchsvn('proplist "%s"' % temp)
- except LaunchError:
- _cache_reporoot[url] = None
- return url
- url = temp
-
- assert False, "svn repos root not found"
-
-def target_to_pathid(target):
- """Convert a target (either a working copy path or an URL) into a
- path identifier."""
- root = get_repo_root(target)
- url = target_to_url(target)
- assert root[-1] != "/"
- assert url[:len(root)] == root, "url=%r, root=%r" % (url, root)
- return url[len(root):]
-
-class SvnLogParser:
- """
- Parse the "svn log", going through the XML output and using pulldom (which
- would even allow streaming the command output).
- """
- def __init__(self, xml):
- self._events = pulldom.parseString(xml)
- def __getitem__(self, idx):
- for event, node in self._events:
- if event == pulldom.START_ELEMENT and node.tagName == "logentry":
- self._events.expandNode(node)
- return self.SvnLogRevision(node)
- raise IndexError, "Could not find 'logentry' tag in xml"
-
- class SvnLogRevision:
- def __init__(self, xmlnode):
- self.n = xmlnode
- def revision(self):
- return int(self.n.getAttribute("revision"))
- def author(self):
- return self.n.getElementsByTagName("author")[0].firstChild.data
- def paths(self):
- return [self.SvnLogPath(n)
- for n in self.n.getElementsByTagName("path")]
-
- class SvnLogPath:
- def __init__(self, xmlnode):
- self.n = xmlnode
- def action(self):
- return self.n.getAttribute("action")
- def pathid(self):
- return self.n.firstChild.data
- def copyfrom_rev(self):
- try: return self.n.getAttribute("copyfrom-rev")
- except KeyError: return None
- def copyfrom_pathid(self):
- try: return self.n.getAttribute("copyfrom-path")
- except KeyError: return None
-
-def get_copyfrom(target):
- """Get copyfrom info for a given target (it represents the directory from
- where it was branched). NOTE: repos root has no copyfrom info. In this case
- None is returned.
-
- Returns the:
- - source file or directory from which the copy was made
- - revision from which that source was copied
- - revision in which the copy was committed
- """
- repos_path = target_to_pathid(target)
- for chg in SvnLogParser(launchsvn('log -v --xml --stop-on-copy "%s"'
- % target, split_lines=False)):
- for p in chg.paths():
- if p.action() == 'A' and p.pathid() == repos_path:
- # These values will be None if the corresponding elements are
- # not found in the log.
- return p.copyfrom_pathid(), p.copyfrom_rev(), chg.revision()
- return None,None,None
-
-def get_latest_rev(url):
- """Get the latest revision of the repository of which URL is part."""
- try:
- return get_svninfo(url)["Revision"]
- except LaunchError:
- # Alternative method for latest revision checking (for svn < 1.2)
- report('checking latest revision of "%s"' % url)
- L = launchsvn('proplist --revprop -r HEAD "%s"' % opts["source-url"])[0]
- rev = re.search("revision (\d+)", L).group(1)
- report('latest revision of "%s" is %s' % (url, rev))
- return rev
-
-def get_created_rev(url):
- """Lookup the revision at which the path identified by the
- provided URL was first created."""
- oldest_rev = -1
- report('determining oldest revision for URL "%s"' % url)
- ### TODO: Refactor this to use a modified RevisionLog class.
- lines = None
- cmd = "log -r1:HEAD --stop-on-copy -q " + url
- try:
- lines = launchsvn(cmd + " --limit=1")
- except LaunchError:
- # Assume that --limit isn't supported by the installed 'svn'.
- lines = launchsvn(cmd)
- if lines and len(lines) > 1:
- i = lines[1].find(" ")
- if i != -1:
- oldest_rev = int(lines[1][1:i])
- if oldest_rev == -1:
- error('unable to determine oldest revision for URL "%s"' % url)
- return oldest_rev
-
-def get_commit_log(url, revnum):
- """Return the log message for a specific integer revision
- number."""
- out = launchsvn("log --incremental -r%d %s" % (revnum, url))
- return recode_stdout_to_file("".join(out[1:]))
-
-def construct_merged_log_message(url, revnums):
- """Return a commit log message containing all the commit messages
- in the specified revisions at the given URL. The separator used
- in this log message is determined by searching for the longest
- svnmerge separator existing in the commit log messages and
- extending it by one more separator. This results in a new commit
- log message that is clearer in describing merges that contain
- other merges. Trailing newlines are removed from the embedded
- log messages."""
- messages = ['']
- longest_sep = ''
- for r in revnums.sorted():
- message = get_commit_log(url, r)
- if message:
- message = re.sub(r'(\r\n|\r|\n)', "\n", message)
- message = rstrip(message, "\n") + "\n"
- messages.append(prefix_lines(LOG_LINE_PREFIX, message))
- for match in LOG_SEPARATOR_RE.findall(message):
- sep = match[1]
- if len(sep) > len(longest_sep):
- longest_sep = sep
-
- longest_sep += LOG_SEPARATOR + "\n"
- messages.append('')
- return longest_sep.join(messages)
-
-def get_default_source(branch_target, branch_props):
- """Return the default source for branch_target (given its branch_props).
- Error out if there is ambiguity."""
- if not branch_props:
- error("no integration info available")
-
- props = branch_props.copy()
- pathid = target_to_pathid(branch_target)
-
- # To make bidirectional merges easier, find the target's
- # repository local path so it can be removed from the list of
- # possible integration sources.
- if props.has_key(pathid):
- del props[pathid]
-
- if len(props) > 1:
- err_msg = "multiple sources found. "
- err_msg += "Explicit source argument (-S/--source) required.\n"
- err_msg += "The merge sources available are:"
- for prop in props:
- err_msg += "\n " + prop
- error(err_msg)
-
- return props.keys()[0]
-
-def check_old_prop_version(branch_target, branch_props):
- """Check if branch_props (of branch_target) are svnmerge properties in
- old format, and emit an error if so."""
-
- # Previous svnmerge versions allowed trailing /'s in the repository
- # local path. Newer versions of svnmerge will trim trailing /'s
- # appearing in the command line, so if there are any properties with
- # trailing /'s, they will not be properly matched later on, so require
- # the user to change them now.
- fixed = {}
- changed = False
- for source, revs in branch_props.items():
- src = rstrip(source, "/")
- fixed[src] = revs
- if src != source:
- changed = True
-
- if changed:
- err_msg = "old property values detected; an upgrade is required.\n\n"
- err_msg += "Please execute and commit these changes to upgrade:\n\n"
- err_msg += 'svn propset "%s" "%s" "%s"' % \
- (opts["prop"], format_merge_props(fixed), branch_target)
- error(err_msg)
-
-def should_find_reflected(branch_dir):
- should_find_reflected = opts["bidirectional"]
-
- # If the source has integration info for the target, set find_reflected
- # even if --bidirectional wasn't specified
- if not should_find_reflected:
- source_props = get_merge_props(opts["source-url"])
- should_find_reflected = source_props.has_key(target_to_pathid(branch_dir))
-
- return should_find_reflected
-
-def analyze_revs(target_pathid, url, begin=1, end=None,
- find_reflected=False):
- """For the source of the merges in the source URL being merged into
- target_pathid, analyze the revisions in the interval begin-end (which
- defaults to 1-HEAD), to find out which revisions are changes in
- the url, which are changes elsewhere (so-called 'phantom'
- revisions), optionally which are reflected changes (to avoid
- conflicts that can occur when doing bidirectional merging between
- branches), and which revisions initialize merge tracking against other
- branches. Return a tuple of four RevisionSet's:
- (real_revs, phantom_revs, reflected_revs, initialized_revs).
-
- NOTE: To maximize speed, if "end" is not provided, the function is
- not able to find phantom revisions following the last real
- revision in the URL.
- """
-
- begin = str(begin)
- if end is None:
- end = "HEAD"
- else:
- end = str(end)
- if long(begin) > long(end):
- return RevisionSet(""), RevisionSet(""), \
- RevisionSet(""), RevisionSet("")
-
- logs[url] = RevisionLog(url, begin, end, find_reflected)
- revs = RevisionSet(logs[url].revs)
-
- if end == "HEAD":
- # If end is not provided, we do not know which is the latest revision
- # in the repository. So return the phantom revision set only up to
- # the latest known revision.
- end = str(list(revs)[-1])
-
- phantom_revs = RevisionSet("%s-%s" % (begin, end)) - revs
-
- if find_reflected:
- reflected_revs = logs[url].merge_metadata().changed_revs(target_pathid)
- reflected_revs += logs[url].block_metadata().changed_revs(target_pathid)
- else:
- reflected_revs = []
-
- initialized_revs = RevisionSet(logs[url].merge_metadata().initialized_revs())
- reflected_revs = RevisionSet(reflected_revs)
-
- return revs, phantom_revs, reflected_revs, initialized_revs
-
-def analyze_source_revs(branch_target, source_url, **kwargs):
- """For the given branch and source, extract the real and phantom
- source revisions."""
- branch_url = target_to_url(branch_target)
- branch_pathid = target_to_pathid(branch_target)
-
- # Extract the latest repository revision from the URL of the branch
- # directory (which is already cached at this point).
- end_rev = get_latest_rev(source_url)
-
- # Calculate the base of analysis. If there is a "1-XX" interval in the
- # merged_revs, we do not need to check those.
- base = 1
- r = opts["merged-revs"].normalized()
- if r and r[0][0] == 1:
- base = r[0][1] + 1
-
- # See if the user filtered the revision set. If so, we are not
- # interested in something outside that range.
- if opts["revision"]:
- revs = RevisionSet(opts["revision"]).sorted()
- if base < revs[0]:
- base = revs[0]
- if end_rev > revs[-1]:
- end_rev = revs[-1]
-
- return analyze_revs(branch_pathid, source_url, base, end_rev, **kwargs)
-
-def minimal_merge_intervals(revs, phantom_revs):
- """Produce the smallest number of intervals suitable for merging. revs
- is the RevisionSet which we want to merge, and phantom_revs are phantom
- revisions which can be used to concatenate intervals, thus minimizing the
- number of operations."""
- revnums = revs.normalized()
- ret = []
-
- cur = revnums.pop()
- while revnums:
- next = revnums.pop()
- assert next[1] < cur[0] # otherwise it is not ordered
- assert cur[0] - next[1] > 1 # otherwise it is not normalized
- for i in range(next[1]+1, cur[0]):
- if i not in phantom_revs:
- ret.append(cur)
- cur = next
- break
- else:
- cur = (next[0], cur[1])
-
- ret.append(cur)
- ret.reverse()
- return ret
-
-def display_revisions(revs, display_style, revisions_msg, source_url):
- """Show REVS as dictated by DISPLAY_STYLE, either numerically, in
- log format, or as diffs. When displaying revisions numerically,
- prefix output with REVISIONS_MSG when in verbose mode. Otherwise,
- request logs or diffs using SOURCE_URL."""
- if display_style == "revisions":
- if revs:
- report(revisions_msg)
- print revs
- elif display_style == "logs":
- for start,end in revs.normalized():
- svn_command('log --incremental -v -r %d:%d %s' % \
- (start, end, source_url))
- elif display_style in ("diffs", "summarize"):
- if display_style == 'summarize':
- summarize = '--summarize '
- else:
- summarize = ''
-
- for start, end in revs.normalized():
- print
- if start == end:
- print "%s: changes in revision %d follow" % (NAME, start)
- else:
- print "%s: changes in revisions %d-%d follow" % (NAME,
- start, end)
- print
-
- # Note: the starting revision number to 'svn diff' is
- # NOT inclusive so we have to subtract one from ${START}.
- svn_command("diff -r %d:%d %s %s" % (start - 1, end, summarize,
- source_url))
- else:
- assert False, "unhandled display style: %s" % display_style
-
-def action_init(target_dir, target_props):
- """Initialize for merges."""
- # Check that directory is ready for being modified
- check_dir_clean(target_dir)
-
- # If the user hasn't specified the revisions to use, see if the
- # "source" is a copy from the current tree and if so, we can use
- # the version data obtained from it.
- revision_range = opts["revision"]
- if not revision_range:
- # Determining a default endpoint for the revision range that "init"
- # will use, since none was provided by the user.
- cf_source, cf_rev, copy_committed_in_rev = \
- get_copyfrom(opts["source-url"])
- target_path = target_to_pathid(target_dir)
-
- if target_path == cf_source:
- # If source was originally copyied from target, and we are merging
- # changes from source to target (the copy target is the merge
- # source, and the copy source is the merge target), then we want to
- # mark as integrated up to the rev in which the copy was committed
- # which created the merge source:
- report('the source "%s" is a branch of "%s"' %
- (opts["source-url"], target_dir))
- revision_range = "1-" + str(copy_committed_in_rev)
- else:
- # If the copy source is the merge source, and
- # the copy target is the merge target, then we want to
- # mark as integrated up to the specific rev of the merge
- # target from which the merge source was copied. Longer
- # discussion here:
- # http://subversion.tigris.org/issues/show_bug.cgi?id=2810
- target_url = target_to_url(target_dir)
- source_path = target_to_pathid(opts["source-url"])
- cf_source_path, cf_rev, copy_committed_in_rev = get_copyfrom(target_url)
- if source_path == cf_source_path:
- report('the merge source "%s" is the copy source of "%s"' %
- (opts["source-url"], target_dir))
- revision_range = "1-" + cf_rev
-
- # When neither the merge source nor target is a copy of the other, and
- # the user did not specify a revision range, then choose a default which is
- # the current revision; saying, in effect, "everything has been merged, so
- # mark as integrated up to the latest rev on source url).
- revs = revision_range or "1-" + get_latest_rev(opts["source-url"])
- revs = RevisionSet(revs)
-
- report('marking "%s" as already containing revisions "%s" of "%s"' %
- (target_dir, revs, opts["source-url"]))
-
- revs = str(revs)
- # If the local svnmerge-integrated property already has an entry
- # for the source-pathid, simply error out.
- if not opts["force"] and target_props.has_key(opts["source-pathid"]):
- error('Repository-relative path %s has already been initialized at %s\n'
- 'Use --force to re-initialize'
- % (opts["source-pathid"], target_dir))
- target_props[opts["source-pathid"]] = revs
-
- # Set property
- set_merge_props(target_dir, target_props)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Initialized merge tracking via "%s" with revisions "%s" from ' \
- % (NAME, revs)
- print >>f, '%s' % opts["source-url"]
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_avail(branch_dir, branch_props):
- """Show commits available for merges."""
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"],
- find_reflected=
- should_find_reflected(branch_dir))
- report('skipping phantom revisions: %s' % phantom_revs)
- if reflected_revs:
- report('skipping reflected revisions: %s' % reflected_revs)
- report('skipping initialized revisions: %s' % initialized_revs)
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- avail_revs = source_revs - opts["merged-revs"] - blocked_revs - \
- reflected_revs - initialized_revs
-
- # Compose the set of revisions to show
- revs = RevisionSet("")
- report_msg = "revisions available to be merged are:"
- if "avail" in opts["avail-showwhat"]:
- revs |= avail_revs
- if "blocked" in opts["avail-showwhat"]:
- revs |= blocked_revs
- report_msg = "revisions blocked are:"
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs = revs & RevisionSet(opts["revision"])
-
- display_revisions(revs, opts["avail-display"],
- report_msg,
- opts["source-url"])
-
-def action_integrated(branch_dir, branch_props):
- """Show change sets already merged. This set of revisions is
- calculated from taking svnmerge-integrated property from the
- branch, and subtracting any revision older than the branch
- creation revision."""
- # Extract the integration info for the branch_dir
- branch_props = get_merge_props(branch_dir)
- check_old_prop_version(branch_dir, branch_props)
- revs = merge_props_to_revision_set(branch_props, opts["source-pathid"])
-
- # Lookup the oldest revision on the branch path.
- oldest_src_rev = get_created_rev(opts["source-url"])
-
- # Subtract any revisions which pre-date the branch.
- report("subtracting revisions which pre-date the source URL (%d)" %
- oldest_src_rev)
- revs = revs - RevisionSet(range(1, oldest_src_rev))
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs = revs & RevisionSet(opts["revision"])
-
- display_revisions(revs, opts["integrated-display"],
- "revisions already integrated are:", opts["source-url"])
-
-def action_merge(branch_dir, branch_props):
- """Record merge meta data, and do the actual merge (if not
- requested otherwise via --record-only)."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"],
- find_reflected=
- should_find_reflected(branch_dir))
-
- if opts["revision"]:
- revs = RevisionSet(opts["revision"])
- else:
- revs = source_revs
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- merged_revs = opts["merged-revs"]
-
- # Show what we're doing
- if opts["verbose"]: # just to avoid useless calculations
- if merged_revs & revs:
- report('"%s" already contains revisions %s' % (branch_dir,
- merged_revs & revs))
- if phantom_revs:
- report('memorizing phantom revision(s): %s' % phantom_revs)
- if reflected_revs:
- report('memorizing reflected revision(s): %s' % reflected_revs)
- if blocked_revs & revs:
- report('skipping blocked revisions(s): %s' % (blocked_revs & revs))
- if initialized_revs:
- report('skipping initialized revision(s): %s' % initialized_revs)
-
- # Compute final merge set.
- revs = revs - merged_revs - blocked_revs - reflected_revs - \
- phantom_revs - initialized_revs
- if not revs:
- report('no revisions to merge, exiting')
- return
-
- # When manually marking revisions as merged, we only update the
- # integration meta data, and don't perform an actual merge.
- record_only = opts["record-only"]
-
- if record_only:
- report('recording merge of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
- else:
- report('merging in revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
-
- # Do the merge(s). Note: the starting revision number to 'svn merge'
- # is NOT inclusive so we have to subtract one from start.
- # We try to keep the number of merge operations as low as possible,
- # because it is faster and reduces the number of conflicts.
- old_block_props = get_block_props(branch_dir)
- merge_metadata = logs[opts["source-url"]].merge_metadata()
- block_metadata = logs[opts["source-url"]].block_metadata()
- for start,end in minimal_merge_intervals(revs, phantom_revs):
- if not record_only:
- # Preset merge/blocked properties to the source value at
- # the start rev to avoid spurious property conflicts
- set_merge_props(branch_dir, merge_metadata.get(start - 1))
- set_block_props(branch_dir, block_metadata.get(start - 1))
- # Do the merge
- svn_command("merge --force -r %d:%d %s %s" % \
- (start - 1, end, opts["source-url"], branch_dir))
- # TODO: to support graph merging, add logic to merge the property
- # meta-data manually
-
- # Update the set of merged revisions.
- merged_revs = merged_revs | revs | reflected_revs | phantom_revs | initialized_revs
- branch_props[opts["source-pathid"]] = str(merged_revs)
- set_merge_props(branch_dir, branch_props)
- # Reset the blocked revs
- set_block_props(branch_dir, old_block_props)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- if record_only:
- print >>f, 'Recorded merge of revisions %s via %s from ' % \
- (revs, NAME)
- else:
- print >>f, 'Merged revisions %s via %s from ' % \
- (revs, NAME)
- print >>f, '%s' % opts["source-url"]
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"], revs),
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_block(branch_dir, branch_props):
- """Block revisions."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"])
- revs_to_block = source_revs - opts["merged-revs"]
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs_to_block = RevisionSet(opts["revision"]) & revs_to_block
-
- if not revs_to_block:
- error('no available revisions to block')
-
- # Change blocked information
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- blocked_revs = blocked_revs | revs_to_block
- set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Blocked revisions %s via %s' % (revs_to_block, NAME)
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"],
- revs_to_block),
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_unblock(branch_dir, branch_props):
- """Unblock revisions."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- revs_to_unblock = blocked_revs
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs_to_unblock = revs_to_unblock & RevisionSet(opts["revision"])
-
- if not revs_to_unblock:
- error('no available revisions to unblock')
-
- # Change blocked information
- blocked_revs = blocked_revs - revs_to_unblock
- set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Unblocked revisions %s via %s' % (revs_to_unblock, NAME)
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"],
- revs_to_unblock),
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_rollback(branch_dir, branch_props):
- """Rollback previously integrated revisions."""
-
- # Make sure the revision arguments are present
- if not opts["revision"]:
- error("The '-r' option is mandatory for rollback")
-
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- # Extract the integration info for the branch_dir
- branch_props = get_merge_props(branch_dir)
- check_old_prop_version(branch_dir, branch_props)
- # Get the list of all revisions already merged into this source-pathid.
- merged_revs = merge_props_to_revision_set(branch_props,
- opts["source-pathid"])
-
- # At which revision was the src created?
- oldest_src_rev = get_created_rev(opts["source-url"])
- src_pre_exist_range = RevisionSet("1-%d" % oldest_src_rev)
-
- # Limit to revisions specified by -r (if any)
- revs = merged_revs & RevisionSet(opts["revision"])
-
- # make sure there's some revision to rollback
- if not revs:
- report("Nothing to rollback in revision range r%s" % opts["revision"])
- return
-
- # If even one specified revision lies outside the lifetime of the
- # merge source, error out.
- if revs & src_pre_exist_range:
- err_str = "Specified revision range falls out of the rollback range.\n"
- err_str += "%s was created at r%d" % (opts["source-pathid"],
- oldest_src_rev)
- error(err_str)
-
- record_only = opts["record-only"]
-
- if record_only:
- report('recording rollback of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
- else:
- report('rollback of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
-
- # Do the reverse merge(s). Note: the starting revision number
- # to 'svn merge' is NOT inclusive so we have to subtract one from start.
- # We try to keep the number of merge operations as low as possible,
- # because it is faster and reduces the number of conflicts.
- rollback_intervals = minimal_merge_intervals(revs, [])
- # rollback in the reverse order of merge
- rollback_intervals.reverse()
- for start, end in rollback_intervals:
- if not record_only:
- # Do the merge
- svn_command("merge --force -r %d:%d %s %s" % \
- (end, start - 1, opts["source-url"], branch_dir))
-
- # Write out commit message if desired
- # calculate the phantom revs first
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- if record_only:
- print >>f, 'Recorded rollback of revisions %s via %s from ' % \
- (revs , NAME)
- else:
- print >>f, 'Rolled back revisions %s via %s from ' % \
- (revs , NAME)
- print >>f, '%s' % opts["source-url"]
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
- # Update the set of merged revisions.
- merged_revs = merged_revs - revs
- branch_props[opts["source-pathid"]] = str(merged_revs)
- set_merge_props(branch_dir, branch_props)
-
-def action_uninit(branch_dir, branch_props):
- """Uninit SOURCE URL."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- # If the source-pathid does not have an entry in the svnmerge-integrated
- # property, simply error out.
- if not branch_props.has_key(opts["source-pathid"]):
- error('Repository-relative path "%s" does not contain merge '
- 'tracking information for "%s"' \
- % (opts["source-pathid"], branch_dir))
-
- del branch_props[opts["source-pathid"]]
-
- # Set merge property with the selected source deleted
- set_merge_props(branch_dir, branch_props)
-
- # Set blocked revisions for the selected source to None
- set_blocked_revs(branch_dir, opts["source-pathid"], None)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Removed merge tracking for "%s" for ' % NAME
- print >>f, '%s' % opts["source-url"]
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-###############################################################################
-# Command line parsing -- options and commands management
-###############################################################################
-
-class OptBase:
- def __init__(self, *args, **kwargs):
- self.help = kwargs["help"]
- del kwargs["help"]
- self.lflags = []
- self.sflags = []
- for a in args:
- if a.startswith("--"): self.lflags.append(a)
- elif a.startswith("-"): self.sflags.append(a)
- else:
- raise TypeError, "invalid flag name: %s" % a
- if kwargs.has_key("dest"):
- self.dest = kwargs["dest"]
- del kwargs["dest"]
- else:
- if not self.lflags:
- raise TypeError, "cannot deduce dest name without long options"
- self.dest = self.lflags[0][2:]
- if kwargs:
- raise TypeError, "invalid keyword arguments: %r" % kwargs.keys()
- def repr_flags(self):
- f = self.sflags + self.lflags
- r = f[0]
- for fl in f[1:]:
- r += " [%s]" % fl
- return r
-
-class Option(OptBase):
- def __init__(self, *args, **kwargs):
- self.default = kwargs.setdefault("default", 0)
- del kwargs["default"]
- self.value = kwargs.setdefault("value", None)
- del kwargs["value"]
- OptBase.__init__(self, *args, **kwargs)
- def apply(self, state, value):
- assert value == ""
- if self.value is not None:
- state[self.dest] = self.value
- else:
- state[self.dest] += 1
-
-class OptionArg(OptBase):
- def __init__(self, *args, **kwargs):
- self.default = kwargs["default"]
- del kwargs["default"]
- self.metavar = kwargs.setdefault("metavar", None)
- del kwargs["metavar"]
- OptBase.__init__(self, *args, **kwargs)
-
- if self.metavar is None:
- if self.dest is not None:
- self.metavar = self.dest.upper()
- else:
- self.metavar = "arg"
- if self.default:
- self.help += " (default: %s)" % self.default
- def apply(self, state, value):
- assert value is not None
- state[self.dest] = value
- def repr_flags(self):
- r = OptBase.repr_flags(self)
- return r + " " + self.metavar
-
-class CommandOpts:
- class Cmd:
- def __init__(self, *args):
- self.name, self.func, self.usage, self.help, self.opts = args
- def short_help(self):
- return self.help.split(".")[0]
- def __str__(self):
- return self.name
- def __call__(self, *args, **kwargs):
- return self.func(*args, **kwargs)
-
- def __init__(self, global_opts, common_opts, command_table, version=None):
- self.progname = NAME
- self.version = version.replace("%prog", self.progname)
- self.cwidth = console_width() - 2
- self.ctable = command_table.copy()
- self.gopts = global_opts[:]
- self.copts = common_opts[:]
- self._add_builtins()
- for k in self.ctable.keys():
- cmd = self.Cmd(k, *self.ctable[k])
- opts = []
- for o in cmd.opts:
- if isinstance(o, types.StringType) or \
- isinstance(o, types.UnicodeType):
- o = self._find_common(o)
- opts.append(o)
- cmd.opts = opts
- self.ctable[k] = cmd
-
- def _add_builtins(self):
- self.gopts.append(
- Option("-h", "--help", help="show help for this command and exit"))
- if self.version is not None:
- self.gopts.append(
- Option("-V", "--version", help="show version info and exit"))
- self.ctable["help"] = (self._cmd_help,
- "help [COMMAND]",
- "Display help for a specific command. If COMMAND is omitted, "
- "display brief command description.",
- [])
-
- def _cmd_help(self, cmd=None, *args):
- if args:
- self.error("wrong number of arguments", "help")
- if cmd is not None:
- cmd = self._command(cmd)
- self.print_command_help(cmd)
- else:
- self.print_command_list()
-
- def _paragraph(self, text, width=78):
- chunks = re.split("\s+", text.strip())
- chunks.reverse()
- lines = []
- while chunks:
- L = chunks.pop()
- while chunks and len(L) + len(chunks[-1]) + 1 <= width:
- L += " " + chunks.pop()
- lines.append(L)
- return lines
-
- def _paragraphs(self, text, *args, **kwargs):
- pars = text.split("\n\n")
- lines = self._paragraph(pars[0], *args, **kwargs)
- for p in pars[1:]:
- lines.append("")
- lines.extend(self._paragraph(p, *args, **kwargs))
- return lines
-
- def _print_wrapped(self, text, indent=0):
- text = self._paragraphs(text, self.cwidth - indent)
- print text.pop(0)
- for t in text:
- print " " * indent + t
-
- def _find_common(self, fl):
- for o in self.copts:
- if fl in o.lflags+o.sflags:
- return o
- assert False, fl
-
- def _compute_flags(self, opts, check_conflicts=True):
- back = {}
- sfl = ""
- lfl = []
- for o in opts:
- sapp = lapp = ""
- if isinstance(o, OptionArg):
- sapp, lapp = ":", "="
- for s in o.sflags:
- if check_conflicts and back.has_key(s):
- raise RuntimeError, "option conflict: %s" % s
- back[s] = o
- sfl += s[1:] + sapp
- for l in o.lflags:
- if check_conflicts and back.has_key(l):
- raise RuntimeError, "option conflict: %s" % l
- back[l] = o
- lfl.append(l[2:] + lapp)
- return sfl, lfl, back
-
- def _extract_command(self, args):
- """
- Try to extract the command name from the argument list. This is
- non-trivial because we want to allow command-specific options even
- before the command itself.
- """
- opts = self.gopts[:]
- for cmd in self.ctable.values():
- opts.extend(cmd.opts)
- sfl, lfl, _ = self._compute_flags(opts, check_conflicts=False)
-
- lopts,largs = getopt.getopt(args, sfl, lfl)
- if not largs:
- return None
- return self._command(largs[0])
-
- def _fancy_getopt(self, args, opts, state=None):
- if state is None:
- state= {}
- for o in opts:
- if not state.has_key(o.dest):
- state[o.dest] = o.default
-
- sfl, lfl, back = self._compute_flags(opts)
- try:
- lopts,args = getopt.gnu_getopt(args, sfl, lfl)
- except AttributeError:
- # Before Python 2.3, there was no gnu_getopt support.
- # So we can't parse intermixed positional arguments
- # and options.
- lopts,args = getopt.getopt(args, sfl, lfl)
-
- for o,v in lopts:
- back[o].apply(state, v)
- return state, args
-
- def _command(self, cmd):
- if not self.ctable.has_key(cmd):
- self.error("unknown command: '%s'" % cmd)
- return self.ctable[cmd]
-
- def parse(self, args):
- if not args:
- self.print_small_help()
- sys.exit(0)
-
- cmd = None
- try:
- cmd = self._extract_command(args)
- opts = self.gopts[:]
- if cmd:
- opts.extend(cmd.opts)
- args.remove(cmd.name)
- state, args = self._fancy_getopt(args, opts)
- except getopt.GetoptError, e:
- self.error(e, cmd)
-
- # Handle builtins
- if self.version is not None and state["version"]:
- self.print_version()
- sys.exit(0)
- if state["help"]: # special case for --help
- if cmd:
- self.print_command_help(cmd)
- sys.exit(0)
- cmd = self.ctable["help"]
- else:
- if cmd is None:
- self.error("command argument required")
- if str(cmd) == "help":
- cmd(*args)
- sys.exit(0)
- return cmd, args, state
-
- def error(self, s, cmd=None):
- print >>sys.stderr, "%s: %s" % (self.progname, s)
- if cmd is not None:
- self.print_command_help(cmd)
- else:
- self.print_small_help()
- sys.exit(1)
- def print_small_help(self):
- print "Type '%s help' for usage" % self.progname
- def print_usage_line(self):
- print "usage: %s <subcommand> [options...] [args...]\n" % self.progname
- def print_command_list(self):
- print "Available commands (use '%s help COMMAND' for more details):\n" \
- % self.progname
- cmds = self.ctable.keys()
- cmds.sort()
- indent = max(map(len, cmds))
- for c in cmds:
- h = self.ctable[c].short_help()
- print " %-*s " % (indent, c),
- self._print_wrapped(h, indent+6)
- def print_command_help(self, cmd):
- cmd = self.ctable[str(cmd)]
- print 'usage: %s %s\n' % (self.progname, cmd.usage)
- self._print_wrapped(cmd.help)
- def print_opts(opts, self=self):
- if not opts: return
- flags = [o.repr_flags() for o in opts]
- indent = max(map(len, flags))
- for f,o in zip(flags, opts):
- print " %-*s :" % (indent, f),
- self._print_wrapped(o.help, indent+5)
- print '\nCommand options:'
- print_opts(cmd.opts)
- print '\nGlobal options:'
- print_opts(self.gopts)
-
- def print_version(self):
- print self.version
-
-###############################################################################
-# Options and Commands description
-###############################################################################
-
# Options accepted by every subcommand.  Plain Option entries are
# boolean-style flags; OptionArg entries additionally consume a value
# (CommandOpts._compute_flags appends the getopt ":"/"=" markers).
global_opts = [
  Option("-F", "--force",
    help="force operation even if the working copy is not clean, or "
    "there are pending updates"),
  Option("-n", "--dry-run",
    help="don't actually change anything, just pretend; "
    "implies --show-changes"),
  Option("-s", "--show-changes",
    help="show subversion commands that make changes"),
  Option("-v", "--verbose",
    help="verbose mode: output more information about progress"),
  OptionArg("-u", "--username",
    default=None,
    help="invoke subversion commands with the supplied username"),
  OptionArg("-p", "--password",
    default=None,
    help="invoke subversion commands with the supplied password"),
]
-
# Option definitions shared by several subcommands.  A command's option
# list references these by flag string (e.g. "-r"); CommandOpts.__init__
# resolves such strings through _find_common.
common_opts = [
  Option("-b", "--bidirectional",
    value=True,
    default=False,
    help="remove reflected and initialized revisions from merge candidates. "
    "Not required but may be specified to speed things up slightly"),
  OptionArg("-f", "--commit-file", metavar="FILE",
    default="svnmerge-commit-message.txt",
    help="set the name of the file where the suggested log message "
    "is written to"),
  Option("-M", "--record-only",
    value=True,
    default=False,
    help="do not perform an actual merge of the changes, yet record "
    "that a merge happened"),
  OptionArg("-r", "--revision",
    metavar="REVLIST",
    default="",
    help="specify a revision list, consisting of revision numbers "
    'and ranges separated by commas, e.g., "534,537-539,540"'),
  OptionArg("-S", "--source", "--head",
    default=None,
    help="specify a merge source for this branch. It can be either "
    "a path, a full URL, or an unambiguous substring of one "
    "of the paths for which merge tracking was already "
    "initialized. Needed only to disambiguate in case of "
    "multiple merge sources"),
]
-
# Subcommand registry: name -> (handler, usage line, help text, option
# list).  CommandOpts.__init__ wraps each tuple in a Cmd object and
# resolves plain flag strings (e.g. "-r") against common_opts.
command_table = {
  "init": (action_init,
  "init [OPTION...] [SOURCE]",
  """Initialize merge tracking from SOURCE on the current working
  directory.

  If SOURCE is specified, all the revisions in SOURCE are marked as already
  merged; if this is not correct, you can use --revision to specify the
  exact list of already-merged revisions.

  If SOURCE is omitted, then it is computed from the "svn cp" history of the
  current working directory (searching back for the branch point); in this
  case, %s assumes that no revision has been integrated yet since
  the branch point (unless you teach it with --revision).""" % NAME,
  [
  "-f", "-r", # import common opts
  ]),

  "avail": (action_avail,
  "avail [OPTION...] [PATH]",
  """Show unmerged revisions available for PATH as a revision list.
  If --revision is given, the revisions shown will be limited to those
  also specified in the option.

  When svnmerge is used to bidirectionally merge changes between a
  branch and its source, it is necessary to not merge the same changes
  forth and back: e.g., if you committed a merge of a certain
  revision of the branch into the source, you do not want that commit
  to appear as available to merged into the branch (as the code
  originated in the branch itself!). svnmerge will automatically
  exclude these so-called "reflected" revisions.""",
  [
  Option("-A", "--all",
    dest="avail-showwhat",
    value=["blocked", "avail"],
    default=["avail"],
    help="show both available and blocked revisions (aka ignore "
    "blocked revisions)"),
  "-b",
  Option("-B", "--blocked",
    dest="avail-showwhat",
    value=["blocked"],
    help="show the blocked revision list (see '%s block')" % NAME),
  Option("-d", "--diff",
    dest="avail-display",
    value="diffs",
    default="revisions",
    help="show corresponding diff instead of revision list"),
  Option("--summarize",
    dest="avail-display",
    value="summarize",
    help="show summarized diff instead of revision list"),
  Option("-l", "--log",
    dest="avail-display",
    value="logs",
    help="show corresponding log history instead of revision list"),
  "-r",
  "-S",
  ]),

  "integrated": (action_integrated,
  "integrated [OPTION...] [PATH]",
  """Show merged revisions available for PATH as a revision list.
  If --revision is given, the revisions shown will be limited to
  those also specified in the option.""",
  [
  Option("-d", "--diff",
    dest="integrated-display",
    value="diffs",
    default="revisions",
    help="show corresponding diff instead of revision list"),
  Option("-l", "--log",
    dest="integrated-display",
    value="logs",
    help="show corresponding log history instead of revision list"),
  "-r",
  "-S",
  ]),

  "rollback": (action_rollback,
  "rollback [OPTION...] [PATH]",
  """Rollback previously merged in revisions from PATH.  The
  --revision option is mandatory, and specifies which revisions
  will be rolled back.  Only the previously integrated merges
  will be rolled back.

  When manually rolling back changes, --record-only can be used to
  instruct %s that a manual rollback of a certain revision
  already happened, so that it can record it and offer that
  revision for merge henceforth.""" % (NAME),
  [
  "-f", "-r", "-S", "-M", # import common opts
  ]),

  "merge": (action_merge,
  "merge [OPTION...] [PATH]",
  """Merge in revisions into PATH from its source. If --revision is omitted,
  all the available revisions will be merged. In any case, already merged-in
  revisions will NOT be merged again.

  When svnmerge is used to bidirectionally merge changes between a
  branch and its source, it is necessary to not merge the same changes
  forth and back: e.g., if you committed a merge of a certain
  revision of the branch into the source, you do not want that commit
  to appear as available to merged into the branch (as the code
  originated in the branch itself!). svnmerge will automatically
  exclude these so-called "reflected" revisions.

  When manually merging changes across branches, --record-only can
  be used to instruct %s that a manual merge of a certain revision
  already happened, so that it can record it and not offer that
  revision for merge anymore. Conversely, when there are revisions
  which should not be merged, use '%s block'.""" % (NAME, NAME),
  [
  "-b", "-f", "-r", "-S", "-M", # import common opts
  ]),

  "block": (action_block,
  "block [OPTION...] [PATH]",
  """Block revisions within PATH so that they disappear from the available
  list. This is useful to hide revisions which will not be integrated.
  If --revision is omitted, it defaults to all the available revisions.

  Do not use this option to hide revisions that were manually merged
  into the branch. Instead, use '%s merge --record-only', which
  records that a merge happened (as opposed to a merge which should
  not happen).""" % NAME,
  [
  "-f", "-r", "-S", # import common opts
  ]),

  "unblock": (action_unblock,
  "unblock [OPTION...] [PATH]",
  """Revert the effect of '%s block'. If --revision is omitted, all the
  blocked revisions are unblocked""" % NAME,
  [
  "-f", "-r", "-S", # import common opts
  ]),

  "uninit": (action_uninit,
  "uninit [OPTION...] [PATH]",
  """Remove merge tracking information from PATH. It cleans any kind of merge
  tracking information (including the list of blocked revisions). If there
  are multiple sources, use --source to indicate which source you want to
  forget about.""",
  [
  "-f", "-S", # import common opts
  ]),
}
-
-
def main(args):
  """Command-line entry point.

  Parses args into a subcommand plus options, resolves the merge source
  (from the explicit -S argument, the recorded merge properties, or —
  for "init" — the svn copyfrom history), and dispatches to the
  selected action with (branch_dir, branch_props).
  """
  # Module-level option state; presumably the action_* handlers read it
  # as a global since they only receive (branch_dir, branch_props).
  global opts

  # Initialize default options
  opts = default_opts.copy()
  logs.clear()

  optsparser = CommandOpts(global_opts, common_opts, command_table,
    version="%%prog r%s\n modified: %s\n\n"
    "Copyright (C) 2004,2005 Awarix Inc.\n"
    "Copyright (C) 2005, Giovanni Bajo"
    % (__revision__, __date__))

  cmd, args, state = optsparser.parse(args)
  opts.update(state)

  source = opts.get("source", None)
  branch_dir = "."

  # "init" takes an optional SOURCE positional; every other command
  # takes an optional PATH (the branch working directory).
  if str(cmd) == "init":
    if len(args) == 1:
      source = args[0]
    elif len(args) > 1:
      optsparser.error("wrong number of parameters", cmd)
  elif str(cmd) in command_table.keys():
    if len(args) == 1:
      branch_dir = args[0]
    elif len(args) > 1:
      optsparser.error("wrong number of parameters", cmd)
  else:
    assert False, "command not handled: %s" % cmd

  # Validate branch_dir
  if not is_wc(branch_dir):
    error('"%s" is not a subversion working directory' % branch_dir)

  # Extract the integration info for the branch_dir
  branch_props = get_merge_props(branch_dir)
  check_old_prop_version(branch_dir, branch_props)

  # Calculate source_url and source_path
  report("calculate source path for the branch")
  if not source:
    if str(cmd) == "init":
      cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(branch_dir)
      if not cf_source:
        error('no copyfrom info available. '
          'Explicit source argument (-S/--source) required.')
      opts["source-pathid"] = cf_source
      if not opts["revision"]:
        opts["revision"] = "1-" + cf_rev
    else:
      opts["source-pathid"] = get_default_source(branch_dir, branch_props)

    # (assumes pathid is a repository-relative-path)
    assert opts["source-pathid"][0] == '/'
    opts["source-url"] = get_repo_root(branch_dir) + opts["source-pathid"]
  else:
    # The source was given as a command line argument and is stored in
    # SOURCE.  Ensure that the specified source does not end in a /,
    # otherwise it's easy to have the same source path listed more
    # than once in the integrated version properties, with and without
    # trailing /'s.
    source = rstrip(source, "/")
    if not is_wc(source) and not is_url(source):
      # Check if it is a substring of a pathid recorded
      # within the branch properties.
      found = []
      for pathid in branch_props.keys():
        if pathid.find(source) > 0:
          found.append(pathid)
      if len(found) == 1:
        # (assumes pathid is a repository-relative-path)
        source = get_repo_root(branch_dir) + found[0]
      else:
        error('"%s" is neither a valid URL, nor an unambiguous '
          'substring of a repository path, nor a working directory'
          % source)

    source_pathid = target_to_pathid(source)
    if str(cmd) == "init" and \
      source_pathid == target_to_pathid("."):
      error("cannot init integration source path '%s'\n"
        "Its repository-relative path must differ from the "
        "repository-relative path of the current directory."
        % source_pathid)
    opts["source-pathid"] = source_pathid
    opts["source-url"] = target_to_url(source)

  # Sanity check source_url
  assert is_url(opts["source-url"])
  # SVN does not support non-normalized URL (and we should not
  # have created them)
  assert opts["source-url"].find("/..") < 0

  report('source is "%s"' % opts["source-url"])

  # Get previously merged revisions (except when command is init)
  if str(cmd) != "init":
    opts["merged-revs"] = merge_props_to_revision_set(branch_props,
      opts["source-pathid"])

  # Perform the action
  cmd(branch_dir, branch_props)
-
-
if __name__ == "__main__":
  try:
    main(sys.argv[1:])
  # Python 2 tuple-target except: unpacks the exception value into
  # (ret, cmd, out) — the failed command's exit code, command line,
  # and captured output.
  except LaunchError, (ret, cmd, out):
    err_msg = "command execution failed (exit code: %d)\n" % ret
    err_msg += cmd + "\n"
    err_msg += "".join(out)
    error(err_msg)
  except KeyboardInterrupt:
    # Avoid traceback on CTRL+C
    print "aborted by user"
    sys.exit(1)