import multiprocessing, os, subprocess

# Compile, build, and install CMake projects
class CMake:
    def __init__(self, sourceFolder, buildFolder, config):
        self.sourceFolder = os.path.abspath(sourceFolder)
        self.buildFolder = os.path.abspath(buildFolder)
        self.config = config

    def setEnv(self, name, val):
        '''Set the given environment variable, return the old value'''
        oldVal = os.getenv(name)
        os.environ[name] = val # unlike os.putenv, this also updates what os.getenv sees
        return oldVal

    def prependDirToEnv(self, name, dir, default):
        '''Prepend the given directory to the environment variable. If the variable is unset, dir is prepended to the default.
        Returns the old value.'''
        oldVal = os.getenv(name)
        oldPaths = default if oldVal is None else oldVal
        os.environ[name] = dir+':'+oldPaths
        return oldVal

    def restoreEnv(self, name, oldVal):
        '''Restore the environment variable to its previous value'''
        if oldVal is None:
            os.environ.pop(name, None)
        else:
            os.environ[name] = oldVal

    def build(self, reconfigure, waitAfterConfig):
        # Make sure we have a build directory
        if not os.path.exists(self.buildFolder): os.makedirs(self.buildFolder)
        os.chdir(self.buildFolder)
        # In case of reconfiguration, delete the cache file if it exists
        cacheFile = 'CMakeCache.txt'
        if os.path.exists(cacheFile) and reconfigure: os.remove(cacheFile)
        # Run cmake in the proper environment, then restore the old environment
        oldPKGConfigPath = self.setEnv('PKG_CONFIG_PATH', os.path.join(self.config['installDir'], 'lib', 'pkgconfig'))
        oldCMakePrefixPath = self.setEnv('CMAKE_PREFIX_PATH', self.config['installDir'])
        oldXDGDataDirs = self.prependDirToEnv('XDG_DATA_DIRS', os.path.join(self.config['installDir'], 'share'), '/usr/share')
        oldXDGConfigDirs = self.prependDirToEnv('XDG_CONFIG_DIRS', os.path.join(self.config['installDir'], 'etc', 'xdg'), '/etc/xdg')
        subprocess.check_call(['cmake', self.sourceFolder, '-DCMAKE_BUILD_TYPE='+self.config['buildType'],
            '-DCMAKE_INSTALL_PREFIX='+self.config['installDir']]+self.config.get('cmakeParameters', []))
        self.restoreEnv('PKG_CONFIG_PATH', oldPKGConfigPath)
        self.restoreEnv('CMAKE_PREFIX_PATH', oldCMakePrefixPath)
        self.restoreEnv('XDG_DATA_DIRS', oldXDGDataDirs)
        self.restoreEnv('XDG_CONFIG_DIRS', oldXDGConfigDirs)
        # if asked to do so, wait before building
        if waitAfterConfig:
            input('Configuration done. Hit "Enter" to build the project. ')
        # run compilation
        jobs = multiprocessing.cpu_count()+1
        subprocess.check_call(self.config.get('buildCmdPrefix', []) + ['make', '-j'+str(jobs)])
        # run installation
        subprocess.check_call(self.config.get('installCmdPrefix', []) + ['make', 'install', '-j'+str((jobs+1)//2)]) # jobs/2, rounded up
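# Illustrative use of the class above (the Project class further down is what actually
# drives it; the paths and config values here are made up):
#   cm = CMake('/home/user/src/foo', '/tmp/build/foo',
#              {'installDir': '/opt/testing', 'buildType': 'RelWithDebInfo'})
#   cm.build(reconfigure=False, waitAfterConfig=False)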
# if auto-debuild is available, provide a wrapper for it
try:
    import auto_debuild
    class AutoDebuild:
        def __init__(self, sourceFolder, buildFolder, config, vcs):
            self.sourceFolder = os.path.abspath(sourceFolder)
            self.buildFolder = os.path.abspath(buildFolder)
            self.debFolder = os.path.abspath(config['debDir'])
            self.config = config
            self.vcs = vcs

        def build(self, reconfigure, waitAfterConfig): # reconfigure is ignored (we always do a reconfiguration)
            # get version name
            versionName = self.config['versionName'] if 'versionName' in self.config else self.vcs.version()
            if versionName is None:
                raise Exception("VCS did not provide us with a proper version number, please provide one manually")
            # create auto-debuild configuration
            autoDebuildConfig = {
                'sourceName': self.config['name'],
                'buildSystem': self.config['buildSystem'],
                'debDir': self.debFolder,
                'buildDir': self.buildFolder,
                'name': self.config['debName'],
                'email': self.config['debEMail'],
                'version': versionName + self.config.get('versionSuffix', ''),
                'waitAfterConfig': waitAfterConfig,
            }
            # copy some more optional configuration
            for option in ('epoch', 'dbgPackage', 'section', 'withPython2', 'withSIP', 'binarySkipFiles', 'binaryInstallFiles',
                    'buildDepends', 'binaryDepends', 'binaryShims', 'binaryRecommends', 'binaryProvides', 'binaryConflicts', 'binaryBreaks',
                    'binaryReplaces', 'binaryBreaksReplaces',
                    'alternatives', 'cmakeParameters', 'automakeParameters', 'autogen'):
                if option in self.config:
                    autoDebuildConfig[option] = self.config[option]
            # create Debian files
            os.chdir(self.sourceFolder)
            auto_debuild.deleteDebianFolder()
            files = auto_debuild.createDebianFiles(autoDebuildConfig)
            # build package(s)
            auto_debuild.buildDebianPackage(autoDebuildConfig)
            # install package(s)
            if self.config.get('debInstall', True):
                subprocess.check_call(['sudo', 'dpkg', '--install'] + files)
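    # Illustrative project configuration for the Debian path (these are the Debian-related
    # keys read by this class, plus 'buildDeb', which Project below checks; the values are
    # made up):
    #   { 'name': 'libfoo', 'buildSystem': 'cmake', 'buildDeb': True,
    #     'debDir': '/home/user/debs', 'debName': 'John Doe', 'debEMail': 'john@example.org',
    #     'versionSuffix': '+local1', 'dbgPackage': True }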
except ImportError:
    #print("auto_debuild not found, disabling auto-debuild system")
    pass
# an entire Project
class Project:
    def __init__(self, folder, config):
        self.folder = folder
        self.name = config['name']
        # VCS
        vcsName = config['vcs']
        if vcsName == 'git':
            self.vcs = vcs.Git(self.sourceFolder(), config)
        elif vcsName == 'svn':
            self.vcs = vcs.SVN(self.sourceFolder(), config['url'])
        else:
            raise Exception("Unknown VCS type "+vcsName)
        # build system
        if config.get('buildDeb', False):
            self.buildSystem = build_system.AutoDebuild(self.sourceFolder(), self.buildFolder(), config, self.vcs)
        else:
            buildSystemName = config['buildSystem']
            if buildSystemName == 'cmake':
                self.buildSystem = build_system.CMake(self.sourceFolder(), self.buildFolder(), config)
            else:
                raise Exception("Unknown build system type "+buildSystemName)

    def sourceFolder(self):
        return os.path.join(self.folder, self.name)

    def buildFolder(self):
        # note: 'config' here refers to the global, top-level configuration, not the per-project one
        return os.path.join(config['buildDir'], self.sourceFolder())
# read command-line arguments
parser = argparse.ArgumentParser(description='Update and build a bunch of stuff')
# further options read below (--update, --reset-source, --reconfigure, --wait-after-config,
# --version-check, --resume-from, --config) are expected to be defined here as well
parser.add_argument('projects', nargs='*',
    help="Manually specify projects or folders to be built (project names take precedence)")
args = parser.parse_args()
if args.reset_source and not args.update:
- raise Exception("Can not reset sources without doing an update")
+ raise Exception("Can not reset sources without doing an update")
# load config as dictionary
config = vars(load_module('config', args.config))
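# For illustration only: a config module loaded this way might look roughly like the
# following (the keys are the ones this script reads; the concrete values are made up):
#
#   buildDir = '/tmp/build'
#   installDir = '/opt/testing'
#   buildType = 'RelWithDebInfo'
#   projects = [
#       { 'name': 'libs', 'projects': [
#           { 'name': 'libfoo', 'vcs': 'git', 'url': 'https://example.org/libfoo.git',
#             'version': 'origin/master', 'buildSystem': 'cmake' },
#       ]},
#   ]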
# copy all items from the super config that the sub config does not already set, except for those in the exclude list
def inherit(subConfig, superConfig, exclude = ('name', 'projects')):
    for name in superConfig.keys():
        if (name not in subConfig) and (name not in exclude):
            subConfig[name] = superConfig[name]
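# For example (illustrative values): after inherit(sub, top) with
#   sub = {'name': 'libfoo'} and top = {'name': 'libs', 'installDir': '/opt/testing', 'projects': [...]}
# sub has picked up 'installDir', while its own 'name' stays untouched and 'projects' is never copied.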
# populate the global project tables, returning the list of projects in the given folder
def loadProjects(config, folder=''):
    folderProjects = []
    for projectConfig in config['projects']:
        assert 'name' in projectConfig # everything must have a name
        inherit(projectConfig, config)
        if 'projects' in projectConfig: # a subpath
            folderProjects += loadProjects(projectConfig, os.path.join(folder, projectConfig['name']))
        else: # a proper project
            if projectConfig['name'] in allProjects:
                raise Exception("Duplicate project name "+projectConfig['name'])
            project = Project(folder, projectConfig)
            allProjects[projectConfig['name']] = project
            folderProjects.append(project)
    # store projects of this folder
    if folder in allFolders:
        raise Exception("Duplicate folder name "+folder)
    allFolders[folder] = folderProjects
    return folderProjects
# global tables of projects (these need to exist before loadProjects runs)
allProjects = {} # project name -> Project
allFolders = {} # folder path -> list of Projects in that folder
workProjects = [] # the projects selected for this run
# load available projects
loadProjects(config)
# get the base set of projects to process
if args.projects:
    for name in args.projects:
        if name in allProjects:
            workProjects.append(allProjects[name])
        elif name in allFolders:
            workProjects += allFolders[name]
        else:
            raise Exception("Project or folder %s does not exist" % name)
else:
    workProjects = list(allProjects.values()) # all the projects
# apply the "resume from"
if args.resume_from is not None:
    # find project index
    startIndex = 0
    while startIndex < len(workProjects):
        if workProjects[startIndex].name == args.resume_from:
            break # we found it
        else:
            startIndex += 1
    if startIndex >= len(workProjects): # project not found
        raise Exception("%s not found in list of projects to work on" % args.resume_from)
    # start here
    workProjects = workProjects[startIndex:]
# and do it!
for project in workProjects:
    try:
        if args.version_check:
            print("Checking project", project.sourceFolder())
            project.vcs.checkVersions()
        else:
            if args.update:
                print("Updating project", project.sourceFolder())
                project.vcs.update(mode = vcs.MODE_RESET if args.reset_source else vcs.MODE_REBASE)
            print("Building project", project.sourceFolder())
            project.buildSystem.build(reconfigure=args.reconfigure, waitAfterConfig=args.wait_after_config)
            print()
    except (subprocess.CalledProcessError, KeyboardInterrupt) as e: # for some exceptions, a stack trace is usually pointless
        print(file=sys.stderr)
        print(file=sys.stderr)
        if isinstance(e, KeyboardInterrupt): # str(e) would be the empty string
            print("Interrupted by user while processing %s" % (project.name), file=sys.stderr)
        else:
            print("Error while processing %s: %s" % (project.name, str(e)), file=sys.stderr)
        print(file=sys.stderr)
        sys.exit(1)
print("All operations successfully completed")
import os, subprocess, re
'''A VCS must have an "update" method with an optional "mode" parameter taking one of the three values below,
- a "version" method returning a version name (or None),
- and a "newVersions" method which checks for new versions and prints the result to standard output.'''
+a "version" method returning a version name (or None),
+and a "newVersions" method which checks for new versions and prints the result to standard output.'''
MODE_FETCH = 0
MODE_REBASE = 1
MODE_RESET = 2
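# Illustrative sketch only (not used by these scripts): a minimal VCS for a source tree that
# is managed by hand could implement the interface like this:
#
#   class Local:
#       def __init__(self, folder):
#           self.folder = os.path.abspath(folder)
#       def update(self, mode = MODE_REBASE):
#           pass # nothing to fetch or rebase
#       def version(self):
#           return None # let the caller supply a version name
#       def checkVersions(self):
#           print("Version checking not supported for local trees")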
def natural_sort_key(val):
    return [ (int(c) if c.isdigit() else c) for c in re.split('([0-9]+)', val) ]
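# For example: natural_sort_key('v1.10') == ['v', 1, '.', 10, ''] sorts after
# natural_sort_key('v1.9') == ['v', 1, '.', 9, ''], whereas plain string comparison would
# put 'v1.10' before 'v1.9'.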
def get_non_digit_prefix(val):
    return re.match('[^0-9]*', val).group(0)
class GitCommand:
    def __getattr__(self, name):
        def call(*args, get_stderr = False):
            cmd = ["git", name.replace('_', '-')] + list(args)
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT if get_stderr else None)
            return output.decode('utf-8').strip('\n')
        return call
git = GitCommand()
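# Attribute access on this object maps to git subcommands, with underscores turned into dashes.
# For example (illustrative calls):
#   git.rev_parse("HEAD")     runs  git rev-parse HEAD
#   git.checkout("master")    runs  git checkout master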
# Fetch updates from git
class Git:
    def __init__(self, folder, config):
        self.folder = os.path.abspath(folder)
        self.url = config['url']
        self.commit = config['version']

    def update(self, mode = MODE_REBASE):
        isBranch = (self.commit.startswith('origin/'))
        if isBranch:
            branchname = self.commit[len('origin/'):]
        else:
            branchname = "tag"
        # get us a git repository, and the "origin" remote
        if os.path.exists(self.folder):
            # load existing repo
            os.chdir(self.folder)
            git.remote("set-url", "origin", self.url) # make sure we use the current URL
        else:
            # create a new one
            os.makedirs(self.folder)
            os.chdir(self.folder)
            git.init()
            git.remote("add", "origin", self.url)
        git.fetch("origin")
        if mode == MODE_FETCH:
            return
        # create/find the correct branch
        if not git.branch("--list", branchname): # the branch does not yet exist
            git.branch(branchname, self.commit)
            if isBranch: # make sure we track the correct remote branch
                git.branch("-u", self.commit, branchname)
        # update it to the latest remote commit
        git.checkout(branchname, get_stderr=True)
        if mode == MODE_RESET:
            git.reset("--hard", self.commit)
        else:
            git.rebase(self.commit)
        # update submodules
        git.submodule("update", "--init", "--recursive", "--rebase")
        # done
        print("...done", end=' ')
        if git.rev_parse("HEAD") != git.rev_parse(self.commit):
            print("(keeping local patches around)", end=' ')
        print()

    def version(self):
        v = git.describe()
        return v[len(get_non_digit_prefix(v)):] # remove the non-digit prefix from v (so that it starts with a number)

    def checkVersions(self):
        self.update(mode = MODE_FETCH)
        currentVersion = git.describe()
        # get sorted list of tag names with the same non-digit prefix and higher version number
        tags = git.tag().split('\n')
        tags = [t for t in tags if get_non_digit_prefix(t) == get_non_digit_prefix(currentVersion) and natural_sort_key(t) > natural_sort_key(currentVersion)]
        if not tags: return
        tags.sort(key = natural_sort_key)
        print("Versions newer than "+currentVersion+" available:")
        print(tags)
# Fetch updates via SVN
class SVN:
    def __init__(self, folder, url):
        self.folder = os.path.abspath(folder)
        self.url = url

    def update(self, mode = MODE_REBASE):
        if mode == MODE_FETCH: raise Exception("Just fetching is not supported with SVN")
        if os.path.exists(self.folder):
            os.chdir(self.folder) # go into repository
            if mode == MODE_RESET: subprocess.check_call(['svn', 'revert', '-R', '.'])
            subprocess.check_call(['svn', 'switch', self.url]) # and update to the URL we got
        else:
            os.makedirs(self.folder) # if even the parent folder does not exist, svn fails
            subprocess.check_call(['svn', 'co', self.url, self.folder]) # just download it

    def version(self):
        return None

    def checkVersions(self):
        print("Version checking is not supported with SVN")