* Fixed an issue with globbing the --test argument

* Added a custom 'mkdir' function which creates any missing parent
  directories and does not fail if the directory already exists
* Added output for 'SKIP' messages (it did not indicate WHY it was skipped)
* Added the ability to generate DB packages in the sync dir (instead of the
  cache dir) for testing downloads (set self.cachepkgs = False)
* Added pmtest.path for the full path to the package file
This commit is contained in:
Aaron Griffin 2007-03-05 18:06:12 +00:00
parent 714a414e72
commit 2caadb33bf
6 changed files with 34 additions and 15 deletions

View File

@ -36,7 +36,8 @@ def globTests(option, opt_str, value, parser):
globlist = []
# maintain the idx so we can modify rargs
while not parser.rargs[idx].startswith('-'):
while idx < len(parser.rargs) and \
not parser.rargs[idx].startswith('-'):
globlist += glob.glob(parser.rargs[idx])
idx += 1

View File

@ -219,8 +219,7 @@ def db_write(self, pkg):
"""
path = os.path.join(self.dbdir, self.treename, pkg.fullname())
if not os.path.isdir(path):
os.makedirs(path);
mkdir(path)
# desc
# for local db entries: name, version, desc, groups, url, license,
@ -332,7 +331,7 @@ def gensync(self, path):
mkdescfile(pkg.fullname(), pkg)
# Generate database archive
os.makedirs(path, 0755)
mkdir(path)
archive = os.path.join(path, "%s%s" % (self.treename, PM_EXT_DB))
os.system("tar zcf %s *" % archive)

View File

@ -33,6 +33,7 @@ class pmpkg:
"""
def __init__(self, name, version = "1.0-1"):
self.path = "" #the path of the generated package
# desc
self.name = name
self.version = version
@ -116,7 +117,7 @@ def makepkg(self, path):
A package archive is generated in the location 'path', based on the data
from the object.
"""
archive = os.path.join(path, self.filename())
self.path = os.path.join(path, self.filename())
curdir = os.getcwd()
tmpdir = tempfile.mkdtemp()
@ -172,8 +173,11 @@ def makepkg(self, path):
os.system("touch .FILELIST")
targets += " .FILELIST"
#safely create the dir
mkdir(os.path.dirname(self.path))
# Generate package archive
os.system("tar zcf %s %s" % (archive, targets))
os.system("tar zcf %s %s" % (self.path, targets))
os.chdir(curdir)
shutil.rmtree(tmpdir)

View File

@ -57,6 +57,7 @@ def check(self, root, retcode, localdb, files):
if not grep(os.path.join(root, LOGFILE), key):
success = 0
else:
print "PACMAN rule '%s' not found" % case
success = -1
elif kind == "PKG":
newpkg = localdb.db_read(key)
@ -100,6 +101,7 @@ def check(self, root, retcode, localdb, files):
if not found:
success = 0
else:
print "PKG rule '%s' not found" % case
success = -1
elif kind == "FILE":
filename = os.path.join(root, key)
@ -122,8 +124,10 @@ def check(self, root, retcode, localdb, files):
if not os.path.isfile("%s%s" % (filename, PM_PACSAVE)):
success = 0
else:
print "FILE rule '%s' not found" % case
success = -1
else:
print "Rule kind '%s' not found" % kind
success = -1
if self.false and success != -1:

View File

@ -38,6 +38,7 @@ def __init__(self, name, root):
self.name = name
self.testname = os.path.basename(name).replace('.py', '')
self.root = root
self.cachepkgs = True
def __str__(self):
return "name = %s\n" \
@ -130,14 +131,15 @@ def generate(self):
vprint("\t%s" % os.path.join(TMPDIR, pkg.filename()))
pkg.makepkg(tmpdir)
for key, value in self.db.iteritems():
if key == "local":
continue
if key == "local": continue
for pkg in value.pkgs:
archive = pkg.filename()
vprint("\t%s" % os.path.join(PM_CACHEDIR, archive))
pkg.makepkg(cachedir)
pkg.md5sum = getmd5sum(os.path.join(cachedir, archive))
pkg.csize = os.stat(os.path.join(cachedir, archive))[stat.ST_SIZE]
vprint("\t%s" % os.path.join(PM_CACHEDIR, pkg.filename()))
if self.cachepkgs:
pkg.makepkg(cachedir)
else:
pkg.makepkg(os.path.join(syncdir, value.treename))
pkg.md5sum = getmd5sum(pkg.path)
pkg.csize = os.stat(pkg.path)[stat.ST_SIZE]
# Populating databases
vprint(" Populating databases")
@ -151,8 +153,7 @@ def generate(self):
# Creating sync database archives
vprint(" Creating sync database archives")
for key, value in self.db.iteritems():
if key == "local":
continue
if key == "local": continue
archive = value.treename + PM_EXT_DB
vprint("\t" + os.path.join(SYNCREPO, archive))
value.gensync(os.path.join(syncdir, value.treename))

View File

@ -247,6 +247,16 @@ def grep(filename, pattern):
fd.close()
return found
def mkdir(dir):
    """Create directory *dir*, creating any missing parents first.

    Does nothing if the directory already exists; raises OSError if a
    regular file occupies the requested path.
    """
    if os.path.isdir(dir):
        return
    if os.path.isfile(dir):
        raise OSError("'%s' already exists and is not a directory" % dir)
    parent, leaf = os.path.split(dir)
    if parent:
        mkdir(parent)  # recurse so every ancestor exists before the leaf
    print("making dir %s" % leaf)
    if leaf:
        os.mkdir(dir)
# This module is a library of test helpers; executing it directly is a no-op.
if __name__ == "__main__":
    pass