pactest: generate sync DB's in memory
Sync databases are no longer exploded onto the filesystem. Rework the logic used to generate our test databases so we can create them entirely in memory, without having to write the individual files to disk at all. The local database is unaffected.

Note that this change uncovered several shortcomings in libalpm parsing, which have since been temporarily patched around in this test suite:

* archive_fgets() did not properly handle a file that ended without a newline, and would silently drop the data on that final line.
* A sync database containing only the file entries, and not the directory entries, would fail to parse properly and could even cause segfaults in some cases.

Signed-off-by: Dan McGee <dan@archlinux.org>
parent 63335859d1
commit 624a878701
3 changed files with 45 additions and 45 deletions
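For context, the heart of the new approach is that tarfile can build a database archive from in-memory buffers rather than from files exploded on disk. The standalone sketch below is illustrative only and not part of the commit (the package name and entry contents are made up); it shows the same TarInfo/StringIO pattern the new generate() method uses, including the explicit directory entry that libalpm currently requires:

    import tarfile
    from StringIO import StringIO  # Python 2, as used throughout pactest

    # Hypothetical example data: one package directory holding a single 'desc' entry.
    pkgdir = "foo-1.0-1"
    entries = {"desc": "%NAME%\nfoo\n\n%VERSION%\n1.0-1\n"}

    tar = tarfile.open("test.db", "w:gz")

    # Add the directory entry first; libalpm currently needs it to read the DB.
    info = tarfile.TarInfo(pkgdir)
    info.type = tarfile.DIRTYPE
    tar.addfile(info)

    # Add each file entry straight from memory; nothing is written to disk
    # except the finished .db archive itself.
    for name, data in entries.iteritems():
        info = tarfile.TarInfo(pkgdir + "/" + name)
        info.size = len(data)
        tar.addfile(info, StringIO(data))

    tar.close()

Only the gzipped .db file ever touches the filesystem; the per-package desc/files entries exist only as strings.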
pmdb.py

@@ -17,6 +17,8 @@
 import os
+import shutil
+from StringIO import StringIO
 import tarfile
 
 import pmpkg
@@ -49,6 +51,7 @@ class pmdb(object):
     def __init__(self, treename, root):
         self.treename = treename
+        self.root = root
         self.pkgs = []
         self.option = {}
         if self.treename == "local":
@@ -56,9 +59,7 @@ class pmdb(object):
             self.dbfile = None
             self.is_local = True
         else:
-            self.dbdir = os.path.join(root, util.PM_SYNCDBPATH, treename)
-            # TODO: we should be doing this, don't need a sync db dir
-            #self.dbdir = None
+            self.dbdir = None
             self.dbfile = os.path.join(root, util.PM_SYNCDBPATH, treename + ".db")
             self.is_local = False
 
@@ -77,12 +78,11 @@ class pmdb(object):
         return pkg
 
     def db_read(self, name):
-        path = self.dbdir
-        if not os.path.isdir(path):
+        if not self.dbdir or not os.path.isdir(self.dbdir):
             return None
 
         dbentry = ""
-        for roots, dirs, files in os.walk(path):
+        for roots, dirs, files in os.walk(self.dbdir):
             for i in dirs:
                 [pkgname, pkgver, pkgrel] = i.rsplit("-", 2)
                 if pkgname == name:
@@ -90,7 +90,7 @@ class pmdb(object):
                     break
         if not dbentry:
             return None
-        path = os.path.join(path, dbentry)
+        path = os.path.join(self.dbdir, dbentry)
 
         [pkgname, pkgver, pkgrel] = dbentry.rsplit("-", 2)
         pkg = pmpkg.pmpkg(pkgname, pkgver + "-" + pkgrel)
@@ -179,9 +179,7 @@ class pmdb(object):
     # db_write is used to add both 'local' and 'sync' db entries
     #
     def db_write(self, pkg):
-        path = os.path.join(self.dbdir, pkg.fullname())
-        util.mkdir(path)
+        entry = {}
 
         # desc/depends type entries
         data = []
         make_section(data, "NAME", pkg.name)
@@ -209,32 +207,48 @@ class pmdb(object):
         make_section(data, "MD5SUM", pkg.md5sum)
         make_section(data, "PGPSIG", pkg.pgpsig)
 
-        filename = os.path.join(path, "desc")
-        util.mkfile(filename, "\n".join(data))
+        entry["desc"] = "\n".join(data) + "\n"
 
         # files and install
         if self.is_local:
             data = []
             make_section(data, "FILES", pkg.full_filelist())
             make_section(data, "BACKUP", pkg.local_backup_entries())
-            filename = os.path.join(path, "files")
-            util.mkfile(filename, "\n".join(data))
+            entry["files"] = "\n".join(data) + "\n"
 
         if any(pkg.install.values()):
-            filename = os.path.join(path, "install")
-            util.mkfile(filename, pkg.installfile())
+            entry["install"] = pkg.installfile() + "\n"
 
-    def gensync(self):
-        if not self.dbfile:
-            return
-        curdir = os.getcwd()
-        os.chdir(self.dbdir)
+        return entry
 
-        tar = tarfile.open(self.dbfile, "w:gz")
-        for i in os.listdir("."):
-            tar.add(i)
-        tar.close()
+    def generate(self):
+        pkg_entries = [(pkg, self.db_write(pkg)) for pkg in self.pkgs]
 
-        os.chdir(curdir)
+        if self.dbdir:
+            for pkg, entry in pkg_entries:
+                path = os.path.join(self.dbdir, pkg.fullname())
+                util.mkdir(path)
+                for name, data in entry.iteritems():
+                    filename = os.path.join(path, name)
+                    util.mkfile(filename, data)
+
+        if self.dbfile:
+            tar = tarfile.open(self.dbfile, "w:gz")
+            for pkg, entry in pkg_entries:
+                # TODO: the addition of the directory is currently a
+                # requirement for successful reading of a DB by libalpm
+                info = tarfile.TarInfo(pkg.fullname())
+                info.type = tarfile.DIRTYPE
+                tar.addfile(info)
+                for name, data in entry.iteritems():
+                    filename = os.path.join(pkg.fullname(), name)
+                    info = tarfile.TarInfo(filename)
+                    info.size = len(data)
+                    tar.addfile(info, StringIO(data))
+            tar.close()
+            # TODO: this is a bit unnecessary considering only one test uses it
+            serverpath = os.path.join(self.root, util.SYNCREPO, self.treename)
+            util.mkdir(serverpath)
+            shutil.copy(self.dbfile, serverpath)
 
 # vim: set ts=4 sw=4 et:

pmpkg.py

@@ -190,7 +190,7 @@ class pmpkg(object):
         data = []
         for key, value in self.install.iteritems():
             if value:
-                data.append("%s() {\n%s\n}" % (key, value))
+                data.append("%s() {\n%s\n}\n" % (key, value))
 
         return "\n".join(data)

pmtest.py

@@ -55,7 +55,7 @@ class pmtest(object):
         either sync databases or the local package collection. The local database
         is allowed to match if allow_local is True."""
         for db in self.db.itervalues():
-            if db.treename == "local" and not allow_local:
+            if db.is_local and not allow_local:
                 continue
             pkg = db.getpkg(name)
             if pkg and pkg.version == version:
@@ -110,7 +110,7 @@ class pmtest(object):
 
         # Create directory structure
         vprint("    Creating directory structure:")
-        dbdir = os.path.join(self.root, util.PM_DBPATH)
+        dbdir = os.path.join(self.root, util.PM_SYNCDBPATH)
         cachedir = os.path.join(self.root, util.PM_CACHEDIR)
         syncdir = os.path.join(self.root, util.SYNCREPO)
         tmpdir = os.path.join(self.root, util.TMPDIR)
@@ -146,25 +146,11 @@ class pmtest(object):
             pkg.md5sum = util.getmd5sum(pkg.path)
             pkg.csize = os.stat(pkg.path)[stat.ST_SIZE]
 
-        # Populating databases
-        vprint("    Populating databases")
-        for key, value in self.db.iteritems():
-            for pkg in value.pkgs:
-                vprint("\t%s/%s" % (key, pkg.fullname()))
-                if key == "local":
-                    pkg.installdate = time.ctime()
-                value.db_write(pkg)
-
         # Creating sync database archives
-        vprint("    Creating sync database archives")
+        vprint("    Creating databases")
         for key, value in self.db.iteritems():
-            if key == "local":
-                continue
             vprint("\t" + value.treename)
-            value.gensync()
-            serverpath = os.path.join(syncdir, value.treename)
-            util.mkdir(serverpath)
-            shutil.copy(value.dbfile, serverpath)
+            value.generate()
 
         # Filesystem
         vprint("    Populating file system")