Imported from pacman-2.7.2.tar.gz

Judd Vinet 2004-01-04 17:53:32 +00:00
parent 68e24b947e
commit 6d4c666a8e
10 changed files with 1119 additions and 1109 deletions

@@ -1,5 +1,8 @@
 VERSION DESCRIPTION
 -----------------------------------------------------------------------------
+2.7.2 - Supressed "No such file" messages during stripping
+      - Removed extra newlines in /var/log/pacman.log
+      - Added a --noextract option to makepkg to skip source extraction
 2.7.1 - Fixed a couple obscure segfaults
       - LogFiles were logging incorrect dates - fixed
       - Cleaned up md5sum output in makepkg -g
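
A quick usage sketch of the new makepkg option (run from a package build
directory; the second call assumes the first one already fetched and unpacked
the sources, and -e is the short form added alongside the long option):

    $ makepkg              # fetch sources, verify md5sums, extract, then build
    $ makepkg --noextract  # skip verification/extraction and reuse the existing src/ tree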

@@ -34,7 +34,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ $(AM_INSTALL_PROGRAM_FLAGS)
 INSTALL_DATA = @INSTALL_DATA@
 INSTALL_SCRIPT = @INSTALL_SCRIPT@
 
-PACVER = 2.7.1
+PACVER = 2.7.2
 
 TOPDIR = @srcdir@
 SRCDIR = $(TOPDIR)/src/

TODO

@@ -1,17 +1,15 @@
+- think about consolidating the -A/-U/-S options into one smart
+  install operation
+- replaces code doesn't run with -U or -A
+- when performing replaces, pacman should not remove old packages until
+  the conflict checks are passed
+- --info should work with --sync to display dependencies
+- add an ETA to the download progress bar
+- add HTTP/1.1 support
 - fix the broken pipe bug
 - handle version comparators in makepkg dep resolution (eg, glibc>=2.2.5)
 - add post_remove, pre_install, pre_upgrade functions to scriptlets
-- record md5sums of all files in a package
+? record md5sums of all files in a package
 ? use 'set -e' in makepkg?
-x if a package fails, ask before aborting the full operation
-  - can't -- further dependent packages may fail b/c of the first failure
 - check $PACCONF env var
-? use a 'trust pacman' config option for downgrading?
-? build-time (source) dependencies in makepkg
-? run ldd on every executable in a newly built package to find required so's
 - add a --pretend option
-- add a consistency/sanity check operation (md5 tracking for all files)
-- use package caches more for performance
-- clean up output a bit (message queue?)
-- use a files.cache db for --owns and db_find_conflicts

File diff suppressed because it is too large.

gensync

@@ -20,7 +20,7 @@
 # USA.
 #
 
-myver='2.7.1'
+myver='2.7.2'
 
 usage() {
 	echo "gensync $myver"

makepkg

@@ -20,7 +20,7 @@
 # USA.
 #
 
-myver='2.7.1'
+myver='2.7.2'
 startdir=`pwd`
 
 USE_COLOR="n"
@@ -178,6 +178,7 @@ usage() {
 	echo "  -c, --clean        Clean up work files after build"
 	echo "  -C, --cleancache   Clean up source files from the cache"
 	echo "  -d, --nodeps       Skip all dependency checks"
+	echo "  -e, --noextract    Do not extract source files (use existing src/ dir)"
 	echo "  -f, --force        Overwrite existing package"
 	echo "  -g, --genmd5       Generate MD5sums for source files"
 	echo "  -h, --help         This help"
@@ -204,6 +205,7 @@ DEP_BIN=0
 DEP_SRC=0
 NODEPS=0
 FORCE=0
+NOEXTRACT=0
 NOSTRIP=0
 RMDEPS=0
 PKGDEST=$startdir
@@ -218,6 +220,7 @@ while [ "$#" -ne "0" ]; do
 		--syncdeps) DEP_BIN=1 ;;
 		--builddeps) DEP_SRC=1 ;;
 		--nodeps) NODEPS=1 ;;
+		--noextract) NOEXTRACT=1 ;;
 		--install) INSTALL=1 ;;
 		--force) FORCE=1 ;;
 		--nostrip) NOSTRIP=1 ;;
@@ -233,13 +236,14 @@ while [ "$#" -ne "0" ]; do
 			exit 1
 			;;
 		-*)
-			while getopts "cCsbdhifgmnrp:w:-" opt; do
+			while getopts "cCsbdehifgmnrp:w:-" opt; do
 				case $opt in
 					c) CLEANUP=1 ;;
 					C) CLEANCACHE=1 ;;
 					s) DEP_BIN=1 ;;
 					b) DEP_SRC=1 ;;
 					d) NODEPS=1 ;;
+					e) NOEXTRACT=1 ;;
 					i) INSTALL=1 ;;
 					g) GENMD5=1 ;;
 					f) FORCE=1 ;;
@@ -414,52 +418,55 @@ for netfile in ${source[@]}; do
 done
 
 if [ "$GENMD5" = "0" ]; then
-	# MD5 validation
-	if [ ${#md5sums[@]} -ne ${#source[@]} ]; then
-		warning "MD5sums are missing or incomplete. Cannot verify source integrity."
-		#sleep 1
-	elif [ `type -p md5sum` ]; then
-		msg "Validating source files with MD5sums"
-		errors=0
-		idx=0
-		for netfile in ${source[@]}; do
-			file=`strip_url $netfile`
-			echo -n " $file ... " >&2
-			echo "${md5sums[$idx]} $file" | md5sum -c - >/dev/null 2>&1
-			if [ $? -ne 0 ]; then
-				echo "FAILED" >&2
-				errors=1
-			else
-				echo "Passed" >&2
-			fi
-			idx=$(($idx+1))
-		done
-		if [ $errors -gt 0 ]; then
-			error "One or more files did not pass the validity check!"
-			exit 1
-		fi
-	else
-		warning "The md5sum program is missing. Cannot verify source files!"
-		sleep 1
-	fi
-	# extract sources
-	msg "Extracting Sources..."
-	for netfile in ${source[@]}; do
-		file=`strip_url $netfile`
-		unset cmd
-		case $file in
-			*.tar.gz|*.tar.Z|*.tgz)
-				cmd="tar --use-compress-program=gzip -xf $file" ;;
-			*.tar.bz2)
-				cmd="tar --use-compress-program=bzip2 -xf $file" ;;
-			*.tar)
-				cmd="tar -xf $file" ;;
-			*.zip)
-				cmd="unzip -qqo $file" ;;
-			*.gz)
-				cmd="gunzip $file" ;;
-			*.bz2)
-				cmd="bunzip2 $file" ;;
-		esac
-		if [ "$cmd" != "" ]; then
-			msg " $cmd"
+	if [ "$NOEXTRACT" = "1" ]; then
+		warning "Skipping source extraction -- using existing src/ tree"
+	else
+		# MD5 validation
+		if [ ${#md5sums[@]} -ne ${#source[@]} ]; then
+			warning "MD5sums are missing or incomplete. Cannot verify source integrity."
+			#sleep 1
+		elif [ `type -p md5sum` ]; then
+			msg "Validating source files with MD5sums"
+			errors=0
+			idx=0
+			for netfile in ${source[@]}; do
+				file=`strip_url $netfile`
+				echo -n " $file ... " >&2
+				echo "${md5sums[$idx]} $file" | md5sum -c - >/dev/null 2>&1
+				if [ $? -ne 0 ]; then
+					echo "FAILED" >&2
+					errors=1
+				else
+					echo "Passed" >&2
+				fi
+				idx=$(($idx+1))
+			done
+			if [ $errors -gt 0 ]; then
+				error "One or more files did not pass the validity check!"
+				exit 1
+			fi
+		else
+			warning "The md5sum program is missing. Cannot verify source files!"
+			sleep 1
+		fi
+		# extract sources
+		msg "Extracting Sources..."
+		for netfile in ${source[@]}; do
+			file=`strip_url $netfile`
+			unset cmd
+			case $file in
+				*.tar.gz|*.tar.Z|*.tgz)
+					cmd="tar --use-compress-program=gzip -xf $file" ;;
+				*.tar.bz2)
+					cmd="tar --use-compress-program=bzip2 -xf $file" ;;
+				*.tar)
+					cmd="tar -xf $file" ;;
+				*.zip)
+					cmd="unzip -qqo $file" ;;
+				*.gz)
+					cmd="gunzip $file" ;;
+				*.bz2)
+					cmd="bunzip2 $file" ;;
+			esac
+			if [ "$cmd" != "" ]; then
+				msg " $cmd"
@@ -470,7 +477,8 @@ if [ "$GENMD5" = "0" ]; then
-				exit 1
-			fi
-		fi
-	done
+					exit 1
+				fi
+			fi
+		done
+	fi
 else
 	# generate md5 hashes
 	if [ ! `type -p md5sum` ]; then
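
The two hunks above are mostly re-indentation: apart from the new three-line
wrapper and its matching closing fi, the enclosed md5 validation and source
extraction code is the same as in 2.7.1, just shifted one level deeper. In
outline (sketch only):

    if [ "$GENMD5" = "0" ]; then
        if [ "$NOEXTRACT" = "1" ]; then
            warning "Skipping source extraction -- using existing src/ tree"
        else
            # ...existing md5 validation and source extraction, unchanged...
        fi
    fi
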
@@ -518,7 +526,7 @@ fi
 # check for existing pkg directory
 if [ -d $startdir/pkg ]; then
-	msg "Removing existing pkg directory..."
+	msg "Removing existing pkg/ directory..."
 	rm -rf $startdir/pkg
 fi
 mkdir -p $startdir/pkg
@@ -573,9 +581,9 @@ cd $startdir
 # strip binaries
 if [ "$NOSTRIP" = "0" ]; then
 	msg "Stripping debugging symbols from libraries..."
-	find pkg/{,usr,usr/local,opt/*}/lib -type f -exec /usr/bin/strip --strip-debug '{}' \; 2>&1
+	find pkg/{,usr,usr/local,opt/*}/lib -type f -exec /usr/bin/strip --strip-debug '{}' \; 2>&1 | grep -v "No such file"
 	msg "Stripping symbols from binaries..."
-	find pkg/{,usr,usr/local,opt/*}/{bin,sbin} -type f -exec /usr/bin/strip '{}' \; 2>&1
+	find pkg/{,usr,usr/local,opt/*}/{bin,sbin} -type f -exec /usr/bin/strip '{}' \; 2>&1 | grep -v "No such file"
 fi
 
 # get some package meta info
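
On the ChangeLog's first item: the brace expansion above names directories
(pkg/usr/local/lib, pkg/opt/*/lib, and so on) that many packages simply do not
contain, so find prints a "No such file or directory" complaint for every
missing path even though nothing is wrong. The added grep -v filters drop
exactly that noise while leaving genuine strip errors visible. Illustration
only, run inside a build directory after packaging:

    $ find pkg/{,usr,usr/local,opt/*}/lib -type f 2>&1                          # noisy
    $ find pkg/{,usr,usr/local,opt/*}/lib -type f 2>&1 | grep -v "No such file" # quiet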

makeworld

@@ -21,7 +21,7 @@
 #
 
 toplevel=`pwd`
-version="2.7.1"
+version="2.7.2"
 
 usage() {
 	echo "makeworld version $version"

pacman.c

@@ -912,14 +912,14 @@ int pacman_sync(pacdb_t *db, PMList *targets)
 			/* no cache directory.... try creating it */
 			snprintf(parent, PATH_MAX, "%svar/cache/pacman", pmo_root);
-			logaction(stderr, "warning: no %s cache exists. creating...\n", ldir);
+			logaction(stderr, "warning: no %s cache exists. creating...", ldir);
 			oldmask = umask(0000);
 			mkdir(parent, 0755);
 			if(mkdir(ldir, 0755)) {
 				/* couldn't mkdir the cache directory, so fall back to /tmp and unlink
 				 * the package afterwards.
 				 */
-				logaction(stderr, "warning: couldn't create package cache, using /tmp instead\n");
+				logaction(stderr, "warning: couldn't create package cache, using /tmp instead");
 				snprintf(ldir, PATH_MAX, "/tmp");
 				varcache = 0;
 			}
@@ -1382,7 +1382,7 @@ int pacman_add(pacdb_t *db, PMList *targets)
 				temp = strdup("/tmp/pacman_XXXXXX");
 				mkstemp(temp);
 				if(tar_extract_file(tar, temp)) {
-					logaction(stderr, "could not extract %s: %s\n", pathname, strerror(errno));
+					logaction(stderr, "could not extract %s: %s", pathname, strerror(errno));
 					errors++;
 					continue;
 				}
@@ -1419,13 +1419,13 @@ int pacman_add(pacdb_t *db, PMList *targets)
 					char newpath[PATH_MAX];
 					snprintf(newpath, PATH_MAX, "%s.pacorig", expath);
 					if(rename(expath, newpath)) {
-						logaction(stderr, "error: could not rename %s: %s\n", expath, strerror(errno));
+						logaction(stderr, "error: could not rename %s: %s", expath, strerror(errno));
 					}
 					if(copyfile(temp, expath)) {
-						logaction(stderr, "error: could not copy %s to %s: %s\n", temp, expath, strerror(errno));
+						logaction(stderr, "error: could not copy %s to %s: %s", temp, expath, strerror(errno));
 						errors++;
 					} else {
-						logaction(stderr, "warning: %s saved as %s\n", expath, newpath);
+						logaction(stderr, "warning: %s saved as %s", expath, newpath);
 					}
 				}
 			} else if(md5_orig) {
@@ -1452,9 +1452,9 @@ int pacman_add(pacdb_t *db, PMList *targets)
 					installnew = 1;
 					snprintf(newpath, PATH_MAX, "%s.pacsave", expath);
 					if(rename(expath, newpath)) {
-						logaction(stderr, "error: could not rename %s: %s\n", expath, strerror(errno));
+						logaction(stderr, "error: could not rename %s: %s", expath, strerror(errno));
 					} else {
-						logaction(stderr, "warning: %s saved as %s\n", expath, newpath);
+						logaction(stderr, "warning: %s saved as %s", expath, newpath);
 					}
 				}
@@ -1478,11 +1478,11 @@ int pacman_add(pacdb_t *db, PMList *targets)
 			} else {
 				vprint("%s is in NoUpgrade - skipping\n", pathname);
 				strncat(expath, ".pacnew", PATH_MAX);
-				logaction(stderr, "warning: extracting %s%s as %s\n", pmo_root, pathname, expath);
+				logaction(stderr, "warning: extracting %s%s as %s", pmo_root, pathname, expath);
 				/*tar_skip_regfile(tar);*/
 			}
 			if(tar_extract_file(tar, expath)) {
-				logaction(stderr, "could not extract %s: %s\n", pathname, strerror(errno));
+				logaction(stderr, "could not extract %s: %s", pathname, strerror(errno));
 				errors++;
 			}
 			/* calculate an md5 hash if this is in info->backup */
@@ -1506,7 +1506,7 @@ int pacman_add(pacdb_t *db, PMList *targets)
 		tar_close(tar);
 		if(errors) {
 			ret = 1;
-			logaction(stderr, "errors occurred while %s %s\n",
+			logaction(stderr, "errors occurred while %s %s",
 				(pmo_upgrade ? "upgrading" : "installing"), info->name);
 		/* XXX: this "else" is disabled so the db_write() ALWAYS occurs. If it doesn't
@@ -1541,7 +1541,7 @@ int pacman_add(pacdb_t *db, PMList *targets)
 	/* make an install date (in UTC) */
 	strncpy(info->installdate, asctime(gmtime(&t)), sizeof(info->installdate));
 	if(db_write(db, info)) {
-		logaction(stderr, "error updating database for %s!\n", info->name);
+		logaction(stderr, "error updating database for %s!", info->name);
 		return(1);
 	}
 	vprint("done.\n");
@@ -1755,7 +1755,7 @@ int pacman_remove(pacdb_t *db, PMList *targets)
 					newpath = (char*)realloc(newpath, strlen(line)+strlen(".pacsave")+1);
 					sprintf(newpath, "%s.pacsave", line);
 					rename(line, newpath);
-					logaction(stderr, "warning: %s saved as %s\n", line, newpath);
+					logaction(stderr, "warning: %s saved as %s", line, newpath);
 				} else {
 					/*vprint(" unlinking %s\n", line);*/
 					if(unlink(line)) {
@@ -1838,7 +1838,7 @@ int pacman_remove(pacdb_t *db, PMList *targets)
 		}
 		if(!pmo_upgrade) {
 			printf("done.\n");
-			logaction(NULL, "removed %s (%s)\n", info->name, info->version);
+			logaction(NULL, "removed %s (%s)", info->name, info->version);
 		}
 	}
@@ -2427,7 +2427,7 @@ PMList* checkdeps(pacdb_t *db, unsigned short op, PMList *targets)
 		for(j = tp->depends; j; j = j->next) {
 			/* split into name/version pairs */
 			if(splitdep((char*)j->data, &depend)) {
-				logaction(stderr, "warning: invalid dependency in %s\n", (char*)tp->name);
+				logaction(stderr, "warning: invalid dependency in %s", (char*)tp->name);
 				continue;
 			}
 			found = 0;
@@ -3019,7 +3019,8 @@ void logaction(FILE *fp, char *fmt, ...)
 	vsnprintf(msg, 1024, fmt, args);
 	va_end(args);
 	if(fp) {
-		fprintf(fp, "%s", msg);
+		fprintf(fp, "%s\n", msg);
+		fflush(fp);
 	}
 	if(pmo_usesyslog) {
 		syslog(LOG_WARNING, "%s", msg);
@@ -3031,7 +3032,7 @@ void logaction(FILE *fp, char *fmt, ...)
 		tm = localtime(&t);
 		fprintf(logfd, "[%02d/%02d/%02d %02d:%02d] %s\n", tm->tm_mon+1, tm->tm_mday,
 			tm->tm_year-100, tm->tm_hour, tm->tm_min, msg);
 	}
 }
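
Taken together with the call-site changes above, these logaction() hunks are
the "extra newlines" fix from the ChangeLog: callers no longer embed a
trailing \n, the function appends exactly one newline (and flushes) when
echoing to a stream, and the timestamped write to the log file keeps its own
single newline. With a hypothetical package name, a /var/log/pacman.log entry
now comes out as one line per action, for example:

    [01/04/04 17:53] removed somepkg (1.0-1)
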
@@ -3083,7 +3084,7 @@ void cleanup(int signum)
 	PMList *lp;
 
 	if(pm_access == READ_WRITE && lckrm(lckfile)) {
-		logaction(stderr, "warning: could not remove lock file %s\n", lckfile);
+		logaction(stderr, "warning: could not remove lock file %s", lckfile);
 	}
 	if(workfile) {
 		/* remove the current file being downloaded (as it's not complete) */

pacman.h

@@ -22,7 +22,7 @@
 #define _PAC_PACMAN_H
 
 #ifndef PACVER
-#define PACVER "2.7.1"
+#define PACVER "2.7.2"
 #endif
 
 #ifndef PKGDIR

@@ -142,7 +142,7 @@ int downloadfiles(PMList *servers, char *localpath, PMList *files)
 		}
 	}
 
-	/* set up our progress bar's callback */
+	/* set up our progress bar's callback (and idle timeout) */
 	if(strcmp(server->protocol, "file")) {
 		FtpOptions(FTPLIB_CALLBACK, (long)log_progress, control);
 		FtpOptions(FTPLIB_IDLETIME, (long)1000, control);