pacman: add cache server support
Signed-off-by: Andrew Gregory <andrew.gregory.8@gmail.com>
parent bad3e13eaa
commit dc91476555
6 changed files with 80 additions and 10 deletions
@@ -240,6 +240,12 @@ number.
 general configuration options. Wildcards in the specified paths will get
 expanded based on linkman:glob[7] rules.
 
+*CacheServer =* url::
+	A full URL to a location where the packages, and signatures (if
+	available) for this repository can be found. Cache servers will be tried
+	before any non-cache servers, will not be removed from the server pool for
+	404 download errors, and will not be used for database files.
+
 *Server =* url::
 	A full URL to a location where the database, packages, and signatures (if
 	available) for this repository can be found.
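For illustration, a repo section that combines the new directive with an ordinary mirror could look like the sketch below (the host names are placeholders; $repo and $arch are expanded the same way as for Server):

[core]
# tried first for package downloads; never used for the sync database,
# and a 404 here just falls back to the regular mirrors
CacheServer = http://pkgcache.localdomain/$repo/os/$arch
Server = https://mirror.example.org/archlinux/$repo/os/$arch

The cache entry is tried first regardless of where it appears in the section, since cache servers and regular servers are kept in separate lists.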
@@ -172,6 +172,7 @@ void config_repo_free(config_repo_t *repo)
 		return;
 	}
 	free(repo->name);
+	FREELIST(repo->cache_servers);
 	FREELIST(repo->servers);
 	free(repo);
 }
@@ -781,6 +782,21 @@ static char *replace_server_vars(config_t *c, config_repo_t *r, const char *s)
 	}
 }
 
+static int replace_server_list_vars(config_t *c, config_repo_t *r, alpm_list_t *list)
+{
+	alpm_list_t *i;
+	for(i = list; i; i = i->next) {
+		char *newurl = replace_server_vars(c, r, i->data);
+		if(newurl == NULL) {
+			return -1;
+		} else {
+			free(i->data);
+			i->data = newurl;
+		}
+	}
+	return 0;
+}
+
 static int register_repo(config_repo_t *repo)
 {
 	alpm_list_t *i;
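replace_server_list_vars simply runs the existing replace_server_vars over every URL in a list, so Server and CacheServer entries get the same $repo/$arch treatment. As a rough standalone illustration of that substitution step (not pacman's implementation; substitute() and the URLs below are made up for the example):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* replace every occurrence of `var` in `url` with `val`; returns a new string */
static char *substitute(const char *url, const char *var, const char *val)
{
	size_t varlen = strlen(var), vallen = strlen(val);
	size_t extra = 0;
	const char *p;

	/* count occurrences so the output buffer is always large enough */
	for(p = url; (p = strstr(p, var)) != NULL; p += varlen) {
		if(vallen > varlen) {
			extra += vallen - varlen;
		}
	}

	char *out = malloc(strlen(url) + extra + 1);
	if(out == NULL) {
		return NULL;
	}

	char *o = out;
	while((p = strstr(url, var)) != NULL) {
		size_t prefix = (size_t)(p - url);
		memcpy(o, url, prefix);
		o += prefix;
		memcpy(o, val, vallen);
		o += vallen;
		url = p + varlen;
	}
	strcpy(o, url);
	return out;
}

int main(void)
{
	/* pacman takes these from the repo section name and the Architecture option */
	char *step = substitute("https://example.org/$repo/os/$arch", "$repo", "core");
	char *url = step ? substitute(step, "$arch", "x86_64") : NULL;
	if(url) {
		printf("%s\n", url); /* prints https://example.org/core/os/x86_64 */
	}
	free(step);
	free(url);
	return 0;
}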
@@ -797,6 +813,15 @@ static int register_repo(config_repo_t *repo)
 			repo->usage, repo->name);
 	alpm_db_set_usage(db, repo->usage);
 
+	for(i = repo->cache_servers; i; i = alpm_list_next(i)) {
+		const char *value = i->data;
+		if(alpm_db_add_cache_server(db, value) != 0) {
+			/* pm_errno is set by alpm_db_setserver */
+			pm_printf(ALPM_LOG_ERROR, _("could not add cache server URL to database '%s': %s (%s)\n"),
+					alpm_db_get_name(db), value, alpm_strerror(alpm_errno(config->handle)));
+			return 1;
+		}
+	}
 	for(i = repo->servers; i; i = alpm_list_next(i)) {
 		const char *value = i->data;
 		if(alpm_db_add_server(db, value) != 0) {
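For context, the libalpm calls this loop drives can also be made directly. A minimal sketch, assuming a libalpm build that already carries the matching alpm_db_add_cache_server() addition (the root/db paths and URLs are placeholders):

/* build: cc sketch.c $(pkg-config --cflags --libs libalpm) */
#include <stdio.h>
#include <alpm.h>

int main(void)
{
	alpm_errno_t err;
	alpm_handle_t *handle = alpm_initialize("/", "/var/lib/pacman/", &err);
	if(!handle) {
		fprintf(stderr, "alpm_initialize failed: %s\n", alpm_strerror(err));
		return 1;
	}

	alpm_db_t *db = alpm_register_syncdb(handle, "core", ALPM_SIG_USE_DEFAULT);
	if(!db) {
		fprintf(stderr, "could not register db: %s\n", alpm_strerror(alpm_errno(handle)));
		alpm_release(handle);
		return 1;
	}

	/* cache servers are registered separately so they are tried before
	 * regular servers, skipped for database downloads, and kept in the
	 * pool even after 404s */
	if(alpm_db_add_cache_server(db, "http://cache.example.org/core/os/x86_64") != 0 ||
	   alpm_db_add_server(db, "https://mirror.example.org/core/os/x86_64") != 0) {
		fprintf(stderr, "could not add server: %s\n", alpm_strerror(alpm_errno(handle)));
		alpm_release(handle);
		return 1;
	}

	alpm_release(handle);
	return 0;
}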
@@ -982,7 +1007,10 @@ static int _parse_repo(const char *key, char *value, const char *file,
 		} \
 	} while(0)
 
-	if(strcmp(key, "Server") == 0) {
+	if(strcmp(key, "CacheServer") == 0) {
+		CHECK_VALUE(value);
+		repo->cache_servers = alpm_list_add(repo->cache_servers, strdup(value));
+	} else if(strcmp(key, "Server") == 0) {
 		CHECK_VALUE(value);
 		repo->servers = alpm_list_add(repo->servers, strdup(value));
 	} else if(strcmp(key, "SigLevel") == 0) {
@@ -1162,17 +1190,11 @@ int setdefaults(config_t *c)
 
 	for(i = c->repos; i; i = i->next) {
 		config_repo_t *r = i->data;
-		alpm_list_t *j;
 		SETDEFAULT(r->usage, ALPM_DB_USAGE_ALL);
 		r->siglevel = merge_siglevel(c->siglevel, r->siglevel, r->siglevel_mask);
-		for(j = r->servers; j; j = j->next) {
-			char *newurl = replace_server_vars(c, r, j->data);
-			if(newurl == NULL) {
-				return -1;
-			} else {
-				free(j->data);
-				j->data = newurl;
-			}
-		}
+		if(replace_server_list_vars(c, r, r->cache_servers) == -1
+				|| replace_server_list_vars(c, r, r->servers) == -1) {
+			return -1;
+		}
 	}
 
@@ -37,6 +37,7 @@ typedef struct __colstr_t {
 
 typedef struct __config_repo_t {
 	char *name;
+	alpm_list_t *cache_servers;
 	alpm_list_t *servers;
 	int usage;
 	int siglevel;
@@ -236,6 +236,7 @@ static void dump_repo(config_repo_t *repo)
 {
 	show_usage("Usage", repo->usage);
 	show_siglevel("SigLevel", repo->siglevel, 0);
+	show_list_str("CacheServer", repo->cache_servers);
 	show_list_str("Server", repo->servers);
 }
 
@@ -310,6 +311,8 @@ static int list_repo_directives(void)
 	for(i = directives; i; i = i->next) {
 		if(strcasecmp(i->data, "Server") == 0) {
 			show_list_str("Server", repo->servers);
+		} else if(strcasecmp(i->data, "CacheServer") == 0) {
+			show_list_str("CacheServer", repo->cache_servers);
 		} else if(strcasecmp(i->data, "SigLevel") == 0) {
 			show_siglevel("SigLevel", repo->siglevel, 0);
 		} else if(strcasecmp(i->data, "Usage") == 0) {
@@ -1,5 +1,6 @@
 pacman_tests = [
   'tests/backup001.py',
+  'tests/cache-server-basic.py',
   'tests/clean001.py',
   'tests/clean002.py',
   'tests/clean003.py',
test/pacman/tests/cache-server-basic.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+self.description = "basic cache server test"
+self.require_capability("curl")
+
+pkgs = [ self.addpkg2db('sync', pmpkg("pkg{}".format(i))) for i in range(0, 5) ]
+
+# TODO: hack to prevent pacman trying to validate the downloaded packages
+p404 = pmpkg('pkg404')
+self.addpkg2db('sync', p404)
+
+cache_url = self.add_simple_http_server({
+    '/{}'.format(pkgs[0].filename()): { 'body': 'CacheServer' },
+    # 404 for packages 1-3
+    '/{}'.format(pkgs[4].filename()): { 'body': 'CacheServer' },
+})
+normal_url = self.add_simple_http_server({
+    '/{}'.format(pkgs[0].filename()): { 'body': 'Server' },
+    '/{}'.format(pkgs[1].filename()): { 'body': 'Server' },
+    '/{}'.format(pkgs[2].filename()): { 'body': 'Server' },
+    '/{}'.format(pkgs[3].filename()): { 'body': 'Server' },
+    '/{}'.format(pkgs[4].filename()): { 'body': 'Server' },
+})
+
+self.db['sync'].option['CacheServer'] = [ cache_url ]
+self.db['sync'].option['Server'] = [ normal_url ]
+self.db['sync'].syncdir = False
+self.cachepkgs = False
+
+self.args = '-S pkg0 pkg1 pkg2 pkg3 pkg4 pkg404'
+
+#self.addrule("PACMAN_RETCODE=0") # TODO
+self.addrule("PACMAN_OUTPUT={}".format(normal_url.replace("http://", "")))
+self.addrule("!PACMAN_OUTPUT={}".format(cache_url.replace("http://", "")))
+self.addrule("CACHE_FCONTENTS={}|CacheServer".format(pkgs[0].filename()))
+self.addrule("CACHE_FCONTENTS={}|Server".format(pkgs[1].filename()))
+self.addrule("CACHE_FCONTENTS={}|Server".format(pkgs[2].filename()))
+self.addrule("CACHE_FCONTENTS={}|Server".format(pkgs[3].filename()))
+self.addrule("CACHE_FCONTENTS={}|CacheServer".format(pkgs[4].filename()))