Package yum :: Module sqlitesack
[hide private]
[frames] | [no frames]

Source Code for Module yum.sqlitesack

   1  #!/usr/bin/python -tt 
   2   
   3  # This program is free software; you can redistribute it and/or modify 
   4  # it under the terms of the GNU General Public License as published by 
   5  # the Free Software Foundation; either version 2 of the License, or 
   6  # (at your option) any later version. 
   7  # 
   8  # This program is distributed in the hope that it will be useful, 
   9  # but WITHOUT ANY WARRANTY; without even the implied warranty of 
  10  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the 
  11  # GNU Library General Public License for more details. 
  12  # 
  13  # You should have received a copy of the GNU General Public License 
  14  # along with this program; if not, write to the Free Software 
  15  # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
  16  # Copyright 2005 Duke University  
  17   
  18  # 
  19  # Implementation of the YumPackageSack class that uses an sqlite backend 
  20  # 
  21   
  22  import os 
  23  import os.path 
  24  import fnmatch 
  25   
  26  import yumRepo 
  27  from packages import PackageObject, RpmBase, YumAvailablePackage, parsePackages 
  28  import Errors 
  29  import misc 
  30   
  31  from sqlutils import executeSQL, sql_esc, sql_esc_glob 
  32  import rpmUtils.miscutils 
  33  import sqlutils 
  34  import constants 
  35  import operator 
  36  from yum.misc import seq_max_split 
  37  from yum.i18n import to_utf8, to_unicode 
  38  import sys 
  39  import re 
  40  import warnings 
def catchSqliteException(func):
    """This decorator converts sqlite exceptions into RepoError"""
    def newFunc(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except sqlutils.sqlite.Error, e:
            # 2.4.x requires this, but 2.6.x complains about even hasattr()
            # of e.message ... *sigh*
            if sys.hexversion < 0x02050000:
                if hasattr(e,'message'):
                    raise Errors.RepoError, str(e.message)
                else:
                    raise Errors.RepoError, str(e)
            # Modern (>= 2.5) interpreters: str(e) is always safe.
            raise Errors.RepoError, str(e)

    # Hand-rolled functools.wraps(): keep the wrapped function's identity
    # so introspection and docs still work.
    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc
def _share_data(value):
    """Module-local shorthand for misc.share_data(): dedup immutable
       values so equal strings/tuples share one object (saves memory)."""
    shared = misc.share_data(value)
    return shared
64
65 # FIXME: parsePackages() 66 -def _parse_pkg_n(match, regexp_match, n):
67 if match == n: 68 return True 69 if not regexp_match: 70 return False 71 72 if (match and n and match[0] not in ('?', '*') and match[0] != n[0]): 73 return False 74 if regexp_match(n): 75 return True 76 return False
77
78 -def _parse_pkg(match, regexp_match, data, e,v,r,a):
79 80 n = data['n'] 81 assert e, 'Nothing in epoch' 82 # Worthless speed hacks? 83 if match == n: 84 return True 85 if (match and n and match[0] not in ('?', '*') and 86 match[0] != n[0] and match[0] != e[0]): 87 return False 88 89 if 'nameArch' not in data: 90 data['nameArch'] = '%s.%s' % (n, a) 91 data['nameVerRelArch'] = '%s-%s-%s.%s' % (n, v, r, a) 92 data['nameVer'] = '%s-%s' % (n, v) 93 data['nameVerRel'] = '%s-%s-%s' % (n, v, r) 94 data['envra'] = '%s:%s-%s-%s.%s' % (e, n, v, r, a) 95 data['nevra'] = '%s-%s:%s-%s.%s' % (n, e, v, r, a) 96 data = set([n, data['nameArch'], data['nameVerRelArch'], data['nameVer'], 97 data['nameVerRel'], data['envra'], data['nevra']]) 98 99 if match in data: 100 return True 101 if not regexp_match: 102 return False 103 104 for item in data: 105 if regexp_match(item): 106 return True 107 return False
108
109 -def _excluder_match(excluder, match, regexp_match, data, e,v,r,a):
110 if False: pass 111 elif excluder in ('eq', 'match'): 112 if _parse_pkg(match, regexp_match, data, e,v,r,a): 113 return True 114 115 elif excluder in ('name.eq', 'name.match'): 116 if _parse_pkg_n(match, regexp_match, data['n']): 117 return True 118 119 elif excluder in ('arch.eq', 'arch.match'): 120 if _parse_pkg_n(match, regexp_match, a): 121 return True 122 123 elif excluder == 'nevr.eq': 124 if 'nevr' not in data: 125 data['nevr'] = '%s-%s:%s-%s' % (data['n'], e, v, r) 126 if match == data['nevr']: 127 return True 128 129 elif excluder in ('nevra.eq', 'nevra.match'): 130 if 'nevra' not in data: 131 data['nevra'] = '%s-%s:%s-%s.%s' % (data['n'], e, v, r, a) 132 if _parse_pkg_n(match, regexp_match, data['nevra']): 133 return True 134 135 elif excluder == 'name.in': 136 if data['n'] in match: 137 return True 138 139 elif excluder == 'nevr.in': 140 if 'nevr' not in data: 141 data['nevr'] = '%s-%s:%s-%s' % (data['n'], e, v, r) 142 if data['nevr'] in match: 143 return True 144 145 elif excluder == 'nevra.in': 146 if 'nevra' not in data: 147 data['nevra'] = '%s-%s:%s-%s.%s' % (data['n'], e, v, r, a) 148 if data['nevra'] in match: 149 return True 150 151 elif excluder == 'pkgtup.eq': 152 if match == data['pkgtup']: 153 return True 154 155 elif excluder == 'pkgtup.in': 156 if data['pkgtup'] in match: 157 return True 158 159 elif excluder == 'marked': 160 if data['marked']: 161 return True 162 163 elif excluder == 'washed': 164 if not data['marked']: 165 return True 166 167 elif excluder == '*': 168 return True 169 170 else: 171 assert False, 'Bad excluder: ' + excluder 172 return None 173 174 return False
175
176 177 -class YumAvailablePackageSqlite(YumAvailablePackage, PackageObject, RpmBase):
178 - def __init__(self, repo, db_obj):
179 self.prco = { 'obsoletes': (), 180 'conflicts': (), 181 'requires': (), 182 'provides': () } 183 self.sack = repo.sack 184 self.repoid = repo.id 185 self.repo = repo 186 self.state = None 187 self._loadedfiles = False 188 self._files = None 189 self._read_db_obj(db_obj) 190 # for stupid metadata created without epochs listed in the version tag 191 # die die 192 if self.epoch is None: 193 self.epoch = '0' 194 self.id = self.pkgId 195 self.ver = self.version 196 self.rel = self.release 197 self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release) 198 199 self._changelog = None 200 self._hash = None
201 202 203 files = property(fget=lambda self: self._loadFiles()) 204
205 - def _read_db_obj(self, db_obj, item=None):
206 """read the db obj. If asked for a specific item, return it. 207 otherwise populate out into the object what exists""" 208 if item: 209 try: 210 return db_obj[item] 211 except (IndexError, KeyError): 212 return None 213 214 for item in ['name', 'arch', 'epoch', 'version', 'release', 'pkgKey']: 215 try: 216 setattr(self, item, _share_data(db_obj[item])) 217 except (IndexError, KeyError): 218 pass 219 220 try: 221 self.pkgId = db_obj['pkgId'] 222 223 checksum_type = _share_data(db_obj['checksum_type']) 224 check_sum = (checksum_type, db_obj['pkgId'], True) 225 self._checksums = [ check_sum ] 226 except (IndexError, KeyError): 227 pass
228 229 @catchSqliteException
230 - def _sql_MD(self, MD, sql, *args):
231 """ Exec SQL against an MD of the repo, return a cursor. """ 232 233 cache = getattr(self.sack, MD + 'db')[self.repo] 234 cur = cache.cursor() 235 executeSQL(cur, sql, *args) 236 return cur
237
238 - def __getattr__(self, varname):
239 db2simplemap = { 'packagesize' : 'size_package', 240 'archivesize' : 'size_archive', 241 'installedsize' : 'size_installed', 242 'buildtime' : 'time_build', 243 'hdrstart' : 'rpm_header_start', 244 'hdrend' : 'rpm_header_end', 245 'basepath' : 'location_base', 246 'relativepath': 'location_href', 247 'filetime' : 'time_file', 248 'packager' : 'rpm_packager', 249 'group' : 'rpm_group', 250 'buildhost' : 'rpm_buildhost', 251 'sourcerpm' : 'rpm_sourcerpm', 252 'vendor' : 'rpm_vendor', 253 'license' : 'rpm_license', 254 'checksum_value' : 'pkgId', 255 } 256 257 # If these existed, then we wouldn't get here ... and nothing in the DB 258 # starts and ends with __'s. So these are missing. 259 if varname.startswith('__') and varname.endswith('__'): 260 raise AttributeError, varname 261 262 dbname = varname 263 if db2simplemap.has_key(varname): 264 dbname = db2simplemap[varname] 265 try: 266 r = self._sql_MD('primary', 267 "SELECT %s FROM packages WHERE pkgId = ?" % dbname, 268 (self.pkgId,)).fetchone() 269 except Errors.RepoError, e: 270 if str(e).startswith('no such column'): 271 #FIXME - after API break make this an AttributeError Raise 272 raise KeyError, str(e) 273 raise 274 value = r[0] 275 if varname == 'epoch' and value is None: 276 value = '0' 277 if varname in ('summary', 'description') and value is None: 278 # Maybe others here? ... location_base is a bad NONO though. 279 value = '' # Description for picasa, probably among others *sigh* 280 if varname in {'vendor' : 1, 'packager' : 1, 'buildhost' : 1, 281 'license' : 1, 'group' : 1, 282 'summary' : 1, 'description' : 1, 'sourcerpm' : 1, 283 'url' : 1}: 284 value = _share_data(value) 285 setattr(self, varname, value) 286 287 return value
288
289 - def _loadFiles(self):
290 if self._loadedfiles: 291 return self._files 292 293 result = {} 294 295 #FIXME - this should be try, excepting 296 self.sack.populate(self.repo, mdtype='filelists') 297 cur = self._sql_MD('filelists', 298 "SELECT dirname, filetypes, filenames " \ 299 "FROM filelist JOIN packages USING(pkgKey) " \ 300 "WHERE packages.pkgId = ?", (self.pkgId,)) 301 for ob in cur: 302 dirname = ob['dirname'] 303 filetypes = decodefiletypelist(ob['filetypes']) 304 filenames = decodefilenamelist(ob['filenames']) 305 while(filetypes): 306 if dirname: 307 filename = dirname+'/'+filenames.pop() 308 else: 309 filename = filenames.pop() 310 filetype = _share_data(filetypes.pop()) 311 result.setdefault(filetype,[]).append(filename) 312 self._loadedfiles = True 313 self._files = result 314 315 return self._files
316
317 - def _loadChangelog(self):
318 result = [] 319 if not self._changelog: 320 if self.repo not in self.sack.otherdb: 321 try: 322 self.sack.populate(self.repo, mdtype='otherdata') 323 except Errors.RepoError: 324 self._changelog = result 325 return 326 cur = self._sql_MD('other', 327 "SELECT date, author, changelog " \ 328 "FROM changelog JOIN packages USING(pkgKey) " \ 329 "WHERE pkgId = ? ORDER BY date DESC", 330 (self.pkgId,)) 331 # Check count(pkgId) here, the same way we do in searchFiles()? 332 # Failure mode is much less of a problem. 333 for ob in cur: 334 # Note: Atm. rpm only does days, where (60 * 60 * 24) == 86400 335 # and we have the hack in _dump_changelog() to keep the 336 # order the same, so this is a quick way to get rid of 337 # any extra "seconds". 338 # We still leak the seconds if there are 100 updates in 339 # a day ... but don't do that. It also breaks if rpm ever 340 # gets fixed (but that is unlikely). 341 c_date = 100 * (ob['date'] / 100) 342 c_author = to_utf8(ob['author']) 343 c_log = to_utf8(ob['changelog']) 344 result.append((c_date, _share_data(c_author), c_log)) 345 self._changelog = result 346 return
347
348 - def returnIdSum(self):
349 return (self.checksum_type, self.pkgId)
350
351 - def returnChangelog(self):
352 self._loadChangelog() 353 return self._changelog
354
355 - def returnFileEntries(self, ftype='file', primary_only=False):
356 if primary_only and not self._loadedfiles: 357 sql = "SELECT name as fname FROM files WHERE pkgKey = ? and type = ?" 358 cur = self._sql_MD('primary', sql, (self.pkgKey, ftype)) 359 return map(lambda x: x['fname'], cur) 360 361 self._loadFiles() 362 return RpmBase.returnFileEntries(self,ftype,primary_only)
363
364 - def returnFileTypes(self):
365 self._loadFiles() 366 return RpmBase.returnFileTypes(self)
367
368 - def simpleFiles(self, ftype='file'):
369 warnings.warn('simpleFiles() will go away in a future version of Yum.' 370 'Use returnFileEntries(primary_only=True)\n', 371 Errors.YumDeprecationWarning, stacklevel=2) 372 sql = "SELECT name as fname FROM files WHERE pkgKey = ? and type = ?" 373 cur = self._sql_MD('primary', sql, (self.pkgKey, ftype)) 374 return map(lambda x: x['fname'], cur)
375
376 - def returnPrco(self, prcotype, printable=False):
377 prcotype = _share_data(prcotype) 378 if isinstance(self.prco[prcotype], tuple): 379 sql = "SELECT name, version, release, epoch, flags " \ 380 "FROM %s WHERE pkgKey = ?" % prcotype 381 cur = self._sql_MD('primary', sql, (self.pkgKey,)) 382 self.prco[prcotype] = [ ] 383 for ob in cur: 384 prco_set = (_share_data(ob['name']), _share_data(ob['flags']), 385 (_share_data(ob['epoch']), 386 _share_data(ob['version']), 387 _share_data(ob['release']))) 388 self.prco[prcotype].append(_share_data(prco_set)) 389 390 return RpmBase.returnPrco(self, prcotype, printable)
391
392 - def _requires_with_pre(self):
393 """returns requires with pre-require bit""" 394 sql = "SELECT name, version, release, epoch, flags,pre " \ 395 "FROM requires WHERE pkgKey = ?" 396 cur = self._sql_MD('primary', sql, (self.pkgKey,)) 397 requires = [] 398 for ob in cur: 399 pre = "0" 400 if ob['pre'].lower() in ['TRUE', 1]: 401 pre = "1" 402 prco_set = (_share_data(ob['name']), _share_data(ob['flags']), 403 (_share_data(ob['epoch']), 404 _share_data(ob['version']), 405 _share_data(ob['release'])), pre) 406 requires.append(prco_set) 407 return requires
408
409 -class YumSqlitePackageSack(yumRepo.YumPackageSack):
410 """ Implementation of a PackageSack that uses sqlite cache instead of fully 411 expanded metadata objects to provide information """ 412
    def __init__(self, packageClass):
        """Create an empty sqlite-backed sack; ``packageClass`` is the class
           used to build package objects (normally YumAvailablePackageSqlite)."""
        # Just init as usual and create a dict to hold the databases
        yumRepo.YumPackageSack.__init__(self, packageClass)
        # One sqlite connection per repo, per metadata type.
        self.primarydb = {}
        self.filelistsdb = {}
        self.otherdb = {}
        self.excludes = {} # of [repo] => {} of pkgId's => 1
        self._excludes = set() # of (repo, pkgKey)
        self._exclude_whitelist = set() # of (repo, pkgKey)
        self._all_excludes = {}
        # Memoized provides/requires search results.
        self._search_cache = {
            'provides' : { },
            'requires' : { },
            }
        # Caches from pkgKey/name/pkgtup to already-built package objects.
        self._key2pkg = {}
        self._pkgname2pkgkeys = {}
        self._pkgtup2pkgs = {}
        self._pkgnames_loaded = set()
        self._arch_allowed = None
        # Rules registered via addPackageExcluder(), and their ids.
        self._pkgExcluder = []
        self._pkgExcludeIds = {}
        # True when pkgobjlist may still contain excluded packages.
        self._pkgobjlist_dirty = False
435 436 @catchSqliteException
437 - def _sql_MD(self, MD, repo, sql, *args):
438 """ Exec SQL against an MD of the repo, return a cursor. """ 439 440 cache = getattr(self, MD + 'db')[repo] 441 cur = cache.cursor() 442 executeSQL(cur, sql, *args) 443 return cur
444
445 - def _sql_MD_pkg_num(self, MD, repo):
446 """ Give a count of pkgIds in the given repo DB """ 447 sql = "SELECT count(pkgId) FROM packages" 448 return self._sql_MD('primary', repo, sql).fetchone()[0]
449
450 - def _clean_pkgobjlist(self):
451 """ If the pkgobjlist is dirty (possible pkgs on it which are excluded) 452 then clean it, and return the clean list. """ 453 assert hasattr(self, 'pkgobjlist') 454 455 if self._pkgobjlist_dirty: 456 pol = filter(lambda x: not self._pkgExcluded(x), self.pkgobjlist) 457 self.pkgobjlist = pol 458 self._pkgobjlist_dirty = False 459 460 return self.pkgobjlist
461
462 - def __len__(self):
463 # First check if everything is excluded 464 all_excluded = True 465 for (repo, cache) in self.primarydb.items(): 466 if repo not in self._all_excludes: 467 all_excluded = False 468 break 469 if all_excluded: 470 return 0 471 472 if hasattr(self, 'pkgobjlist'): 473 return len(self._clean_pkgobjlist()) 474 475 exclude_num = 0 476 for repo in self.excludes: 477 exclude_num += len(self.excludes[repo]) 478 pkg_num = 0 479 for repo in self.primarydb: 480 pkg_num += self._sql_MD_pkg_num('primary', repo) 481 return pkg_num - exclude_num
482
483 - def dropCachedData(self):
484 if hasattr(self, '_memoize_requires'): 485 del self._memoize_requires 486 if hasattr(self, '_memoize_provides'): 487 del self._memoize_provides 488 if hasattr(self, 'pkgobjlist'): 489 del self.pkgobjlist 490 self._pkgobjlist_dirty = False 491 self._key2pkg = {} 492 self._pkgname2pkgkeys = {} 493 self._pkgnames_loaded = set() 494 self._pkgtup2pkgs = {} 495 self._search_cache = { 496 'provides' : { }, 497 'requires' : { }, 498 } 499 misc.unshare_data()
500 501 @catchSqliteException
502 - def close(self):
503 self.dropCachedData() 504 505 for dataobj in self.primarydb.values() + \ 506 self.filelistsdb.values() + \ 507 self.otherdb.values(): 508 dataobj.close() 509 self.primarydb = {} 510 self.filelistsdb = {} 511 self.otherdb = {} 512 self.excludes = {} 513 self._excludes = set() 514 self._exclude_whitelist = set() 515 self._all_excludes = {} 516 self._pkgExcluder = [] 517 self._pkgExcludeIds = {} 518 self._pkgobjlist_dirty = False 519 520 yumRepo.YumPackageSack.close(self)
521
    def buildIndexes(self):
        """No-op for the sqlite backend: sqlite provides the indexing."""
        # We don't need to play with returnPackages() caching as it handles
        # additions to excludes after the cache is built.
        pass
526
    def _checkIndexes(self, failure='error'):
        # No-op: there are no in-memory indexes to validate for the sqlite
        # backend; ``failure`` is accepted only for API compatibility.
        return
529
530 - def _delPackageRK(self, repo, pkgKey):
531 ''' Exclude a package so that _pkgExcluded*() knows it's gone. 532 Note that this doesn't update self.exclude. ''' 533 self._excludes.add((repo, pkgKey)) 534 # Don't keep references around, just wastes memory. 535 if repo in self._key2pkg: 536 po = self._key2pkg[repo].pop(pkgKey, None) 537 if po is not None: # Will also be in the pkgtup2pkgs cache... 538 pos = self._pkgtup2pkgs[po.pkgtup] 539 pos = filter(lambda x: id(x) == id(po), pos) 540 self._pkgtup2pkgs[po.pkgtup] = pos
541 542 # Remove a package 543 # Because we don't want to remove a package from the database we just 544 # add it to the exclude list
545 - def delPackage(self, obj):
546 if not self.excludes.has_key(obj.repo): 547 self.excludes[obj.repo] = {} 548 self.excludes[obj.repo][obj.pkgId] = 1 549 if (obj.repo, obj.pkgKey) in self._exclude_whitelist: 550 self._exclude_whitelist.discard((obj.repo, obj.pkgKey)) 551 self._delPackageRK(obj.repo, obj.pkgKey) 552 self._pkgobjlist_dirty = True
553
554 - def _delAllPackages(self, repo):
555 """ Exclude all packages from the repo. """ 556 self._all_excludes[repo] = True 557 if repo in self.excludes: 558 del self.excludes[repo] 559 if repo in self._key2pkg: 560 del self._key2pkg[repo] 561 if repo in self._pkgname2pkgkeys: 562 del self._pkgname2pkgkeys[repo]
563
564 - def _excluded(self, repo, pkgId):
565 if repo in self._all_excludes: 566 return True 567 568 if repo in self.excludes and pkgId in self.excludes[repo]: 569 return True 570 571 return False
572
    def _pkgKeyExcluded(self, repo, pkgKey):
        # Cheap exclusion test by pkgKey alone: no package object needs to
        # be loaded.
        if self._all_excludes and repo in self._all_excludes:
            return True

        # NOTE(review): when self._excludes is empty this returns the empty
        # set (falsy) rather than False; callers only test truthiness, so
        # keep that in mind before "simplifying" this expression.
        return self._excludes and (repo, pkgKey) in self._excludes
578
    def _pkgExcludedRKNEVRA(self, repo,pkgKey, n,e,v,r,a):
        ''' Main function to use for "can we use this package" question.
                   . Tests repo against allowed repos.
                   . Tests pkgKey against allowed packages.
                   . Tests arch against allowed arches.
                   . Tests addPackageExcluder() calls.
        '''

        # Whitelist hit: this pkgKey already survived a full excluder pass.
        if self._exclude_whitelist and (repo,pkgKey) in self._exclude_whitelist:
            return False

        if self._pkgKeyExcluded(repo, pkgKey):
            return True

        # Arch filtering; cache the verdict via _delPackageRK().
        if self._arch_allowed is not None and a not in self._arch_allowed:
            self._delPackageRK(repo, pkgKey)
            return True

        if not self._pkgExcluder:
            return False

        # Shared scratch dict: _excluder_match()/_parse_pkg() cache the
        # various NEVRA spellings on it between rules.
        data = {'n' : n.lower(), 'pkgtup' : (n, a, e, v, r), 'marked' : False}
        e = e.lower()
        v = v.lower()
        r = r.lower()
        a = a.lower()

        # Rules are order-dependent: first 'exclude'/'include' hit decides;
        # 'mark'/'wash' toggle state the later rules can test.
        for repoid, excluder, match, regexp_match in self._pkgExcluder:
            if repoid is not None and repoid != repo.id:
                continue

            exSPLIT = excluder.split('.', 1)
            if len(exSPLIT) != 2:
                assert False, 'Bad excluder: ' + excluder
                continue

            exT, exM = exSPLIT
            if False: pass
            elif exT == 'exclude':
                if _excluder_match(exM, match, regexp_match, data, e,v,r,a):
                    self._delPackageRK(repo, pkgKey)
                    return True

            elif exT == 'include':
                # An include hit short-circuits all remaining rules.
                if _excluder_match(exM, match, regexp_match, data, e,v,r,a):
                    break

            elif exT == 'mark':
                if data['marked']:
                    pass # Speed opt. don't do matches we don't need to do.
                elif _excluder_match(exM, match, regexp_match, data, e,v,r,a):
                    data['marked'] = True

            elif exT == 'wash':
                if not data['marked']:
                    pass # Speed opt. don't do matches we don't need to do.
                elif _excluder_match(exM, match, regexp_match, data, e,v,r,a):
                    data['marked'] = False

            else:
                assert False, 'Bad excluder: ' + excluder

        # Survived every rule: whitelist so the next query is O(1).
        self._exclude_whitelist.add((repo, pkgKey))
        return False
643
644 - def _pkgExcludedRKT(self, repo,pkgKey, pkgtup):
645 ''' Helper function to call _pkgExcludedRKNEVRA. 646 Takes a repo, pkgKey and a package tuple''' 647 (n,a,e,v,r) = pkgtup 648 return self._pkgExcludedRKNEVRA(repo, pkgKey, n,e,v,r,a)
649
650 - def _pkgExcludedRKD(self, repo,pkgKey, data):
651 ''' Helper function to call _pkgExcludedRKNEVRA. 652 Takes a repo, pkgKey and a dict of package data''' 653 (n,a,e,v,r) = (data['name'], data['arch'], 654 data['epoch'], data['version'], data['release']) 655 return self._pkgExcludedRKNEVRA(repo, pkgKey, n,e,v,r,a)
656
657 - def _pkgExcluded(self, po):
658 ''' Helper function to call _pkgExcludedRKNEVRA. 659 Takes a package object. ''' 660 return self._pkgExcludedRKT(po.repo, po.pkgKey, po.pkgtup)
661
662 - def addPackageExcluder(self, repoid, excluderid, excluder, *args):
663 """ Add an "excluder" for all packages in the repo/sack. Can basically 664 do anything based on nevra, changes lots of exclude decisions from 665 "preload package; test; delPackage" into "load excluder". 666 Excluderid is used so the caller doesn't have to track 667 "have I loaded the excluder for this repo.", it's probably only 668 useful when repoid is None ... if it turns out utterly worthless 669 then it's still not a huge wart. """ 670 if excluderid is not None and excluderid in self._pkgExcludeIds: 671 return 672 673 match = None 674 regexp_match = None 675 if False: pass 676 elif excluder.endswith('.eq'): 677 assert len(args) == 1 678 match = args[0].lower() 679 elif excluder.endswith('.in'): 680 assert len(args) == 1 681 match = args[0] 682 elif excluder.endswith('.match'): 683 assert len(args) == 1 684 match = args[0].lower() 685 if misc.re_glob(match): 686 regexp_match = re.compile(fnmatch.translate(match)).match 687 elif excluder.endswith('.*'): 688 assert len(args) == 0 689 elif excluder.endswith('.marked'): 690 assert len(args) == 0 691 elif excluder.endswith('.washed'): 692 assert len(args) == 0 693 # Really need to do this, need to cleanup pkgExcluder first though 694 # or it does nothing. 695 # self._pkgobjlist_dirty = True 696 self._pkgExcluder.append((repoid, excluder, match, regexp_match)) 697 if excluderid is not None: 698 self._pkgExcludeIds[excluderid] = len(self._pkgExcluder)
699
    def _packageByKey(self, repo, pkgKey, exclude=True):
        """ Lookup a pkg by it's pkgKey, if we don't have it load it """
        # Speed hack, so we don't load the pkg. if the pkgKey is dead.
        if exclude and self._pkgKeyExcluded(repo, pkgKey):
            return None

        # First touch of this repo: prime both per-repo caches together so
        # they stay in step.
        if repo not in self._key2pkg:
            self._key2pkg[repo] = {}
            self._pkgname2pkgkeys[repo] = {}
        if pkgKey not in self._key2pkg[repo]:
            sql = "SELECT pkgKey, pkgId, name, epoch, version, release, arch " \
                  "FROM packages WHERE pkgKey = ?"
            data = self._sql_MD('primary', repo, sql, (pkgKey,)).fetchone()
            if data is None:
                msg = "pkgKey %s doesn't exist in repo %s" % (pkgKey, repo)
                raise Errors.RepoError, msg
            # Now that we have the NEVRA, run the full excluder check.
            if exclude and self._pkgExcludedRKD(repo, pkgKey, data):
                return None
            # self.pc is the packageClass passed to __init__().
            po = self.pc(repo, data)
            self._key2pkg[repo][pkgKey] = po
            self._pkgtup2pkgs.setdefault(po.pkgtup, []).append(po)
            pkgkeys = self._pkgname2pkgkeys[repo].setdefault(data['name'], [])
            pkgkeys.append(pkgKey)
        return self._key2pkg[repo][pkgKey]
724
    def _packageByKeyData(self, repo, pkgKey, data, exclude=True):
        """ Like _packageByKey() but we already have the data for .pc() """
        if exclude and self._pkgExcludedRKD(repo, pkgKey, data):
            return None
        if repo not in self._key2pkg:
            self._key2pkg[repo] = {}
            self._pkgname2pkgkeys[repo] = {}
        # NOTE(review): this tests data['pkgKey'] but stores/keys caches under
        # the pkgKey argument -- presumably callers always pass
        # pkgKey == data['pkgKey']; if they ever differ, the final lookup
        # below would KeyError. TODO confirm against callers.
        if data['pkgKey'] not in self._key2pkg.get(repo, {}):
            po = self.pc(repo, data)
            self._key2pkg[repo][pkgKey] = po
            self._pkgtup2pkgs.setdefault(po.pkgtup, []).append(po)
            pkgkeys = self._pkgname2pkgkeys[repo].setdefault(data['name'], [])
            pkgkeys.append(pkgKey)
        return self._key2pkg[repo][data['pkgKey']]
739
740 - def _pkgtupByKeyData(self, repo, pkgKey, data):
741 """ Like _packageByKeyData() but we don't create the package, we just 742 return the pkgtup. """ 743 if self._pkgExcludedRKD(repo, pkgKey, data): 744 return None 745 prepo = self._key2pkg.get(repo) 746 if prepo is None: 747 self._key2pkg[repo] = {} 748 self._pkgname2pkgkeys[repo] = {} 749 elif data['pkgKey'] in prepo: 750 return prepo[data['pkgKey']].pkgtup 751 return (data['name'], data['arch'], 752 data['epoch'], data['version'], data['release'])
753
754 - def _packagesByName(self, pkgname):
755 """ Load all pkgnames from cache, with a given name. """ 756 ret = [] 757 for repo in self.primarydb: 758 pkgkeys = self._pkgname2pkgkeys.get(repo, {}).get(pkgname, []) 759 if not pkgkeys: 760 continue 761 762 for pkgkey in pkgkeys: 763 pkg = self._packageByKey(repo, pkgkey) 764 if pkg is None: 765 continue 766 ret.append(pkg) 767 return ret
768
769 - def addDict(self, repo, datatype, dataobj, callback=None):
770 if self.added.has_key(repo): 771 if datatype in self.added[repo]: 772 return 773 else: 774 self.added[repo] = [] 775 776 if not self.excludes.has_key(repo): 777 self.excludes[repo] = {} 778 779 if dataobj is None: 780 raise Errors.RepoError, "Tried to add None %s to %s" % (datatype, repo) 781 782 if datatype == 'metadata': 783 self.primarydb[repo] = dataobj 784 elif datatype == 'filelists': 785 self.filelistsdb[repo] = dataobj 786 elif datatype == 'otherdata': 787 self.otherdb[repo] = dataobj 788 else: 789 # We can not handle this yet... 790 raise Errors.RepoError, "Sorry sqlite does not support %s in %s" % (datatype, repo) 791 792 self.added[repo].append(datatype)
793 794 795 # Get all files for a certain pkgId from the filelists.xml metadata 796 # Search packages that either provide something containing name 797 # or provide a file containing name
798 - def searchAll(self,name, query_type='like'):
799 # this function is just silly and it reduces down to just this 800 return self.searchPrco(name, 'provides')
801
802 - def _sql_pkgKey2po(self, repo, cur, pkgs=None, have_data=False):
803 """ Takes a cursor and maps the pkgKey rows into a list of packages. """ 804 if pkgs is None: pkgs = [] 805 for ob in cur: 806 if have_data: 807 pkg = self._packageByKeyData(repo, ob['pkgKey'], ob) 808 else: 809 pkg = self._packageByKey(repo, ob['pkgKey']) 810 if pkg is None: 811 continue 812 pkgs.append(pkg) 813 return pkgs
814
815 - def _skip_all(self):
816 """ Are we going to skip every package in all our repos? """ 817 skip_all = True 818 for repo in self.added: 819 if repo not in self._all_excludes: 820 skip_all = False 821 break 822 return skip_all
823 824 @catchSqliteException
825 - def _search_primary_files(self, name):
826 querytype = 'glob' 827 if not misc.re_glob(name): 828 querytype = '=' 829 results = [] 830 831 for (rep,cache) in self.primarydb.items(): 832 if rep in self._all_excludes: 833 continue 834 cur = cache.cursor() 835 executeSQL(cur, "select DISTINCT pkgKey from files where name %s ?" % querytype, (name,)) 836 self._sql_pkgKey2po(rep, cur, results) 837 838 return misc.unique(results)
    @catchSqliteException
    def searchFiles(self, name, strict=False):
        """search primary if file will be in there, if not, search filelists, use globs, if possible"""

        if self._skip_all():
            return []

        # optimizations:
        # if it is not glob, then see if it is in the primary.xml filelists,
        # if so, just use those for the lookup

        glob = True         # does the whole path glob?
        file_glob = True    # does the basename glob?
        querytype = 'glob'
        dirname = os.path.dirname(name)
        filename = os.path.basename(name)
        if strict or not misc.re_glob(name):
            glob = False
            file_glob = False
            querytype = '='
        elif not misc.re_glob(filename):
            file_glob = False

        # Take off the trailing slash to act like rpm
        if name[-1] == '/':
            name = name[:-1]

        pkgs = []

        # ultra simple optimization
        if misc.re_primary_filename(name):
            if not misc.re_glob(dirname): # is the dirname a glob?
                return self._search_primary_files(name)

        if len(self.filelistsdb) == 0:
            # grab repo object from primarydb and force filelists population in this sack using repo
            # sack.populate(repo, mdtype, callback, cacheonly)
            for (repo,cache) in self.primarydb.items():
                if repo in self._all_excludes:
                    continue

                self.populate(repo, mdtype='filelists')

        # Check to make sure the DB data matches, this should always pass but
        # we've had weird errors. So check it for a bit.
        for repo in self.filelistsdb:
            pri_pkgs = self._sql_MD_pkg_num('primary', repo)
            fil_pkgs = self._sql_MD_pkg_num('filelists', repo)
            if pri_pkgs != fil_pkgs:
                raise Errors.RepoError

        # Build an optional "dirname_check" SQL prefix that pre-filters rows
        # by directory (and a LIKE on the packed filenames column) before the
        # expensive full comparison below.
        sql_params = []
        dirname_check = ""
        if not glob:
            (pattern, esc) = sql_esc(filename)
            dirname_check = "dirname = ? and filenames LIKE ? %s and " % esc
            sql_params.append(dirname)
            sql_params.append('%' + pattern + '%')
        elif not file_glob:
            (pattern, esc) = sql_esc(filename)
            dirname_check = "dirname GLOB ? and filenames LIKE ? %s and " % esc
            sql_params.append(dirname)
            sql_params.append('%' + pattern + '%')
        elif filename == '*':
            # We only care about matching on dirname...
            for (rep,cache) in self.filelistsdb.items():
                if rep in self._all_excludes:
                    continue

                cur = cache.cursor()
                sql_params.append(dirname)
                executeSQL(cur, """SELECT pkgKey FROM filelist
                                   WHERE dirname %s ?""" % (querytype,),
                           sql_params)
                self._sql_pkgKey2po(rep, cur, pkgs)

            return misc.unique(pkgs)

        for (rep,cache) in self.filelistsdb.items():
            if rep in self._all_excludes:
                continue

            cur = cache.cursor()

            # grab the entries that are a single file in the
            # filenames section, use sqlites globbing if it is a glob
            executeSQL(cur, "select pkgKey from filelist where \
                %s length(filetypes) = 1 and \
                dirname || ? || filenames \
                %s ?" % (dirname_check, querytype), sql_params + ['/',name])
            self._sql_pkgKey2po(rep, cur, pkgs)

            if file_glob:
                name_re = re.compile(fnmatch.translate(name))
                def filelist_globber(sql_dirname, sql_filenames):
                    # Note: Can't return bool, because sqlite doesn't like it in
                    #       weird ways. Test:
                    #           install '*bin/autoheader'
                    #           provides /lib/security/pam_loginuid.so
                    files = sql_filenames.split('/')
                    if not file_glob:
                        return int(filename in files)

                    fns = map(lambda f: '%s/%s' % (sql_dirname, f), files)
                    for match in fns:
                        if name_re.match(match):
                            return 1
                    return 0

                # Register the Python matcher as an SQL function so sqlite
                # can call back into it per row.
                cache.create_function("filelist_globber", 2, filelist_globber)
                # for all the ones where filenames is multiple files,
                # make the files up whole and use python's globbing method
                executeSQL(cur, "select pkgKey from filelist where \
                    %s length(filetypes) > 1 \
                    and filelist_globber(dirname,filenames)" % dirname_check,
                           sql_params)

                self._sql_pkgKey2po(rep, cur, pkgs)

        pkgs = misc.unique(pkgs)
        return pkgs
    @catchSqliteException
    def searchPrimaryFields(self, fields, searchstring):
        """search arbitrary fields from the primarydb for a string"""
        if self._skip_all():
            return []

        result = []
        if len(fields) < 1:
            return result

        # NOTE(review): the search term is spliced into the SQL text rather
        # than bound as a parameter; it is escaped first (quote doubling +
        # sql_esc), but parameter binding would be the safer construct.
        searchstring = searchstring.replace("'", "''")
        (searchstring, esc) = sql_esc(searchstring)
        # OR together one LIKE clause per requested field.
        sql = "select DISTINCT pkgKey from packages where %s like '%%%s%%'%s " % (fields[0], searchstring, esc)

        for f in fields[1:]:
            sql = "%s or %s like '%%%s%%'%s " % (sql, f, searchstring, esc)

        for (rep,cache) in self.primarydb.items():
            cur = cache.cursor()
            executeSQL(cur, sql)
            self._sql_pkgKey2po(rep, cur, result)
        return result
984 985 @catchSqliteException
986 - def searchPrimaryFieldsMultipleStrings(self, fields, searchstrings):
987 """search arbitrary fields from the primarydb for a multiple strings 988 return packages, number of items it matched as a list of tuples""" 989 990 if self._skip_all(): 991 return [] 992 993 result = [] # (pkg, num matches) 994 if not fields or not searchstrings: 995 return result 996 997 # NOTE: I can't see any reason not to use this all the time, speed 998 # comparison shows them as basically equal. 999 if len(searchstrings) > (constants.PATTERNS_MAX / len(fields)): 1000 tot = {} 1001 for searchstring in searchstrings: 1002 matches = self.searchPrimaryFields(fields, searchstring) 1003 for po in matches: 1004 tot[po] = tot.get(po, 0) + 1 1005 for po in sorted(tot, key=operator.itemgetter, reverse=True): 1006 result.append((po, tot[po])) 1007 return result 1008 1009 unionstring = "select pkgKey, SUM(cumul) AS total from ( " 1010 endunionstring = ")GROUP BY pkgKey ORDER BY total DESC" 1011 1012 #SELECT pkgkey, SUM(cumul) AS total FROM (SELECT pkgkey, 1 1013 #AS cumul FROM packages WHERE description LIKE '%foo%' UNION ... ) 1014 #GROUP BY pkgkey ORDER BY total DESC; 1015 selects = [] 1016 1017 for s in searchstrings: 1018 s = s.replace("'", "''") 1019 (s, esc) = sql_esc(s) 1020 sql="select pkgKey,1 AS cumul from packages where %s like '%%%s%%'%s " % (fields[0], s, esc) 1021 for f in fields[1:]: 1022 sql = "%s or %s like '%%%s%%'%s " % (sql, f, s, esc) 1023 selects.append(sql) 1024 1025 totalstring = unionstring + " UNION ALL ".join(selects) + endunionstring 1026 1027 for (rep,cache) in self.primarydb.items(): 1028 cur = cache.cursor() 1029 executeSQL(cur, totalstring) 1030 for ob in cur: 1031 pkg = self._packageByKey(rep, ob['pkgKey']) 1032 if pkg is None: 1033 continue 1034 result.append((pkg, ob['total'])) 1035 return result

    @catchSqliteException
    def returnObsoletes(self, newest=False):
        """Return a dict of all obsoletes entries across all primary DBs:
        {(name, arch, epoch, version, release):
         [(oname, oflags, (oepoch, oversion, orelease)), ...]}

        :param newest: not supported by this backend; raises
            NotImplementedError when True
        """
        if self._skip_all():
            return {}

        if newest:
            raise NotImplementedError()

        obsoletes = {}
        for (rep,cache) in self.primarydb.items():
            cur = cache.cursor()
            # One join pulls the obsoleting package's nevra and the
            # obsoletes entry it carries in a single pass.
            executeSQL(cur, "select packages.name as name,\
                packages.pkgKey as pkgKey,\
                packages.arch as arch, packages.epoch as epoch,\
                packages.release as release, packages.version as version,\
                obsoletes.name as oname, obsoletes.epoch as oepoch,\
                obsoletes.release as orelease, obsoletes.version as oversion,\
                obsoletes.flags as oflags\
                from obsoletes,packages where obsoletes.pkgKey = packages.pkgKey")
            for ob in cur:
                # _share_data interns equal strings/tuples to save memory.
                key = ( _share_data(ob['name']), _share_data(ob['arch']),
                        _share_data(ob['epoch']), _share_data(ob['version']),
                        _share_data(ob['release']))
                if self._pkgExcludedRKT(rep, ob['pkgKey'], key):
                    continue

                (n,f,e,v,r) = ( _share_data(ob['oname']),
                                _share_data(ob['oflags']),
                                _share_data(ob['oepoch']),
                                _share_data(ob['oversion']),
                                _share_data(ob['orelease']))

                key = _share_data(key)
                val = _share_data((n,f,(e,v,r)))
                obsoletes.setdefault(key,[]).append(val)

        return obsoletes
1074 1075 @catchSqliteException
1076 - def getPackageDetails(self,pkgId):
1077 for (rep,cache) in self.primarydb.items(): 1078 cur = cache.cursor() 1079 executeSQL(cur, "select * from packages where pkgId = ?", (pkgId,)) 1080 for ob in cur: 1081 return ob
1082 1083 @catchSqliteException
1084 - def _getListofPackageDetails(self, pkgId_list):
1085 pkgs = [] 1086 if len(pkgId_list) == 0: 1087 return pkgs 1088 pkgid_query = str(tuple(pkgId_list)) 1089 1090 for (rep,cache) in self.primarydb.items(): 1091 cur = cache.cursor() 1092 executeSQL(cur, "select * from packages where pkgId in %s" %(pkgid_query,)) 1093 for ob in cur: 1094 pkgs.append(ob) 1095 1096 return pkgs

    @catchSqliteException
    def _search_get_memoize(self, prcotype):
        """Load and cache the whole prco table of every non-excluded repo.

        :param prcotype: table name, e.g. 'provides' or 'requires'
        :return: dict mapping (repo, entry name) -> list of
            (pkgKey, (name, flags, (epoch, version, release))) tuples
        """
        # Built lazily, once per prcotype, and stashed on self.
        if not hasattr(self, '_memoize_' + prcotype):
            memoize = {}

            for (rep,cache) in self.primarydb.items():
                if rep in self._all_excludes:
                    continue

                cur = cache.cursor()
                executeSQL(cur, "select * from %s" % prcotype)
                for x in cur:
                    # _share_data interns the many repeated strings/tuples.
                    val = (_share_data(x['name']), _share_data(x['flags']),
                           (_share_data(x['epoch']), _share_data(x['version']),
                            _share_data(x['release'])))
                    val = _share_data(val)
                    key = (rep, val[0])
                    pkgkey = _share_data(x['pkgKey'])
                    val = (pkgkey, val)
                    memoize.setdefault(key, []).append(val)
            setattr(self, '_memoize_' + prcotype, memoize)
        return getattr(self, '_memoize_' + prcotype)

    @catchSqliteException
    def _search(self, prcotype, name, flags, version):
        """Core dependency search: find packages whose prcotype entries
        (provides/requires/conflicts/obsoletes) satisfy the given dep.

        :param prcotype: table name -- 'provides', 'requires', ...
        :param name: dep name (or a filename, for provides)
        :param flags: rpm comparison flags; 0 is normalized to None
        :param version: a version string, None, or an (e, v, r) tuple
        :return: dict mapping package object -> list of the matching
            (name, flags, (e, v, r)) entries
        """

        if self._skip_all():
            return {}

        name = to_unicode(name)
        if flags == 0:
            flags = None
        # Normalize version into an (e, v, r) tuple for rangeCompare().
        if type(version) in (str, type(None), unicode):
            req = (name, flags, rpmUtils.miscutils.stringToVersion(
                version))
        elif type(version) in (tuple, list): # would this ever be a list?
            req = (name, flags, version)

        prcotype = _share_data(prcotype)
        req = _share_data(req)
        if req in self._search_cache[prcotype]:
            return self._search_cache[prcotype][req]

        result = { }

        # Requires is the biggest hit, pre-loading provides actually hurts
        # NOTE: Disabling atm. ... small install/updates get a significant hit.
        # And even large updates take a hit with the memoize path, maybe we
        # fixed something with later change? ... maybe I was on crack?
        # Speed seems to depend on _search_cache.
        if True: # prcotype != 'requires':
            primarydb_items = self.primarydb.items()
            preload = False
        else:
            # Dead branch kept for reference: answer from the in-memory
            # memoize tables instead of per-query SQL (see note above).
            primarydb_items = []
            preload = True
            memoize = self._search_get_memoize(prcotype)
            for (rep,cache) in self.primarydb.items():
                if rep in self._all_excludes:
                    continue

                tmp = {}
                for x in memoize.get((rep, name), []):
                    pkgkey, val = x
                    if rpmUtils.miscutils.rangeCompare(req, val):
                        tmp.setdefault(pkgkey, []).append(val)
                for pkgKey, hits in tmp.iteritems():
                    pkg = self._packageByKey(rep, pkgKey)
                    if pkg is None:
                        continue
                    result[pkg] = hits

        # SQL path: name-only match in SQL, then range-check in Python.
        for (rep,cache) in primarydb_items:
            if rep in self._all_excludes:
                continue

            cur = cache.cursor()
            executeSQL(cur, "select * from %s where name=?" % prcotype,
                       (name,))
            tmp = { }
            for x in cur:
                val = (_share_data(x['name']), _share_data(x['flags']),
                       (_share_data(x['epoch']), _share_data(x['version']),
                        _share_data(x['release'])))
                val = _share_data(val)
                if rpmUtils.miscutils.rangeCompare(req, val):
                    tmp.setdefault(x['pkgKey'], []).append(val)
            for pkgKey, hits in tmp.iteritems():
                pkg = self._packageByKey(rep, pkgKey)
                if pkg is None:
                    continue
                result[pkg] = hits

        if prcotype != 'provides' or name[0] != '/':
            # Not a file provide: cache and return what we found.
            if not preload:
                self._search_cache[prcotype][req] = result
            return result

        if not misc.re_primary_filename(name):
            # if its not in the primary.xml files
            # search the files.xml file info
            for pkg in self.searchFiles(name, strict=True):
                result[pkg] = [(name, None, None)]
            if not preload:
                self._search_cache[prcotype][req] = result
            return result

        # If it is a filename, search the primary.xml file info

        for pkg in self._search_primary_files(name):
            result[pkg] = [(name, None, None)]
        self._search_cache[prcotype][req] = result
        return result
1211
1212 - def getProvides(self, name, flags=None, version=(None, None, None)):
1213 return self._search("provides", name, flags, version)
1214
1215 - def getRequires(self, name, flags=None, version=(None, None, None)):
1216 return self._search("requires", name, flags, version)

    @catchSqliteException
    def searchNames(self, names=[]):
        """return a list of packages matching any of the given names. This is
           only a match on package name, nothing else

        :param names: list of exact package names (no globs). Note the
            mutable default is safe here: 'names' is rebound, never mutated.
        """

        if self._skip_all():
            return []

        loaded_all_names = hasattr(self, 'pkgobjlist')
        returnList = []
        # Serve from already-loaded data where possible; after this loop
        # 'names' holds only the pkgnames that still need a DB query.
        user_names = set(names)
        names = []
        for pkgname in user_names:
            if loaded_all_names or pkgname in self._pkgnames_loaded:
                returnList.extend(self._packagesByName(pkgname))
            else:
                names.append(pkgname)

        if not names:
            return returnList

        max_entries = constants.PATTERNS_INDEXED_MAX
        if len(names) > max_entries:
            # Too many for one statement: recurse on indexed-size chunks.
            # Unique is done at user_names time, above.
            for names in seq_max_split(names, max_entries):
                returnList.extend(self.searchNames(names))
            return returnList

        # One "name = ?" clause per remaining pkgname, OR'd together.
        pat_sqls = []
        qsql = """select pkgId,pkgKey,name,epoch,version,release,arch
                  from packages where """
        for name in names:
            pat_sqls.append("name = ?")
        qsql = qsql + " OR ".join(pat_sqls)

        for (repo, cache) in self.primarydb.items():
            cur = cache.cursor()
            executeSQL(cur, qsql, names)

            self._sql_pkgKey2po(repo, cur, returnList, have_data=True)

        # Mark all the processed pkgnames as fully loaded
        self._pkgnames_loaded.update([name for name in names])

        return returnList

    @catchSqliteException
    def searchPrco(self, name, prcotype):
        """return list of packages matching name and prcotype

        :param name: either a plain name/glob, or a 'foo > 1.1' style
            dep string (parsed by misc.string_to_prco_tuple)
        :param prcotype: 'provides', 'requires', 'conflicts' or 'obsoletes'
        :raises Errors.PackageSackError: when name cannot be parsed
        """
        # we take name to be a string of some kind
        # we parse the string to see if it is a foo > 1.1 or if it is just 'foo'
        # or what - so we can answer correctly

        if self._skip_all():
            return []
        try:
            (n,f,(e,v,r)) = misc.string_to_prco_tuple(name)
        except Errors.MiscError, e:
            raise Errors.PackageSackError, to_unicode(e)

        n = to_unicode(n)

        glob = True
        querytype = 'glob'
        if not misc.re_glob(n):
            glob = False
            querytype = '='

        basic_results = []
        results = []
        # Pass 1: match on name alone in SQL (glob or exact).
        for (rep,cache) in self.primarydb.items():
            cur = cache.cursor()
            executeSQL(cur, "select DISTINCT pkgKey from %s where name %s ?" % (prcotype,querytype), (n,))
            self._sql_pkgKey2po(rep, cur, basic_results)

        # now we have a list of items matching just the name - let's match them out
        for po in basic_results:
            if misc.re_filename(n) and v is None:
                # file dep add all matches to the results
                results.append(po)
                continue

            if not glob:
                # Exact name: verify flags/version against the pkg's prco.
                if po.checkPrco(prcotype, (n, f, (e,v,r))):
                    results.append(po)
            else:
                # if it is a glob we can't really get any closer to checking it
                results.append(po)
        # If it's not a provides or a filename, we are done
        if prcotype != "provides":
            return results
        if not misc.re_filename(n):
            return results

        # If it is a filename, search the primary.xml file info
        results.extend(self._search_primary_files(n))

        # if its in the primary.xml files then skip the other check
        if misc.re_primary_filename(n) and not glob:
            return misc.unique(results)

        # If it is a filename, search the files.xml file info
        results.extend(self.searchFiles(n))
        return misc.unique(results)
1322 1323 1324 #~ #FIXME - comment this all out below here 1325 #~ for (rep,cache) in self.filelistsdb.items(): 1326 #~ cur = cache.cursor() 1327 #~ (dirname,filename) = os.path.split(name) 1328 #~ # FIXME: why doesn't this work??? 1329 #~ if 0: # name.find('%') == -1: # no %'s in the thing safe to LIKE 1330 #~ executeSQL(cur, "select packages.pkgId as pkgId,\ 1331 #~ filelist.dirname as dirname,\ 1332 #~ filelist.filetypes as filetypes,\ 1333 #~ filelist.filenames as filenames \ 1334 #~ from packages,filelist where \ 1335 #~ (filelist.dirname LIKE ? \ 1336 #~ OR (filelist.dirname LIKE ? AND\ 1337 #~ filelist.filenames LIKE ?))\ 1338 #~ AND (filelist.pkgKey = packages.pkgKey)", (name,dirname,filename)) 1339 #~ else: 1340 #~ executeSQL(cur, "select packages.pkgId as pkgId,\ 1341 #~ filelist.dirname as dirname,\ 1342 #~ filelist.filetypes as filetypes,\ 1343 #~ filelist.filenames as filenames \ 1344 #~ from filelist,packages where dirname = ? AND filelist.pkgKey = packages.pkgKey" , (dirname,)) 1345 1346 #~ matching_ids = [] 1347 #~ for res in cur: 1348 #~ if self._excluded(rep, res['pkgId']): 1349 #~ continue 1350 1351 #~ #FIXME - optimize the look up here by checking for single-entry filenames 1352 #~ quicklookup = {} 1353 #~ for fn in decodefilenamelist(res['filenames']): 1354 #~ quicklookup[fn] = 1 1355 1356 #~ # If it matches the dirname, that doesnt mean it matches 1357 #~ # the filename, check if it does 1358 #~ if filename and filename not in quicklookup: 1359 #~ continue 1360 1361 #~ matching_ids.append(str(res['pkgId'])) 1362 1363 1364 #~ pkgs = self._getListofPackageDetails(matching_ids) 1365 #~ for pkg in pkgs: 1366 #~ results.append(self.pc(rep,pkg)) 1367 1368 #~ return results 1369
1370 - def searchProvides(self, name):
1371 """return list of packages providing name (any evr and flag)""" 1372 return self.searchPrco(name, "provides")
1373
1374 - def searchRequires(self, name):
1375 """return list of packages requiring name (any evr and flag)""" 1376 return self.searchPrco(name, "requires")
1377
1378 - def searchObsoletes(self, name):
1379 """return list of packages obsoleting name (any evr and flag)""" 1380 return self.searchPrco(name, "obsoletes")
1381
1382 - def searchConflicts(self, name):
1383 """return list of packages conflicting with name (any evr and flag)""" 1384 return self.searchPrco(name, "conflicts")
1385 1386
    def db2class(self, db, nevra_only=False):
        """Convert a raw packages-table row into an ad-hoc object exposing
        nevra/location/checksum/time/size/info attributes.

        NOTE(review): legacy path -- the unconditional debug print below
        suggests it is believed dead; code left byte-identical.

        :param db: a row mapping from the packages table
        :param nevra_only: when True, stop after nevra/sack/pkgId
        """
        print 'die die die die die db2class'
        class tmpObject:
            pass
        y = tmpObject()

        y.nevra = (db['name'],db['epoch'],db['version'],db['release'],db['arch'])
        y.sack = self
        y.pkgId = db['pkgId']
        if nevra_only:
            # Fast path: callers that only need nevra + pkgId stop here.
            return y

        y.hdrange = {'start': db['rpm_header_start'],'end': db['rpm_header_end']}
        y.location = {'href': db['location_href'],'value': '', 'base': db['location_base']}
        # 'pkgid': 'YES' means the checksum value IS the pkgId.
        y.checksum = {'pkgid': 'YES','type': db['checksum_type'],
                      'value': db['pkgId'] }
        y.time = {'build': db['time_build'], 'file': db['time_file'] }
        y.size = {'package': db['size_package'], 'archive': db['size_archive'], 'installed': db['size_installed'] }
        y.info = {'summary': db['summary'], 'description': db['description'],
                  'packager': db['rpm_packager'], 'group': db['rpm_group'],
                  'buildhost': db['rpm_buildhost'], 'sourcerpm': db['rpm_sourcerpm'],
                  'url': db['url'], 'vendor': db['rpm_vendor'], 'license': db['rpm_license'] }
        return y

    @catchSqliteException
    def returnNewestByNameArch(self, naTup=None, patterns=None, ignore_case=False):
        """Return the newest package matching the (name, arch) tuple naTup.

        :raises Errors.PackageSackError: when naTup matches nothing
        """

        # If naTup is set do it from the database otherwise use our parent's
        # returnNewestByNameArch
        if (not naTup):
            return yumRepo.YumPackageSack.returnNewestByNameArch(self, naTup,
                                                                 patterns,
                                                                 ignore_case)

        # First find all packages that fulfill naTup
        allpkg = []
        for (rep,cache) in self.primarydb.items():
            cur = cache.cursor()
            executeSQL(cur, "select pkgId,pkgKey,name,epoch,version,release,arch from packages where name=? and arch=?", naTup)
            self._sql_pkgKey2po(rep, cur, allpkg, have_data=True)

        # if we've got zilch then raise
        if not allpkg:
            raise Errors.PackageSackError, 'No Package Matching %s.%s' % naTup
        return misc.newestInList(allpkg)

    @catchSqliteException
    def returnNewestByName(self, name=None, patterns=None, ignore_case=False):
        """return list of newest packages based on name matching
           this means(in name.arch form): foo.i386 and foo.noarch will
           be compared to each other for highest version.
           Note that given: foo-1.i386; foo-2.i386 and foo-3.x86_64
           The last _two_ pkgs will be returned, not just one of them.

        :raises Errors.PackageSackError: when name matches nothing"""
        # If name is set do it from the database otherwise use our parent's
        # returnNewestByName
        if self._skip_all():
            return []

        if (not name):
            return yumRepo.YumPackageSack.returnNewestByName(self, name,
                                                             patterns,
                                                             ignore_case)

        # First find all packages that fulfill name
        allpkg = []
        for (rep,cache) in self.primarydb.items():
            cur = cache.cursor()
            executeSQL(cur, "select pkgId,pkgKey,name,epoch,version,release,arch from packages where name=?", (name,))
            self._sql_pkgKey2po(rep, cur, allpkg, have_data=True)

        # if we've got zilch then raise
        if not allpkg:
            raise Errors.PackageSackError, 'No Package Matching %s' % name
        return misc.newestInList(allpkg)
1461 1462 # Do what packages.matchPackageNames does, but query the DB directly 1463 @catchSqliteException
1464 - def matchPackageNames(self, pkgspecs):
1465 if self._skip_all(): 1466 return [], [], [] 1467 1468 matched = [] 1469 exactmatch = [] 1470 unmatched = list(pkgspecs) 1471 1472 for p in pkgspecs: 1473 if misc.re_glob(p): 1474 query = PARSE_QUERY % ({ "op": "glob", "q": p }) 1475 matchres = matched 1476 else: 1477 query = PARSE_QUERY % ({ "op": "=", "q": p }) 1478 matchres = exactmatch 1479 1480 for (rep, db) in self.primarydb.items(): 1481 cur = db.cursor() 1482 executeSQL(cur, query) 1483 pmatches = self._sql_pkgKey2po(rep, cur) 1484 if len(pmatches): 1485 unmatched.remove(p) 1486 matchres.extend(pmatches) 1487 1488 exactmatch = misc.unique(exactmatch) 1489 matched = misc.unique(matched) 1490 unmatched = misc.unique(unmatched) 1491 return exactmatch, matched, unmatched
1492
    def _setupPkgObjList(self, repoid=None, patterns=None, ignore_case=False):
        """Setup need_full and patterns for _yieldSQLDataList, also see if
           we can get away with just using searchNames().

        :return: (need_full, patterns, fields, names) where need_full says
            non-name fields must be searched; patterns is either the plain
            name list (when names is True) or a list of (pattern, op)
            pairs; fields are the columns to match; names=True tells the
            caller to just use searchNames()."""

        if patterns is None:
            patterns = []

        fields = ['name', 'sql_nameArch', 'sql_nameVerRelArch',
                  'sql_nameVer', 'sql_nameVerRel',
                  'sql_envra', 'sql_nevra']
        need_full = False
        for pat in patterns:
            if misc.re_full_search_needed(pat):
                need_full = True
                break

        pat_max = constants.PATTERNS_MAX
        if not need_full:
            # Name-only patterns can use the indexed 'name' column.
            fields = ['name']
            pat_max = constants.PATTERNS_INDEXED_MAX
        if len(patterns) > pat_max:
            # Too many patterns for one statement: select everything and
            # let the caller filter in Python.
            patterns = []
        if ignore_case:
            # sql_esc_glob() returns (pattern, ESCAPE-suffix) pairs for LIKE.
            patterns = sql_esc_glob(patterns)
        else:
            tmp = []
            need_glob = False
            for pat in patterns:
                if misc.re_glob(pat):
                    tmp.append((pat, 'glob'))
                    need_glob = True
                else:
                    tmp.append((pat, '='))
            if not need_full and not need_glob and patterns:
                # Pure exact-name lookups: signal the searchNames() path.
                return (need_full, patterns, fields, True)
            patterns = tmp
        return (need_full, patterns, fields, False)
1530 1531 @catchSqliteException
1532 - def _yieldSQLDataList(self, repoid, patterns, fields, ignore_case):
1533 """Yields all the package data for the given params. Excludes are done 1534 at this stage. """ 1535 1536 pat_sqls = [] 1537 pat_data = [] 1538 for (pattern, rest) in patterns: 1539 for field in fields: 1540 if ignore_case: 1541 pat_sqls.append("%s LIKE ?%s" % (field, rest)) 1542 else: 1543 pat_sqls.append("%s %s ?" % (field, rest)) 1544 pat_data.append(pattern) 1545 if pat_sqls: 1546 qsql = _FULL_PARSE_QUERY_BEG + " OR ".join(pat_sqls) 1547 else: 1548 qsql = """select pkgId, pkgKey, name,epoch,version,release,arch 1549 from packages""" 1550 1551 for (repo,cache) in self.primarydb.items(): 1552 if (repoid == None or repoid == repo.id): 1553 cur = cache.cursor() 1554 executeSQL(cur, qsql, pat_data) 1555 for x in cur: 1556 yield (repo, x)

    def _buildPkgObjList(self, repoid=None, patterns=None, ignore_case=False):
        """Builds a list of packages, only containing nevra information.
           Excludes are done at this stage. """

        returnList = []

        data = self._setupPkgObjList(repoid, patterns, ignore_case)
        (need_full, patterns, fields, names) = data
        if names:
            # Exact-name-only query: delegate to the indexed fast path.
            return self.searchNames(patterns)

        for (repo, x) in self._yieldSQLDataList(repoid, patterns, fields,
                                                ignore_case):
            # _packageByKeyData applies excludes (returns None if excluded).
            po = self._packageByKeyData(repo, x['pkgKey'], x)
            if po is None:
                continue
            returnList.append(po)
        if not patterns and repoid is None:
            # Unfiltered full load: cache the complete object list.
            self.pkgobjlist = returnList
            self._pkgnames_loaded = set() # Save memory
        if not need_full and repoid is None:
            # Mark all the processed pkgnames as fully loaded
            self._pkgnames_loaded.update([po.name for po in returnList])

        return returnList

    def returnPackages(self, repoid=None, patterns=None, ignore_case=False):
        """Returns a list of packages, only containing nevra information. The
           packages are processed for excludes. Note that the packages are
           always filtered to those matching the patterns/case. """

        if self._skip_all():
            return []

        internal_pkgoblist = hasattr(self, 'pkgobjlist')
        if internal_pkgoblist:
            # Full object list already cached: just drop new excludes.
            pkgobjlist = self._clean_pkgobjlist()
        else:
            pkgobjlist = self._buildPkgObjList(repoid, patterns, ignore_case)
            # _buildPkgObjList() may have cached the full list as a side
            # effect; re-check so we know whether pkgobjlist is shared.
            internal_pkgoblist = hasattr(self, 'pkgobjlist')

        if internal_pkgoblist and patterns:
            # The cached list is unfiltered; apply patterns in Python.
            internal_pkgoblist = False
            pkgobjlist = parsePackages(pkgobjlist, patterns, not ignore_case,
                                       unique='repo-pkgkey')
            pkgobjlist = pkgobjlist[0] + pkgobjlist[1]

        # Can't unexclude things, and new excludes are done above...
        if repoid is None:
            if internal_pkgoblist:
                # Copy so callers can't mutate the cached list.
                pkgobjlist = pkgobjlist[:]
            return pkgobjlist

        returnList = []
        for po in pkgobjlist:
            if repoid != po.repoid:
                continue
            returnList.append(po)

        return returnList

    def simplePkgList(self, patterns=None, ignore_case=False):
        """Returns a list of pkg tuples (n, a, e, v, r), optionally from a
           single repoid. Note that the packages are always filtered to those
           matching the patterns/case. """

        if self._skip_all():
            return []

        internal_pkgoblist = hasattr(self, 'pkgobjlist')
        if internal_pkgoblist:
            # Everything is already loaded: generic implementation is fine.
            return yumRepo.YumPackageSack.simplePkgList(self, patterns,
                                                        ignore_case)

        repoid = None
        returnList = []
        # Haven't loaded everything, so _just_ get the pkgtups...
        data = self._setupPkgObjList(repoid, patterns, ignore_case)
        (need_full, patterns, fields, names) = data
        if names:
            # Exact-name-only patterns: indexed fast path.
            return [pkg.pkgtup for pkg in self.searchNames(patterns)]

        for (repo, x) in self._yieldSQLDataList(repoid, patterns, fields,
                                                ignore_case):
            # NOTE: Can't unexclude things...
            pkgtup = self._pkgtupByKeyData(repo, x['pkgKey'], x)
            if pkgtup is None:
                continue
            returnList.append(pkgtup)
        return returnList
1648 1649 @catchSqliteException
1650 - def searchNevra(self, name=None, epoch=None, ver=None, rel=None, arch=None):
1651 """return list of pkgobjects matching the nevra requested""" 1652 if self._skip_all(): 1653 return [] 1654 1655 returnList = [] 1656 1657 if name: # Almost always true... 1658 for pkg in self.searchNames(names=[name]): 1659 match = True 1660 for (col, var) in [('epoch', epoch), ('version', ver), 1661 ('arch', arch), ('release', rel)]: 1662 if var and getattr(pkg, col) != var: 1663 match = False 1664 break 1665 if match: 1666 returnList.append(pkg) 1667 return returnList 1668 1669 # make sure some dumbass didn't pass us NOTHING to search on 1670 empty = True 1671 for arg in (name, epoch, ver, rel, arch): 1672 if arg: 1673 empty = False 1674 if empty: 1675 return returnList 1676 1677 # make up our execute string 1678 q = "select pkgId,pkgKey,name,epoch,version,release,arch from packages WHERE" 1679 for (col, var) in [('name', name), ('epoch', epoch), ('version', ver), 1680 ('arch', arch), ('release', rel)]: 1681 if var: 1682 if q[-5:] != 'WHERE': 1683 q = q + ' AND %s = "%s"' % (col, var) 1684 else: 1685 q = q + ' %s = "%s"' % (col, var) 1686 1687 # Search all repositories 1688 for (rep,cache) in self.primarydb.items(): 1689 cur = cache.cursor() 1690 executeSQL(cur, q) 1691 self._sql_pkgKey2po(rep, cur, returnList, have_data=True) 1692 return returnList 1693 1694 @catchSqliteException
1695 - def excludeArchs(self, archlist):
1696 """excludes incompatible arches - archlist is a list of compat arches""" 1697 1698 if self._arch_allowed is None: 1699 self._arch_allowed = set(archlist) 1700 else: 1701 self._arch_allowed = self._arch_allowed.intersection(archlist) 1702 sarchlist = map(lambda x: "'%s'" % x , archlist) 1703 arch_query = ",".join(sarchlist) 1704 1705 for (rep, cache) in self.primarydb.items(): 1706 cur = cache.cursor() 1707 1708 # This is a minor hack opt. for source repos. ... if they are 1709 # enabled normally, we don't want to exclude each package so we 1710 # check it and exclude the entire thing. 1711 if not rep.id.endswith("-source") or 'src' in self._arch_allowed: 1712 continue 1713 has_arch = False 1714 executeSQL(cur, "SELECT DISTINCT arch FROM packages") 1715 for row in cur: 1716 if row[0] in archlist: 1717 has_arch = True 1718 break 1719 if not has_arch: 1720 self._delAllPackages(rep) 1721 return
1722
# Simple helper functions

# Return a string representing filenamelist (filenames can not contain /)
def encodefilenamelist(filenamelist):
    """Encode a list of filenames as a single '/'-separated string."""
    return "/".join(filenamelist)
1728
# Return a list representing filestring (filenames can not contain /)
def decodefilenamelist(filenamestring):
    """Split an encoded filename string back into the list of names.

    Doubled separators are collapsed first, matching the encoder's
    assumption that filenames never contain '/'."""
    collapsed = filenamestring.replace('//', '/')
    return collapsed.split('/')
1733
# Return a string representing filetypeslist
# filetypes should be file, dir or ghost
def encodefiletypelist(filetypelist):
    """Encode a list of filetype names as one character each (f/d/g).

    Raises KeyError for anything other than 'file', 'dir' or 'ghost',
    exactly like the original += loop did.
    """
    ft2string = {'file': 'f','dir': 'd','ghost': 'g'}
    # join() replaces the old quadratic "result +=" string building.
    return ''.join([ft2string[x] for x in filetypelist])
1742
# Return a list representing filetypestring
# filetypes should be file, dir or ghost
def decodefiletypelist(filetypestring):
    """Decode a string of f/d/g characters back into filetype names."""
    string2ft = {'f':'file','d': 'dir','g': 'ghost'}
    decoded = []
    for ch in filetypestring:
        decoded.append(string2ft[ch])
    return decoded


# Query used by matchPackageNames
# op is either '=' or 'like', q is the search term
# Check against name, nameArch, nameVerRelArch, nameVer, nameVerRel,
# envra, nevra
# NOTE(review): q is interpolated directly into the statement; values
# containing single quotes will break it -- see matchPackageNames().
PARSE_QUERY = """
select pkgKey from packages
where name %(op)s '%(q)s'
   or name || '.' || arch %(op)s '%(q)s'
   or name || '-' || version %(op)s '%(q)s'
   or name || '-' || version || '-' || release %(op)s '%(q)s'
   or name || '-' || version || '-' || release || '.' || arch %(op)s '%(q)s'
   or epoch || ':' || name || '-' || version || '-' || release || '.' || arch %(op)s '%(q)s'
   or name || '-' || epoch || ':' || version || '-' || release || '.' || arch %(op)s '%(q)s'
"""

# This is roughly the same as above, and used by _buildPkgObjList().
# Use " to quote because we using ? ... and sqlutils.QmarkToPyformat gets
# confused.
_FULL_PARSE_QUERY_BEG = """
SELECT pkgId,pkgKey,name,epoch,version,release,arch,
  name || "." || arch AS sql_nameArch,
  name || "-" || version || "-" || release || "." || arch AS sql_nameVerRelArch,
  name || "-" || version AS sql_nameVer,
  name || "-" || version || "-" || release AS sql_nameVerRel,
  epoch || ":" || name || "-" || version || "-" || release || "." || arch AS sql_envra,
  name || "-" || epoch || ":" || version || "-" || release || "." || arch AS sql_nevra
  FROM packages
  WHERE
"""