
Source Code for Module pulp.server.api.repo

#!/usr/bin/python
#
# Copyright (c) 2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.


# Python
import logging
import gzip
import os
import traceback
from itertools import chain
from urlparse import urlparse

# Pulp
from pulp.server import comps_util
from pulp.server import crontab
from pulp.server import upload
from pulp.server.api import repo_sync
from pulp.server.api.base import BaseApi
from pulp.server.api.package import PackageApi
from pulp.server.api.errata import ErrataApi
from pulp.server.auditing import audit
from pulp.server.event.dispatcher import event
from pulp.server import config
from pulp.server.db import model
from pulp.server.db.connection import get_object_db
from pulp.server.pexceptions import PulpException
from pulp.server.api.fetch_listings import CDNConnection

log = logging.getLogger(__name__)

repo_fields = model.Repo(None, None, None).keys()

class RepoApi(BaseApi):
    """
    API for create/delete/syncing of Repo objects
    """

    def __init__(self):
        BaseApi.__init__(self)
        self.packageApi = PackageApi()
        self.errataapi = ErrataApi()
        self.localStoragePath = config.config.get('paths', 'local_storage')

    @property
    def _indexes(self):
        return ["packages", "packagegroups", "packagegroupcategories"]

    @property
    def _unique_indexes(self):
        return ["id"]

    def _getcollection(self):
        return get_object_db('repos',
                             self._unique_indexes,
                             self._indexes)

    def _validate_schedule(self, sync_schedule):
        '''
        Verifies the sync schedule is in the correct cron syntax, throwing an exception if
        it is not.
        '''
        if sync_schedule:
            item = crontab.CronItem(sync_schedule + ' null') # CronItem expects a command
            if not item.is_valid():
                raise PulpException('Invalid sync schedule specified [%s]' % sync_schedule)
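
    # Illustrative usage sketch (not part of the original module): sync_schedule
    # is a standard five-field cron expression; _validate_schedule appends a
    # placeholder command before handing it to crontab.CronItem.
    #
    #   api = RepoApi()
    #   api._validate_schedule('0 3 * * *')      # nightly at 03:00 -- accepted
    #   api._validate_schedule('every tuesday')  # raises PulpException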

    def _get_existing_repo(self, id):
        """
        Protected helper function to look up a repository by id and raise a
        PulpException if it is not found.
        """
        repo = self.repository(id)
        if repo is None:
            raise PulpException("No Repo with id: %s found" % id)
        return repo

    @event(subject='repo.created')
    @audit(params=['id', 'name', 'arch', 'feed'])
    def create(self, id, name, arch, feed=None, symlinks=False, sync_schedule=None, cert_data=None, productid=None):
        """
        Create a new Repository object and return it
        """
        repo = self.repository(id)
        if repo is not None:
            raise PulpException("A Repo with id %s already exists" % id)
        self._validate_schedule(sync_schedule)

        r = model.Repo(id, name, arch, feed)
        r['sync_schedule'] = sync_schedule
        r['use_symlinks'] = symlinks
        if cert_data:
            cert_files = self._write_certs_to_disk(id, cert_data)
            for key, value in cert_files.items():
                r[key] = value
        if productid:
            r['productid'].append(productid)

        if not r['relative_path']:
            # For non-product repos, default to the repo id
            r['relative_path'] = r['id']
        self.insert(r)

        if sync_schedule:
            repo_sync.update_schedule(r)

        return r

    def _write_certs_to_disk(self, repoid, cert_data):
        CONTENT_CERTS_PATH = config.config.get("repos", "content_cert_location")
        cert_dir = os.path.join(CONTENT_CERTS_PATH, repoid)

        if not os.path.exists(cert_dir):
            os.makedirs(cert_dir)
        cert_files = {}
        for key, value in cert_data.items():
            fname = os.path.join(cert_dir, repoid + "." + key)
            try:
                log.info("storing file %s" % fname)
                f = open(fname, 'w')
                f.write(value)
                f.close()
                cert_files[key] = fname
            except Exception, e:
                raise PulpException("Error storing certificate file %s: %s" % (key, e))
        return cert_files

    @audit(params=['productid', 'content_set'])
    def create_product_repo(self, content_set, cert_data, productid):
        """
        Creates a repo associated with a product, usually through an event raised
        from candlepin
        @param productid: A product the candidate repo should be associated with.
        @type productid: str
        @param content_set: a dict of content set labels and relative urls
        @type content_set: dict(<label> : <relative_url>,)
        @param cert_data: a dictionary of ca_cert, cert and key for this product
        @type cert_data: dict(ca : <ca_cert>, cert: <ent_cert>, key : <cert_key>)
        """
        if not cert_data:
            # Nothing further can be done, exit
            return
        cert_files = self._write_certs_to_disk(productid, cert_data)
        CDN_URL = config.config.get("repos", "content_url")
        CDN_HOST = urlparse(CDN_URL).hostname
        serv = CDNConnection(CDN_HOST, cacert=cert_files['ca'],
                             cert=cert_files['cert'], key=cert_files['key'])
        serv.connect()
        repo_info = serv.fetch_urls(content_set)

        for label, uri in repo_info.items():
            try:
                repo = self.create(label, label, arch=label.split("-")[-1],
                                   feed="yum:" + CDN_URL + '/' + uri,
                                   cert_data=cert_data, productid=productid)
                repo['relative_path'] = uri
                self.update(repo)
            except:
                log.error("Error creating repo %s for product %s" % (label, productid))
                continue

        serv.disconnect()

    @audit()
    def get_repos_by_product(self, productid):
        """
        Look up available repos associated with a product id
        @param productid: product id the candidate repos are associated with.
        @type productid: str
        """
        return list(self.objectdb.find({"productid" : productid}))

    @audit()
    def delete(self, id):
        repo = self._get_existing_repo(id)
        repo_sync.delete_schedule(repo)
        self.objectdb.remove(repo, safe=True)

    @audit()
    def update(self, repo_data):
        repo = self._get_existing_repo(repo_data['id'])
        # make sure we're only updating the fields in the model
        for field in repo_fields:
            # default to the existing value if the field isn't in the data
            repo[field] = repo_data.get(field, repo[field])
        self._validate_schedule(repo['sync_schedule'])

        self.objectdb.save(repo, safe=True)

        if repo['sync_schedule']:
            repo_sync.update_schedule(repo)
        else:
            repo_sync.delete_schedule(repo)

        return repo

    def repositories(self, spec=None, fields=None):
        """
        Return a list of Repositories
        """
        return list(self.objectdb.find(spec=spec, fields=fields))

    def repository(self, id, fields=None):
        """
        Return a single Repository object
        """
        repos = self.repositories({'id': id}, fields)
        if not repos:
            return None
        return repos[0]

    def packages(self, id, name=None):
        """
        Return list of Package objects in this Repo
        """
        repo = self._get_existing_repo(id)
        packages = repo['packages']
        # XXX this is WRONG!!!!, we are returning a dict if name is None
        # otherwise we are returning a list!
        if name is None:
            return packages
        return [p for p in packages.values() if p['name'].find(name) >= 0]

    def get_package(self, id, name):
        """
        Return matching Package object in this Repo
        """
        packages = self.packages(id, name)
        if not packages:
            return None
        return packages[0]

    @audit()
    def add_package(self, repoid, packageid):
        """
        Adds the passed in package to this repo
        """
        repo = self._get_existing_repo(repoid)
        package = self.packageApi.package(packageid)
        if package is None:
            raise PulpException("No Package with id: %s found" % packageid)
        # TODO: We might want to restrict Packages we add to only
        # allow 1 NEVRA per repo and require filename to be unique
        self._add_package(repo, package)
        self.update(repo)

    def _add_package(self, repo, p):
        """
        Responsible for properly associating a Package to a Repo
        """
        packages = repo['packages']
        if p['id'] in packages:
            # No need to update repo, this Package is already under this repo
            return
        packages[p['id']] = p

    @audit()
    def remove_package(self, repoid, p):
        repo = self._get_existing_repo(repoid)
        # this won't fail even if the package is not in the repo's packages
        repo['packages'].pop(p['id'], None)
        self.update(repo)
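
    # Illustrative usage sketch; note that add_package() takes a package id
    # while remove_package() takes the package object itself:
    #
    #   api.add_package('f13-x86_64', pkg['id'])
    #   api.remove_package('f13-x86_64', pkg)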

    def errata(self, id, types=()):
        """
        Look up all applicable errata for a given repo id
        """
        repo = self._get_existing_repo(id)
        errata = repo['errata']
        if not errata:
            return []
        if types:
            try:
                return [item for type in types for item in errata[type]]
            except KeyError, ke:
                log.debug("Invalid errata type requested :[%s]" % (ke))
                raise PulpException("Invalid errata type requested :[%s]" % (ke))
        return list(chain.from_iterable(errata.values()))

    @audit()
    def add_erratum(self, repoid, erratumid):
        """
        Adds an erratum to this repo
        """
        repo = self._get_existing_repo(repoid)
        self._add_erratum(repo, erratumid)
        self.update(repo)

    def add_errata(self, repoid, errataids=()):
        """
        Adds a list of errata to this repo
        """
        repo = self._get_existing_repo(repoid)
        for erratumid in errataids:
            self._add_erratum(repo, erratumid)
        self.update(repo)

    def _add_erratum(self, repo, erratumid):
        """
        Responsible for properly associating an Erratum to a Repo
        """
        erratum = self.errataapi.erratum(erratumid)
        if erratum is None:
            raise PulpException("No Erratum with id: %s found" % erratumid)

        errata = repo['errata']
        try:
            if erratum['id'] in errata[erratum['type']]:
                # erratum already in repo, nothing to do
                return
        except KeyError:
            errata[erratum['type']] = []

        errata[erratum['type']].append(erratum['id'])

    @audit()
    def delete_erratum(self, repoid, erratumid):
        """
        Delete an erratum from this repo
        """
        repo = self._get_existing_repo(repoid)
        self._delete_erratum(repo, erratumid)
        self.update(repo)

    def delete_errata(self, repoid, errataids):
        """
        Delete a list of errata from this repo
        """
        repo = self._get_existing_repo(repoid)
        for erratumid in errataids:
            self._delete_erratum(repo, erratumid)
        self.update(repo)

    def _delete_erratum(self, repo, erratumid):
        """
        Responsible for properly removing an Erratum from a Repo
        """
        erratum = self.errataapi.erratum(erratumid)
        if erratum is None:
            raise PulpException("No Erratum with id: %s found" % erratumid)
        try:
            curr_errata = repo['errata'][erratum['type']]
            if erratum['id'] not in curr_errata:
                log.debug("Erratum %s not in repo. Nothing to delete" % erratum['id'])
                return
            del curr_errata[curr_errata.index(erratum['id'])]
        except Exception, e:
            raise PulpException("Erratum %s delete failed due to Error: %s" % (erratum['id'], e))

    @audit(params=['repoid', 'group_id', 'group_name'])
    def create_packagegroup(self, repoid, group_id, group_name, description):
        """
        Creates a new packagegroup saved in the referenced repo
        @param repoid: repo id
        @param group_id: package group id
        @param group_name: package group name
        @param description: package group description
        @return packagegroup object
        """
        repo = self._get_existing_repo(repoid)
        if group_id in repo['packagegroups']:
            raise PulpException("Package group %s already exists in repo %s" %
                                (group_id, repoid))
        group = model.PackageGroup(group_id, group_name, description)
        repo["packagegroups"][group_id] = group
        self.update(repo)
        self._update_groups_metadata(repo["id"])
        return group

    @audit()
    def delete_packagegroup(self, repoid, groupid):
        """
        Remove a packagegroup from a repo
        @param repoid: repo id
        @param groupid: package group id
        """
        repo = self._get_existing_repo(repoid)
        if groupid not in repo['packagegroups']:
            return
        if repo['packagegroups'][groupid]["immutable"]:
            raise PulpException("Changes to immutable groups are not supported: %s" % (groupid))
        del repo['packagegroups'][groupid]
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit()
    def update_packagegroup(self, repoid, pg):
        """
        Save the passed in PackageGroup to this repo
        @param repoid: repo id
        @param pg: packagegroup
        """
        repo = self._get_existing_repo(repoid)
        pg_id = pg['id']
        if pg_id in repo['packagegroups']:
            if repo["packagegroups"][pg_id]["immutable"]:
                raise PulpException("Changes to immutable groups are not supported: %s" % (pg["id"]))
        repo['packagegroups'][pg_id] = pg
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit()
    def update_packagegroups(self, repoid, pglist):
        """
        Save the list of passed in PackageGroup objects to this repo
        @param repoid: repo id
        @param pglist: list of packagegroups
        """
        repo = self._get_existing_repo(repoid)
        for item in pglist:
            if item['id'] in repo['packagegroups']:
                if repo['packagegroups'][item["id"]]["immutable"]:
                    raise PulpException("Changes to immutable groups are not supported: %s" % (item["id"]))
            repo['packagegroups'][item['id']] = item
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    def packagegroups(self, id):
        """
        Return the PackageGroup objects in this Repo
        @param id: repo id
        @return: dict of packagegroup objects, keyed by group id
        """
        repo = self._get_existing_repo(id)
        return repo['packagegroups']

    def packagegroup(self, repoid, groupid):
        """
        Return a PackageGroup from this Repo
        @param repoid: repo id
        @param groupid: packagegroup id
        @return: packagegroup or None
        """
        repo = self._get_existing_repo(repoid)
        return repo['packagegroups'].get(groupid, None)

    @audit()
    def add_packages_to_group(self, repoid, groupid, pkg_names=[], gtype="default"):
        """
        @param repoid: repository id
        @param groupid: group id
        @param pkg_names: package names
        @param gtype: OPTIONAL type of package group,
            example "mandatory", "default", "optional"
        """
        repo = self._get_existing_repo(repoid)
        if groupid not in repo['packagegroups']:
            raise PulpException("No PackageGroup with id: %s exists in repo %s"
                                % (groupid, repoid))
        group = repo["packagegroups"][groupid]
        if group["immutable"]:
            raise PulpException("Changes to immutable groups are not supported: %s" % (group["id"]))
        for pkg_name in pkg_names:
            if gtype == "mandatory":
                if pkg_name not in group["mandatory_package_names"]:
                    group["mandatory_package_names"].append(pkg_name)
            elif gtype == "conditional":
                raise NotImplementedError("No support for creating conditional groups")
            elif gtype == "optional":
                if pkg_name not in group["optional_package_names"]:
                    group["optional_package_names"].append(pkg_name)
            else:
                if pkg_name not in group["default_package_names"]:
                    group["default_package_names"].append(pkg_name)
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit()
    def delete_package_from_group(self, repoid, groupid, pkg_name, gtype="default"):
        """
        @param repoid: repository id
        @param groupid: group id
        @param pkg_name: package name
        @param gtype: OPTIONAL type of package group,
            example "mandatory", "default", "optional"
        """
        repo = self._get_existing_repo(repoid)
        if groupid not in repo['packagegroups']:
            raise PulpException("No PackageGroup with id: %s exists in repo %s"
                                % (groupid, repoid))
        group = repo["packagegroups"][groupid]
        if group["immutable"]:
            raise PulpException("Changes to immutable groups are not supported: %s" % (group["id"]))
        if gtype == "mandatory":
            if pkg_name in group["mandatory_package_names"]:
                group["mandatory_package_names"].remove(pkg_name)
        elif gtype == "conditional":
            raise NotImplementedError("No support for creating conditional groups")
        elif gtype == "optional":
            if pkg_name in group["optional_package_names"]:
                group["optional_package_names"].remove(pkg_name)
        else:
            if pkg_name in group["default_package_names"]:
                group["default_package_names"].remove(pkg_name)
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit(params=['repoid', 'cat_id', 'cat_name'])
    def create_packagegroupcategory(self, repoid, cat_id, cat_name, description):
        """
        Creates a new packagegroupcategory saved in the referenced repo
        @param repoid: repo id
        @param cat_id: category id
        @param cat_name: category name
        @param description: category description
        @return packagegroupcategory object
        """
        repo = self._get_existing_repo(repoid)
        if cat_id in repo['packagegroupcategories']:
            raise PulpException("Package group category %s already exists in repo %s" %
                                (cat_id, repoid))
        cat = model.PackageGroupCategory(cat_id, cat_name, description)
        repo["packagegroupcategories"][cat_id] = cat
        self.update(repo)
        self._update_groups_metadata(repo["id"])
        return cat

    @audit()
    def delete_packagegroupcategory(self, repoid, categoryid):
        """
        Remove a packagegroupcategory from a repo
        """
        repo = self._get_existing_repo(repoid)
        if categoryid not in repo['packagegroupcategories']:
            return
        if repo['packagegroupcategories'][categoryid]["immutable"]:
            raise PulpException("Changes to immutable categories are not supported: %s" % (categoryid))
        del repo['packagegroupcategories'][categoryid]
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit()
    def update_packagegroupcategory(self, repoid, pgc):
        """
        Save the passed in PackageGroupCategory to this repo
        """
        repo = self._get_existing_repo(repoid)
        if pgc['id'] in repo['packagegroupcategories']:
            if repo["packagegroupcategories"][pgc["id"]]["immutable"]:
                raise PulpException("Changes to immutable categories are not supported: %s" % (pgc["id"]))
        repo['packagegroupcategories'][pgc['id']] = pgc
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    @audit()
    def update_packagegroupcategories(self, repoid, pgclist):
        """
        Save the list of passed in PackageGroupCategory objects to this repo
        """
        repo = self._get_existing_repo(repoid)
        for item in pgclist:
            if item['id'] in repo['packagegroupcategories']:
                if repo["packagegroupcategories"][item["id"]]["immutable"]:
                    raise PulpException("Changes to immutable categories are not supported: %s" % item["id"])
            repo['packagegroupcategories'][item['id']] = item
        self.update(repo)
        self._update_groups_metadata(repo["id"])

    def packagegroupcategories(self, id):
        """
        Return the PackageGroupCategory objects in this Repo as a dict keyed by category id
        """
        repo = self._get_existing_repo(id)
        return repo['packagegroupcategories']

    def packagegroupcategory(self, repoid, categoryid):
        """
        Return a PackageGroupCategory object from this Repo
        """
        repo = self._get_existing_repo(repoid)
        return repo['packagegroupcategories'].get(categoryid, None)

    def _update_groups_metadata(self, repoid):
        """
        Updates the groups metadata (example: comps.xml) for a given repo
        @param repoid: repo id
        @return: True if metadata was successfully updated, otherwise False
        """
        repo = self._get_existing_repo(repoid)
        try:
            # If the repomd file is not valid, or if we are missing
            # a group metadata file, no point in continuing.
            if not os.path.exists(repo["repomd_xml_path"]):
                log.debug("Skipping update of groups metadata since missing repomd file: '%s'" %
                          (repo["repomd_xml_path"]))
                return False
            xml = comps_util.form_comps_xml(repo['packagegroupcategories'],
                                            repo['packagegroups'])
            if repo["group_xml_path"] == "":
                repo["group_xml_path"] = os.path.dirname(repo["repomd_xml_path"])
                repo["group_xml_path"] = os.path.join(os.path.dirname(repo["repomd_xml_path"]),
                                                      "comps.xml")
                self.update(repo)
            f = open(repo["group_xml_path"], "w")
            f.write(xml.encode("utf-8"))
            f.close()
            if repo["group_gz_xml_path"]:
                gz = gzip.open(repo["group_gz_xml_path"], "wb")
                gz.write(xml.encode("utf-8"))
                gz.close()
            return comps_util.update_repomd_xml_file(repo["repomd_xml_path"],
                                                     repo["group_xml_path"], repo["group_gz_xml_path"])
        except Exception, e:
            log.debug("_update_groups_metadata exception caught: %s" % (e))
            log.debug("Traceback: %s" % (traceback.format_exc()))
            return False

    @audit()
    def sync(self, id, progress_callback=None):
        """
        Sync a repo from the URL contained in the feed
        """
        repo = self._get_existing_repo(id)
        repo_source = repo['source']
        if not repo_source:
            raise PulpException("This repo is not setup for sync. Please add packages using upload.")
        sync_packages, sync_errataids = repo_sync.sync(repo, repo_source, progress_callback)
        log.info("Sync returned %s packages, %s errata" % (len(sync_packages),
                                                           len(sync_errataids)))
        # We need to update the repo object in Mongo to account for
        # package_group info added in sync call
        self.update(repo)
        # Remove packages that are no longer in source repo
        for pid in repo["packages"]:
            if pid not in sync_packages:
                log.info("Removing package <%s> from repo <%s>" % (repo["packages"][pid], repo["id"]))
                self.remove_package(repo["id"], repo["packages"][pid])
        # Refresh repo object since we may have deleted some packages
        repo = self._get_existing_repo(id)
        for p in sync_packages.values():
            self._add_package(repo, p)
        # Update repo for package additions
        self.update(repo)
        # Determine removed errata
        log.info("Examining %s errata from repo %s" % (len(self.errata(id)), id))
        for eid in self.errata(id):
            if eid not in sync_errataids:
                log.info("Removing errata %s from repo %s" % (eid, id))
                self.delete_erratum(id, eid)
        # Add in all errata, existing errata will be skipped
        repo = self._get_existing_repo(id) # repo object must be refreshed
        for eid in sync_errataids:
            self._add_erratum(repo, eid)
        self.update(repo)

    @audit(params=['id', 'pkginfo'])
    def upload(self, id, pkginfo, pkgstream):
        """
        Store the uploaded package and associate it with this repo
        """
        repo = self._get_existing_repo(id)
        pkg_upload = upload.PackageUpload(repo, pkginfo, pkgstream)
        pkg, repo = pkg_upload.upload()
        self._add_package(repo, pkg)
        self.update(repo)
        log.info("Upload success %s %s" % (pkg['id'], repo['id']))
        return True

    def all_schedules(self):
        '''
        For all repositories, returns a mapping of repository id to sync schedule.

        @rtype: dict
        @return: key - repo id, value - sync schedule
        '''
        return dict((r['id'], r['sync_schedule']) for r in self.repositories())
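
# Illustrative end-to-end sketch (not part of the original module); assumes a
# configured Pulp server environment with a reachable MongoDB instance:
#
#   api = RepoApi()
#   api.create('f13-x86_64', 'Fedora 13 x86_64', 'x86_64',
#              feed='yum:http://example.com/pub/f13/x86_64/',
#              sync_schedule='0 2 * * *')
#   api.sync('f13-x86_64')
#   api.all_schedules()   # {'f13-x86_64': '0 2 * * *'}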