
Source Code for Module backend.satellite_tools.reposync

#
# Copyright (c) 2008--2020 Red Hat, Inc.
# Copyright (c) 2010--2011 SUSE Linux Products GmbH
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import re
import shutil
import sys
from datetime import datetime
from xml.dom import minidom
import gzip
import ConfigParser
import gettext
import errno

from rhn.connections import idn_puny_to_unicode

from spacewalk.server import rhnPackage, rhnSQL, rhnChannel
from spacewalk.common.usix import raise_with_tb
from spacewalk.common import fileutils, rhnLog, rhnCache, rhnMail
from spacewalk.common.rhnLib import isSUSE, utc
from spacewalk.common.checksum import getFileChecksum
from spacewalk.common.rhnConfig import CFG, initCFG
from spacewalk.common.rhnException import rhnFault
from spacewalk.server.importlib import importLib, mpmSource, packageImport, errataCache
from spacewalk.server.importlib.packageImport import ChannelPackageSubscription
from spacewalk.server.importlib.backendOracle import SQLBackend
from spacewalk.server.importlib.errataImport import ErrataImport
from spacewalk.satellite_tools.download import ThreadedDownloader, ProgressBarLogger, TextLogger
from spacewalk.satellite_tools.repo_plugins import CACHE_DIR
from spacewalk.server import taskomatic, rhnPackageUpload
from spacewalk.satellite_tools.satCerts import verify_certificate_dates

from syncLib import log, log2, log2disk, dumpEMAIL_LOG, log2background

translation = gettext.translation('spacewalk-backend-server', fallback=True)
_ = translation.ugettext

default_log_location = '/var/log/rhn/'
relative_comps_dir = 'rhn/comps'
relative_modules_dir = 'rhn/modules'
checksum_cache_filename = 'reposync/checksum_cache'
default_import_batch_size = 10

errata_typemap = {
    'security': 'Security Advisory',
    'recommended': 'Bug Fix Advisory',
    'bugfix': 'Bug Fix Advisory',
    'optional': 'Product Enhancement Advisory',
    'feature': 'Product Enhancement Advisory',
    'enhancement': 'Product Enhancement Advisory'
}

def send_mail(sync_type="Repo"):
    """ Send email summary """
    body = dumpEMAIL_LOG()
    if body:
        print(_("+++ sending log as an email +++"))
        host_label = idn_puny_to_unicode(os.uname()[1])
        headers = {
            'Subject': _("%s sync. report from %s") % (sync_type, host_label),
        }
        sndr = "root@%s" % host_label
        if CFG.default_mail_from:
            sndr = CFG.default_mail_from
        rhnMail.send(headers, body, sender=sndr)
    else:
        print(_("+++ email requested, but there is nothing to send +++"))

class KSDirParser:
    file_blacklist = ["release-notes/"]

    def __init__(self):
        self.dir_content = []

    def get_content(self):
        return self.dir_content


class KSDirHtmlParser(KSDirParser):
    def __init__(self, plug, dir_name):
        KSDirParser.__init__(self)

        dir_html = plug.get_file(dir_name)
        if dir_html is None:
            return

        for s in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', dir_html)):
            if not (re.match(r'/', s) or re.search(r'\?', s) or re.search(r'\.\.', s) or re.match(r'[a-zA-Z]+:', s) or
                    re.search(r'\.rpm$', s)):
                if re.search(r'/$', s):
                    file_type = 'DIR'
                else:
                    file_type = 'FILE'

                if s not in (self.file_blacklist):
                    self.dir_content.append({'name': s, 'type': file_type})

class KSDirLocalParser(KSDirParser):
    def __init__(self, base_dir, dir_name):
        KSDirParser.__init__(self)
        dir_path = os.path.join(base_dir, dir_name)
        for dir_item in os.listdir(dir_path):
            if not dir_item.endswith(".rpm"):
                dir_item_path = os.path.join(dir_path, dir_item)
                if os.path.isdir(dir_item_path):
                    file_type = 'DIR'
                    dir_item = "%s/" % dir_item
                else:
                    file_type = 'FILE'
                if dir_item not in self.file_blacklist:
                    self.dir_content.append({'name': dir_item, 'type': file_type})

class TreeInfoError(Exception):
    pass

class TreeInfoParser(object):
    def __init__(self, filename):
        self.parser = ConfigParser.RawConfigParser()
        # do not lowercase
        self.parser.optionxform = str
        fp = open(filename)
        try:
            try:
                self.parser.readfp(fp)
            except ConfigParser.ParsingError:
                raise TreeInfoError("Could not parse treeinfo file!")
        finally:
            if fp is not None:
                fp.close()

    def get_images(self):
        files = []
        for section_name in self.parser.sections():
            if section_name.startswith('images-') or section_name == 'stage2':
                for item in self.parser.items(section_name):
                    files.append(item[1])
        return files

    def get_family(self):
        for section_name in self.parser.sections():
            if section_name == 'general':
                for item in self.parser.items(section_name):
                    if item[0] == 'family':
                        return item[1]
        return None

    def get_major_version(self):
        for section_name in self.parser.sections():
            if section_name == 'general':
                for item in self.parser.items(section_name):
                    if item[0] == 'version':
                        return item[1].split('.')[0]
        return None

    def get_package_dir(self):
        for section_name in self.parser.sections():
            if section_name == 'general':
                for item in self.parser.items(section_name):
                    if item[0] == 'packagedir':
                        return item[1]
        return None

    def get_addons(self):
        addons_dirs = []
        for section_name in self.parser.sections():
            # check by name
            if section_name.startswith('addon-'):
                for item in self.parser.items(section_name):
                    if item[0] == 'repository':
                        addons_dirs.append(item[1])
            # check by type
            else:
                repository = None
                repo_type = None
                for item in self.parser.items(section_name):
                    if item[0] == 'repository':
                        repository = item[1]
                    elif item[0] == 'type':
                        repo_type = item[1]

                if repo_type == 'addon' and repository is not None:
                    addons_dirs.append(repository)

        return addons_dirs

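# A minimal sketch of the kind of treeinfo file TreeInfoParser consumes.
# The section and key names follow the installer-tree conventions the parser
# looks for (general, images-*, stage2, addon-*); the concrete values below
# are illustrative assumptions, not taken from a real distribution tree:
#
#     [general]
#     family = CentOS
#     version = 7.9
#     packagedir = Packages
#
#     [stage2]
#     mainimage = images/install.img
#
#     [images-x86_64]
#     kernel = images/pxeboot/vmlinuz
#     initrd = images/pxeboot/initrd.img
#
#     [addon-HighAvailability]
#     type = addon
#     repository = addons/HighAvailability
#
# For such a file, get_family() returns 'CentOS', get_major_version()
# returns '7', get_images() returns the three image paths, and get_addons()
# returns ['addons/HighAvailability'].
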
def set_filter_opt(option, opt_str, value, parser):
    # pylint: disable=W0613
    if opt_str in ['--include', '-i']:
        f_type = '+'
    else:
        f_type = '-'
    parser.values.filters.append((f_type, [v.strip() for v in value.split(',') if v.strip()]))

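# A minimal sketch of how set_filter_opt can be wired up as an optparse
# callback. The OptionParser instance and the -e/--exclude option name are
# assumptions for illustration; only --include/-i is special-cased above, so
# any other option routed through this callback yields a '-' filter:
#
#     from optparse import OptionParser
#     parser = OptionParser()
#     parser.set_defaults(filters=[])
#     parser.add_option('-i', '--include', action='callback', type='string',
#                       callback=set_filter_opt)
#     parser.add_option('-e', '--exclude', action='callback', type='string',
#                       callback=set_filter_opt)
#     opts, _args = parser.parse_args(['-i', 'kernel*, glibc'])
#     # opts.filters == [('+', ['kernel*', 'glibc'])]
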
def getChannelRepo():

    initCFG('server.satellite')
    rhnSQL.initDB()
    items = {}
    sql = """
        select s.source_url, c.label
          from rhnContentSource s,
               rhnChannelContentSource cs,
               rhnChannel c
         where s.id = cs.source_id and cs.channel_id = c.id
    """
    h = rhnSQL.prepare(sql)
    h.execute()
    while 1:
        row = h.fetchone_dict()
        if not row:
            break
        if not row['label'] in items:
            items[row['label']] = []
        items[row['label']] += [row['source_url']]

    return items

def getParentsChilds(b_only_custom=False):

    initCFG('server.satellite')
    rhnSQL.initDB()

    sql = """
        select c1.label, c2.label parent_channel, c1.id
          from rhnChannel c1 left outer join rhnChannel c2 on c1.parent_channel = c2.id
         order by c2.label desc, c1.label asc
    """
    h = rhnSQL.prepare(sql)
    h.execute()
    d_parents = {}
    while 1:
        row = h.fetchone_dict()
        if not row:
            break
        if not b_only_custom or rhnChannel.isCustomChannel(row['id']):
            parent_channel = row['parent_channel']
            if not parent_channel:
                d_parents[row['label']] = []
            else:
                # If the parent is not a custom channel treat the child like
                # it's a parent for our purposes
                if parent_channel not in d_parents:
                    d_parents[row['label']] = []
                else:
                    d_parents[parent_channel].append(row['label'])

    return d_parents

def getCustomChannels():

    d_parents = getParentsChilds(True)
    l_custom_ch = []

    for ch in d_parents:
        l_custom_ch += [ch] + d_parents[ch]

    return l_custom_ch

def write_ssl_set_cache(ca_cert, client_cert, client_key):
    """Write one SSL set into cache directory and return path to files."""
    def create_dir_tree(path):
        try:
            os.makedirs(path, int('0750', 8))
        except OSError:
            exc = sys.exc_info()[1]
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise

    filenames = {}
    for cert in (ca_cert, client_cert, client_key):
        (name, pem, org) = cert
        filenames[cert] = None
        if name is not None and pem is not None:
            if not org:
                org = "NULL"
            else:
                org = str(org)
            ssldir = os.path.join(CACHE_DIR, '.ssl-certs', org)
            cert_file = os.path.join(ssldir, "%s.pem" % name)
            if not os.path.exists(cert_file):
                create_dir_tree(ssldir)
                f = open(cert_file, "w")
                f.write(str(pem))
                f.close()
            filenames[cert] = cert_file

    return filenames[ca_cert], filenames[client_cert], filenames[client_key]

def clear_ssl_cache():
    ssldir = os.path.join(CACHE_DIR, '.ssl-certs')
    shutil.rmtree(ssldir, True)

def get_single_ssl_set(keys, check_dates=False):
    """Picks one of available SSL sets for given repository."""
    if check_dates:
        for ssl_set in keys:
            if verify_certificate_dates(str(ssl_set['ca_cert'])) and \
               (not ssl_set['client_cert'] or
                    verify_certificate_dates(str(ssl_set['client_cert']))):
                return ssl_set
    # Get first
    else:
        return keys[0]
    return None

class RepoSync(object):

    def __init__(self, channel_label, repo_type=None, url=None, fail=False,
                 filters=None, no_errata=False, sync_kickstart=False, latest=False,
                 metadata_only=False, strict=0, excluded_urls=None, no_packages=False,
                 log_dir="reposync", log_level=None, debug=False, force_kickstart=False,
                 force_all_errata=False, check_ssl_dates=False, force_null_org_content=False,
                 show_packages_only=False):
        self.regen = False
        self.fail = fail
        self.filters = filters or []
        self.no_packages = no_packages
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.force_all_errata = force_all_errata
        self.force_kickstart = force_kickstart
        self.latest = latest
        self.metadata_only = metadata_only
        self.ks_tree_type = 'externally-managed'
        self.ks_install_type = None
        self.show_packages_only = show_packages_only

        initCFG('server.satellite')
        rhnSQL.initDB()

        # setup logging
        if debug:
            log_path = 'stdout'
        else:
            log_filename = channel_label + '.log'
            log_path = default_log_location + log_dir + '/' + log_filename
        if log_level is None:
            log_level = 0
        CFG.set('DEBUG', log_level)
        rhnLog.initLOG(log_path, log_level)
        # os.fchown isn't in 2.4 :/
        if not debug:
            if isSUSE():
                os.system("chgrp www " + log_path)
            else:
                os.system("chgrp apache " + log_path)

        log2disk(0, "Command: %s" % str(sys.argv))
        log2disk(0, "Sync of channel started.")

        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel:
            log(0, "Channel %s does not exist." % channel_label)

        if not self.channel['org_id'] or force_null_org_content:
            self.org_id = None
        else:
            self.org_id = int(self.channel['org_id'])

        if not url:
            # TODO: need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label as repo_label, cst.label as repo_type_label
                                    from rhnContentSource s,
                                         rhnChannelContentSource cs,
                                         rhnContentSourceType cst
                                   where s.id = cs.source_id
                                     and cst.id = s.type_id
                                     and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            self.urls = []
            if excluded_urls is None:
                excluded_urls = []
            if source_data:
                for row in source_data:
                    if row['source_url'] not in excluded_urls:
                        # Override repo type DB value using parameter
                        if repo_type:
                            repo_type_label = repo_type
                        else:
                            repo_type_label = row['repo_type_label']
                        self.urls.append((row['id'], row['source_url'], repo_type_label, row['repo_label']))
        else:
            if repo_type:
                repo_type_label = repo_type
            else:
                repo_type_label = 'yum'
            self.urls = [(None, u, repo_type_label, None) for u in url]

        if not self.urls:
            log2(0, 0, "Channel %s has no URL associated" % channel_label, stream=sys.stderr)

        self.strict = strict
        self.all_packages = set()
        self.all_errata = set()
        self.check_ssl_dates = check_ssl_dates
        # Init cache for computed checksums to not compute it on each reposync run again
        self.checksum_cache = rhnCache.get(checksum_cache_filename)
        if self.checksum_cache is None:
            self.checksum_cache = {}
        self.import_batch_size = default_import_batch_size

    def set_import_batch_size(self, batch_size):
        self.import_batch_size = int(batch_size)

    def set_urls_prefix(self, prefix):
        """If there are relative urls in DB, set their real location in runtime"""
        for index, url in enumerate(self.urls):
            # Make list, add prefix, make tuple and save
            url = list(url)
            url[1] = "%s%s" % (prefix, url[1])
            url = tuple(url)
            self.urls[index] = url

    def sync(self, update_repodata=True):
        """Trigger a reposync"""
        failed_packages = 0
        sync_error = 0
        start_time = datetime.now()
        for (repo_id, url, repo_type, repo_label) in self.urls:
            log(0, '')
            log(0, " Processing repository with URL: %s" % url)
            if self.metadata_only:
                log(0, ' * WARNING: processing RPM metadata only.')

            plugin = None

            # pylint: disable=W0703
            try:
                if '://' not in url:
                    raise Exception("Unknown protocol in repo URL: %s" % url)

                # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
                if url.startswith("uln://"):
                    repo_type = "uln"

                is_non_local_repo = (url.find("file:/") < 0)

                repo_plugin = self.load_plugin(repo_type)

                if repo_label:
                    repo_name = repo_label
                else:
                    # use modified relative_url as name of repo plugin, because
                    # it is used as the name of the cache directory as well
                    relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                    repo_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")

                (ca_cert_file, client_cert_file, client_key_file) = (None, None, None)
                if repo_id is not None:
                    h = rhnSQL.execute("""
                        select k1.description as ca_cert_name, k1.key as ca_cert, k1.org_id as ca_cert_org,
                               k2.description as client_cert_name, k2.key as client_cert, k2.org_id as client_cert_org,
                               k3.description as client_key_name, k3.key as client_key, k3.org_id as client_key_org
                          from rhncontentsource cs inner join
                               rhncontentsourcessl csssl on cs.id = csssl.content_source_id inner join
                               rhncryptokey k1 on csssl.ssl_ca_cert_id = k1.id left outer join
                               rhncryptokey k2 on csssl.ssl_client_cert_id = k2.id left outer join
                               rhncryptokey k3 on csssl.ssl_client_key_id = k3.id
                         where cs.id = :repo_id
                        """, repo_id=int(repo_id))
                    keys = []
                    while True:
                        row = h.fetchone_dict()
                        if row is None:
                            break
                        for col in ['ca_cert', 'client_cert', 'client_key']:
                            row[col] = rhnSQL.read_lob(row[col])
                        keys.append(row)
                    if keys:
                        ssl_set = get_single_ssl_set(keys, check_dates=self.check_ssl_dates)
                        if ssl_set:
                            (ca_cert_file, client_cert_file, client_key_file) = write_ssl_set_cache(
                                (ssl_set['ca_cert_name'], ssl_set['ca_cert'], ssl_set['ca_cert_org']),
                                (ssl_set['client_cert_name'], ssl_set['client_cert'], ssl_set['client_cert_org']),
                                (ssl_set['client_key_name'], ssl_set['client_key'], ssl_set['client_key_org']))
                        else:
                            raise ValueError("No valid SSL certificates were found for repository.")

                plugin = repo_plugin(url, repo_name,
                                     org=str(self.org_id or ''),
                                     channel_label=self.channel_label,
                                     ca_cert_file=ca_cert_file,
                                     client_cert_file=client_cert_file,
                                     client_key_file=client_key_file)

                if self.show_packages_only:
                    self.show_packages(plugin, repo_id)
                else:
                    if update_repodata:
                        plugin.clear_cache()

                    if not self.no_packages:
                        self.import_groups(plugin)
                        if repo_type == "yum":
                            self.import_modules(plugin)
                        ret = self.import_packages(plugin, repo_id, is_non_local_repo)
                        failed_packages += ret

                    if not self.no_errata:
                        self.import_updates(plugin)

                    # only for repos obtained from the DB
                    if self.sync_kickstart and repo_label:
                        try:
                            self.import_kickstart(plugin, repo_label, is_non_local_repo)
                        except:
                            rhnSQL.rollback()
                            raise
            except rhnSQL.SQLError:
                raise
            except Exception:
                e = sys.exc_info()[1]
                log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
                log2disk(0, "ERROR: %s" % e)
                # pylint: disable=W0104
                sync_error = -1

        # In strict mode unlink all packages from channel which are not synced from current repositories
        if self.strict and sync_error == 0:
            if not self.no_packages:
                channel_packages = rhnSQL.fetchall_dict("""
                    select p.id, ct.label as checksum_type, c.checksum
                      from rhnChannelPackage cp,
                           rhnPackage p,
                           rhnChecksumType ct,
                           rhnChecksum c
                     where cp.channel_id = :channel_id
                       and cp.package_id = p.id
                       and p.checksum_id = c.id
                       and c.checksum_type_id = ct.id
                    """, channel_id=int(self.channel['id'])) or []
                for package in channel_packages:
                    if (package['checksum_type'], package['checksum']) not in self.all_packages:
                        self.disassociate_package(package['checksum_type'], package['checksum'])
                        self.regen = True

            # For custom channels unlink also errata
            if not self.no_errata and self.channel['org_id']:
                channel_errata = self.list_errata()
                for erratum in channel_errata:
                    if erratum not in self.all_errata:
                        self.disassociate_erratum(erratum)
                        self.regen = True

        # Update cache with package checksums
        rhnCache.set(checksum_cache_filename, self.checksum_cache)
        if self.regen:
            taskomatic.add_to_repodata_queue_for_channel_package_subscription(
                [self.channel_label], [], "server.app.yumreposync")
            taskomatic.add_to_erratacache_queue(self.channel_label)
        self.update_date()
        rhnSQL.commit()

        # update permissions
        fileutils.createPath(os.path.join(CFG.MOUNT_POINT, 'rhn'))  # if the directory exists update ownership only
        for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
            for d in dirs:
                fileutils.setPermsPath(os.path.join(root, d), group='apache')
            for f in files:
                fileutils.setPermsPath(os.path.join(root, f), group='apache')
        elapsed_time = datetime.now() - start_time
        log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
        # if there are no global problems, but some packages weren't synced
        if sync_error == 0 and failed_packages > 0:
            sync_error = failed_packages
        return elapsed_time, sync_error

    def set_ks_tree_type(self, tree_type='externally-managed'):
        self.ks_tree_type = tree_type

    def set_ks_install_type(self, install_type='generic_rpm'):
        self.ks_install_type = install_type

    def update_date(self):
        """ Updates the last sync time"""
        h = rhnSQL.prepare("""update rhnChannel set LAST_SYNCED = current_timestamp
                               where label = :channel""")
        h.execute(channel=self.channel['label'])

    @staticmethod
    def load_plugin(repo_type):
        name = repo_type + "_src"
        mod = __import__('spacewalk.satellite_tools.repo_plugins', globals(), locals(), [name])
        submod = getattr(mod, name)
        return getattr(submod, "ContentSource")

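    # load_plugin resolves the module "<repo_type>_src" inside
    # spacewalk.satellite_tools.repo_plugins and returns its ContentSource
    # class. So, assuming the yum plugin module is installed:
    #
    #     cls = RepoSync.load_plugin('yum')
    #     # is roughly equivalent to:
    #     # from spacewalk.satellite_tools.repo_plugins.yum_src import ContentSource
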
    def import_updates(self, plug):
        notices = plug.get_updates()
        log(0, '')
        log(0, " Errata in repo: %s." % len(notices))
        if notices:
            self.upload_updates(notices)

    def copy_metadata_file(self, plug, filename, comps_type, relative_dir):
        old_checksum = None
        db_timestamp = datetime.fromtimestamp(0.0, utc)
        basename = os.path.basename(filename)
        log(0, '')
        log(0, " Importing %s file %s." % (comps_type, basename))
        relativedir = os.path.join(relative_dir, self.channel_label)
        absdir = os.path.join(CFG.MOUNT_POINT, relativedir)
        if not os.path.exists(absdir):
            os.makedirs(absdir)
        relativepath = os.path.join(relativedir, basename)
        abspath = os.path.join(absdir, basename)
        for suffix in ['.gz', '.bz', '.xz']:
            if basename.endswith(suffix):
                abspath = abspath.rstrip(suffix)
                relativepath = relativepath.rstrip(suffix)

        h = rhnSQL.prepare("""select relative_filename, last_modified
                                from rhnChannelComps
                               where channel_id = :cid
                                 and comps_type_id = (select id from rhnCompsType where label = :ctype)""")
        if h.execute(cid=self.channel['id'], ctype=comps_type):
            (db_filename, db_timestamp) = h.fetchone()
            comps_path = os.path.join(CFG.MOUNT_POINT, db_filename)
            if os.path.isfile(comps_path):
                old_checksum = getFileChecksum('sha256', comps_path)

        src = fileutils.decompress_open(filename)
        dst = open(abspath, "w")
        shutil.copyfileobj(src, dst)
        dst.close()
        src.close()
        if old_checksum and old_checksum != getFileChecksum('sha256', abspath):
            self.regen = True

        repoDataKey = 'group' if comps_type == 'comps' else comps_type
        file_timestamp = plug.repo.repoXML.repoData[repoDataKey].timestamp
        last_modified = datetime.fromtimestamp(float(file_timestamp), utc)

        if db_timestamp >= last_modified:
            # already have newer data, skip updating
            return abspath

        # update or insert
        hu = rhnSQL.prepare("""update rhnChannelComps
                                  set relative_filename = :relpath,
                                      modified = current_timestamp,
                                      last_modified = :last_modified
                                where channel_id = :cid
                                  and comps_type_id = (select id from rhnCompsType where label = :ctype)""")
        hu.execute(cid=self.channel['id'], relpath=relativepath, ctype=comps_type,
                   last_modified=last_modified)

        hi = rhnSQL.prepare("""insert into rhnChannelComps
                               (id, channel_id, relative_filename, last_modified, comps_type_id)
                               (select sequence_nextval('rhn_channelcomps_id_seq'),
                                       :cid,
                                       :relpath,
                                       :last_modified,
                                       (select id from rhnCompsType where label = :ctype)
                                  from dual
                                 where not exists (select 1 from rhnChannelComps
                                                    where channel_id = :cid
                                                      and comps_type_id = (select id from rhnCompsType where label = :ctype)))""")
        hi.execute(cid=self.channel['id'], relpath=relativepath, ctype=comps_type,
                   last_modified=last_modified)
        return abspath

    def import_groups(self, plug):
        groupsfile = plug.get_groups()
        if groupsfile:
            abspath = self.copy_metadata_file(plug, groupsfile, 'comps', relative_comps_dir)
            plug.groupsfile = abspath

    def import_modules(self, plug):
        modulesfile = plug.get_modules()
        if modulesfile:
            self.copy_metadata_file(plug, modulesfile, 'modules', relative_modules_dir)

    def _populate_erratum(self, notice):
        advisory = notice['update_id'] + '-' + notice['version']
        existing_errata = self.get_errata(notice['update_id'])
        e = importLib.Erratum()
        e['errata_from'] = notice['from']
        e['advisory'] = advisory
        e['advisory_name'] = notice['update_id']
        e['advisory_rel'] = notice['version']
        e['advisory_type'] = errata_typemap.get(notice['type'], 'Product Enhancement Advisory')
        e['product'] = notice['release'] or 'Unknown'
        e['description'] = notice['description']
        e['synopsis'] = notice['title'] or notice['update_id']
        if notice['type'] == 'security' and 'severity' in notice and notice['severity'].lower() != 'none':
            e['security_impact'] = notice['severity']
        if notice['type'] == 'security' and not e['synopsis'].startswith(notice['severity'] + ': '):
            e['synopsis'] = notice['severity'] + ': ' + e['synopsis']
        if 'summary' in notice and not notice['summary'] is None:
            e['topic'] = notice['summary']
        else:
            e['topic'] = ' '
        if 'solution' in notice and not notice['solution'] is None:
            e['solution'] = notice['solution']
        else:
            e['solution'] = ' '
        e['issue_date'] = self._to_db_date(notice['issued'])
        if notice['updated']:
            e['update_date'] = self._to_db_date(notice['updated'])
        else:
            e['update_date'] = self._to_db_date(notice['issued'])
        e['org_id'] = self.org_id
        e['notes'] = ''
        e['channels'] = []
        e['packages'] = []
        e['files'] = []
        if existing_errata:
            e['channels'] = existing_errata['channels']
            e['packages'] = existing_errata['packages']
        e['channels'].append({'label': self.channel_label})

        for collection in notice['pkglist']:
            for pkg in collection['packages']:
                param_dict = {
                    'name': pkg['name'],
                    'version': pkg['version'],
                    'release': pkg['release'],
                    'arch': pkg['arch'],
                    'channel_id': int(self.channel['id']),
                }
                if pkg['epoch'] == '0':
                    epochStatement = "(pevr.epoch is NULL or pevr.epoch = '0')"
                elif pkg['epoch'] is None or pkg['epoch'] == '':
                    epochStatement = "pevr.epoch is NULL"
                else:
                    epochStatement = "pevr.epoch = :epoch"
                    param_dict['epoch'] = pkg['epoch']
                if self.org_id:
                    param_dict['org_id'] = self.org_id
                    orgStatement = "= :org_id"
                else:
                    orgStatement = "is NULL"

                h = rhnSQL.prepare("""
                    select p.id, pevr.epoch, c.checksum, c.checksum_type
                      from rhnPackage p
                      join rhnPackagename pn on p.name_id = pn.id
                      join rhnpackageevr pevr on p.evr_id = pevr.id
                      join rhnpackagearch pa on p.package_arch_id = pa.id
                      join rhnArchType at on pa.arch_type_id = at.id
                      join rhnChecksumView c on p.checksum_id = c.id
                      join rhnChannelPackage cp on p.id = cp.package_id
                     where pn.name = :name
                       and p.org_id %s
                       and pevr.version = :version
                       and pevr.release = :release
                       and pa.label = :arch
                       and %s
                       and at.label = 'rpm'
                       and cp.channel_id = :channel_id
                """ % (orgStatement, epochStatement))
                h.execute(**param_dict)
                cs = h.fetchone_dict() or None

                if not cs:
                    if 'epoch' in param_dict:
                        epoch = str(param_dict['epoch']) + ":"
                    else:
                        epoch = ""
                    log(2, "No checksum found for %s-%s%s-%s.%s."
                           " Skipping Package" % (param_dict['name'],
                                                  epoch,
                                                  param_dict['version'],
                                                  param_dict['release'],
                                                  param_dict['arch']))
                    continue

                newpkgs = []
                for oldpkg in e['packages']:
                    if oldpkg['package_id'] != cs['id']:
                        newpkgs.append(oldpkg)

                package = importLib.IncompletePackage().populate(pkg)
                package['epoch'] = cs['epoch']
                package['org_id'] = self.org_id

                package['checksums'] = {cs['checksum_type']: cs['checksum']}
                package['checksum_type'] = cs['checksum_type']
                package['checksum'] = cs['checksum']

                package['package_id'] = cs['id']
                newpkgs.append(package)

                e['packages'] = newpkgs

        # Empty package list in original metadata
        if not e['packages'] and not notice['pkglist'][0]['packages']:
            log(2, "Advisory %s has empty package list." % e['advisory_name'])
        elif not e['packages']:
            raise ValueError("Advisory %s skipped because of empty package list (filtered)." % e['advisory_name'])

        e['keywords'] = []
        if notice['reboot_suggested']:
            kw = importLib.Keyword()
            kw.populate({'keyword': 'reboot_suggested'})
            e['keywords'].append(kw)
        if notice['restart_suggested']:
            kw = importLib.Keyword()
            kw.populate({'keyword': 'restart_suggested'})
            e['keywords'].append(kw)
        e['bugs'] = []
        e['cve'] = []
        if notice['references']:
            bzs = [r for r in notice['references'] if r['type'] == 'bugzilla']
            if bzs:
                tmp = {}
                for bz in bzs:
                    try:
                        bz_id = int(bz['id'])
                    # This can happen in some incorrectly generated updateinfo, let's be smart
                    except ValueError:
                        log(2, "Bugzilla assigned to advisory %s has invalid id: %s, trying to get it from URL..."
                            % (e['advisory_name'], bz['id']))
                        bz_id = int(re.search(r"\d+$", bz['href']).group(0))
                    if bz_id not in tmp:
                        bug = importLib.Bug()
                        bug.populate({'bug_id': bz_id, 'summary': bz['title'], 'href': bz['href']})
                        e['bugs'].append(bug)
                        tmp[bz_id] = None
            cves = [r for r in notice['references'] if r['type'] == 'cve']
            if cves:
                tmp = {}
                for cve in cves:
                    if cve['id'] not in tmp:
                        e['cve'].append(cve['id'])
                        tmp[cve['id']] = None
            others = [r for r in notice['references'] if not r['type'] == 'bugzilla' and not r['type'] == 'cve']
            if others:
                refers_to = ""
                for other in others:
                    if refers_to:
                        refers_to += "\n"
                    refers_to += other['href']
                e['refers_to'] = refers_to
        e['locally_modified'] = None
        return e

    def upload_updates(self, notices):
        batch = []

        advisory_update_date = self.list_update_dates()
        for notice in notices:
            notice = self.fix_notice(notice)

            # Save advisory names from all repositories
            self.all_errata.add(notice['update_id'])

            update_date = advisory_update_date.get(notice['update_id'], '')
            notice_updated = self._to_db_date(notice['updated'])
            if not self.force_all_errata and notice_updated == update_date:
                continue

            # pylint: disable=W0703
            try:
                erratum = self._populate_erratum(notice)
                batch.append(erratum)
            except Exception:
                e = "Skipped %s - %s" % (notice['update_id'], sys.exc_info()[1])
                log2(1, 1, e, stream=sys.stderr)
                if self.fail:
                    raise

        if batch:
            log(0, " Syncing %s new errata to channel." % len(batch))
            backend = SQLBackend()
            importer = ErrataImport(batch, backend)
            importer.run()
            self.regen = True
        elif notices:
            log(0, " No new errata to sync.")

    def import_packages(self, plug, source_id, is_non_local_repo):
        failed_packages = 0
        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                select flag, filter
                  from rhnContentSourceFilter
                 where source_id = :source_id
                 order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], [v.strip() for v in row['filter'].split(',') if v.strip()])
                       for row in filter_data]
        else:
            filters = self.filters

        packages = plug.list_packages(filters, self.latest)
        to_disassociate = {}
        to_process = []
        num_passed = len(packages)
        log(0, " Packages in repo: %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, " Packages passed filter rules: %5d" % num_passed)
        channel_id = int(self.channel['id'])

        for pack in packages:
            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.org_id)

            to_download = True
            to_link = True
            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""

                if self.metadata_only or self.match_package_checksum(db_pack['path'], pack.path,
                                                                     pack.checksum_type, pack.checksum):
                    # package is already on disk or not required
                    to_download = False
                    if db_pack['channel_id'] == channel_id:
                        # package is already in the channel
                        to_link = False

                    # just pass data from DB, they will be used in strict channel
                    # linking if there is no new RPM downloaded
                    pack.checksum = db_pack['checksum']
                    pack.checksum_type = db_pack['checksum_type']
                    pack.epoch = db_pack['epoch']

                    self.all_packages.add((pack.checksum_type, pack.checksum))

                elif db_pack['channel_id'] == channel_id:
                    # different package with SAME NVREA
                    # disassociate from channel if it doesn't match package which will be downloaded
                    to_disassociate[(db_pack['checksum_type'], db_pack['checksum'])] = True

            if to_download or to_link:
                to_process.append((pack, to_download, to_link))

        num_to_process = len(to_process)
        if num_to_process == 0:
            log(0, " No new packages to sync.")
            # If we are just appending, we can exit
            if not self.strict:
                return failed_packages
        else:
            log(0, " Packages already synced: %5d" % (num_passed - num_to_process))
            log(0, " Packages to sync: %5d" % num_to_process)

        downloader = ThreadedDownloader()
        to_download_count = 0
        for what in to_process:
            pack, to_download, to_link = what
            if to_download:
                target_file = os.path.join(plug.repo.pkgdir, os.path.basename(pack.unique_id.relativepath))
                pack.path = target_file
                params = {}
                checksum_type = pack.checksum_type
                checksum = pack.checksum
                plug.set_download_parameters(params, pack.unique_id.relativepath, target_file,
                                             checksum_type=checksum_type, checksum_value=checksum)
                downloader.add(params)
                to_download_count += 1
        if num_to_process != 0:
            log(0, " New packages to download: %5d" % to_download_count)
            log2(0, 0, " Downloading packages:")
        logger = TextLogger(None, to_download_count)
        downloader.set_log_obj(logger)
        downloader.run()

        log2background(0, "Importing packages started.")
        log(0, '')
        log(0, ' Importing packages to DB:')
        progress_bar = ProgressBarLogger(" Importing packages: ", to_download_count)

        # Prepare SQL statements
        h_delete_package_queue = rhnSQL.prepare("""delete from rhnPackageFileDeleteQueue where path = :path""")
        backend = SQLBackend()

        mpm_bin_batch = importLib.Collection()
        mpm_src_batch = importLib.Collection()
        affected_channels = []
        upload_caller = "server.app.uploadPackage"

        import_count = 0
        for (index, what) in enumerate(to_process):
            pack, to_download, to_link = what
            if not to_download:
                continue
            import_count += 1
            stage_path = pack.path

            # pylint: disable=W0703
            try:
                # check if package was downloaded
                if not os.path.exists(stage_path):
                    raise Exception

                pack.load_checksum_from_header()

                if not self.metadata_only:
                    rel_package_path = rhnPackageUpload.relative_path_from_header(pack.a_pkg.header, self.org_id,
                                                                                  pack.a_pkg.checksum_type,
                                                                                  pack.a_pkg.checksum)
                else:
                    rel_package_path = None

                if rel_package_path:
                    # Save uploaded package to cache with repository checksum type
                    self.checksum_cache[rel_package_path] = {pack.checksum_type: pack.checksum}

                    # First write the package to the filesystem to final location
                    # pylint: disable=W0703
                    try:
                        importLib.move_package(pack.a_pkg.payload_stream.name, basedir=CFG.MOUNT_POINT,
                                               relpath=rel_package_path,
                                               checksum_type=pack.a_pkg.checksum_type,
                                               checksum=pack.a_pkg.checksum, force=1)
                    except OSError:
                        e = sys.exc_info()[1]
                        raise_with_tb(rhnFault(50, "Package upload failed: %s" % e), sys.exc_info()[2])
                    except importLib.FileConflictError:
                        raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
                    except Exception:
                        raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

                    # Remove any pending scheduled file deletion for this package
                    h_delete_package_queue.execute(path=rel_package_path)

                pkg = mpmSource.create_package(pack.a_pkg.header, size=pack.a_pkg.payload_size,
                                               checksum_type=pack.a_pkg.checksum_type, checksum=pack.a_pkg.checksum,
                                               relpath=rel_package_path, org_id=self.org_id,
                                               header_start=pack.a_pkg.header_start,
                                               header_end=pack.a_pkg.header_end, channels=[])

                if pack.a_pkg.header.is_source:
                    mpm_src_batch.append(pkg)
                else:
                    mpm_bin_batch.append(pkg)
                # we do not want to keep a whole 'a_pkg' object for every package in memory,
                # because we need only checksum. see BZ 1397417
                pack.checksum = pack.a_pkg.checksum
                pack.checksum_type = pack.a_pkg.checksum_type
                pack.epoch = pack.a_pkg.header['epoch']
                pack.a_pkg = None

                self.all_packages.add((pack.checksum_type, pack.checksum))

                # Downloaded pkg checksum matches with pkg already in channel, no need to disassociate from channel
                if (pack.checksum_type, pack.checksum) in to_disassociate:
                    to_disassociate[(pack.checksum_type, pack.checksum)] = False
                    # Set to_link to False, no need to link again
                    to_process[index] = (pack, True, False)

                # importing packages by batch or if the current package is the last
                if mpm_bin_batch and (import_count == to_download_count
                                      or len(mpm_bin_batch) % self.import_batch_size == 0):
                    importer = packageImport.PackageImport(mpm_bin_batch, backend, caller=upload_caller)
                    importer.setUploadForce(1)
                    importer.run()
                    rhnSQL.commit()
                    del importer.batch
                    affected_channels.extend(importer.affected_channels)
                    del mpm_bin_batch
                    mpm_bin_batch = importLib.Collection()

                if mpm_src_batch and (import_count == to_download_count
                                      or len(mpm_src_batch) % self.import_batch_size == 0):
                    src_importer = packageImport.SourcePackageImport(mpm_src_batch, backend, caller=upload_caller)
                    src_importer.setUploadForce(1)
                    src_importer.run()
                    rhnSQL.commit()
                    del mpm_src_batch
                    mpm_src_batch = importLib.Collection()

                progress_bar.log(True, None)
            except KeyboardInterrupt:
                raise
            except rhnSQL.SQLError:
                raise
            except Exception:
                failed_packages += 1
                e = str(sys.exc_info()[1])
                if e:
                    log2(0, 1, e, stream=sys.stderr)
                if self.fail:
                    raise
                to_process[index] = (pack, False, False)
                progress_bar.log(False, None)
            finally:
                if is_non_local_repo and stage_path and os.path.exists(stage_path):
                    os.remove(stage_path)

        if affected_channels:
            errataCache.schedule_errata_cache_update(affected_channels)
        log2background(0, "Importing packages finished.")

        # Disassociate packages
        for (checksum_type, checksum) in to_disassociate:
            if to_disassociate[(checksum_type, checksum)]:
                self.disassociate_package(checksum_type, checksum)
        # Do not re-link if nothing was marked to link
        if any([to_link for (pack, to_download, to_link) in to_process]):
            log(0, '')
            log(0, " Linking packages to the channel.")
            # Packages to append to channel
            import_batch = [self.associate_package(pack) for (pack, to_download, to_link) in to_process if to_link]
            backend = SQLBackend()
            caller = "server.app.yumreposync"
            importer = ChannelPackageSubscription(import_batch,
                                                  backend, caller=caller, repogen=False)
            importer.run()
            backend.commit()
            self.regen = True
        return failed_packages

    def show_packages(self, plug, source_id):

        if (not self.filters) and source_id:
            h = rhnSQL.prepare("""
                select flag, filter
                  from rhnContentSourceFilter
                 where source_id = :source_id
                 order by sort_order """)
            h.execute(source_id=source_id)
            filter_data = h.fetchall_dict() or []
            filters = [(row['flag'], re.split(r'[,\s]+', row['filter']))
                       for row in filter_data]
        else:
            filters = self.filters

        packages = plug.raw_list_packages(filters)

        num_passed = len(packages)
        log(0, " Packages in repo: %5d" % plug.num_packages)
        if plug.num_excluded:
            log(0, " Packages passed filter rules: %5d" % num_passed)

        log(0, " Packages marked with '+' will be downloaded during the next channel synchronization")
        log(0, " Packages marked with '.' are already present on the filesystem")

        channel_id = int(self.channel['id'])

        for pack in packages:

            db_pack = rhnPackage.get_info_for_package(
                [pack.name, pack.version, pack.release, pack.epoch, pack.arch],
                channel_id, self.org_id)

            pack_status = " + "  # need to be downloaded by default
            pack_full_name = "%-60s\t" % (pack.name + "-" + pack.version + "-" + pack.release + "." +
                                          pack.arch + ".rpm")
            pack_size = "%11d bytes\t" % pack.packagesize

            if pack.checksum_type == 'sha512':
                pack_hash_info = "%-140s" % (pack.checksum_type + ' ' + pack.checksum)
            else:
                pack_hash_info = "%-80s " % (pack.checksum_type + ' ' + pack.checksum)

            # Package exists in DB
            if db_pack:
                # Path in filesystem is defined
                if db_pack['path']:
                    pack.path = os.path.join(CFG.MOUNT_POINT, db_pack['path'])
                else:
                    pack.path = ""

                if self.match_package_checksum(db_pack['path'], pack.path, pack.checksum_type, pack.checksum):
                    # package is already on disk
                    pack_status = ' . '

            log(0, " " + pack_status + pack_full_name + pack_size + pack_hash_info)

    def match_package_checksum(self, relpath, abspath, checksum_type, checksum):
        if os.path.exists(abspath):
            if relpath not in self.checksum_cache:
                self.checksum_cache[relpath] = {}
            cached_checksums = self.checksum_cache[relpath]
            if checksum_type not in cached_checksums:
                checksum_disk = getFileChecksum(checksum_type, filename=abspath)
                cached_checksums[checksum_type] = checksum_disk
            else:
                checksum_disk = cached_checksums[checksum_type]
            if checksum_disk == checksum:
                return 1
        elif relpath in self.checksum_cache:
            # Remove path from cache if not exists
            del self.checksum_cache[relpath]
        return 0

    def associate_package(self, pack):
        package = {}
        package['name'] = pack.name
        package['version'] = pack.version
        package['release'] = pack.release
        package['arch'] = pack.arch
        if pack.a_pkg:
            package['checksum'] = pack.a_pkg.checksum
            package['checksum_type'] = pack.a_pkg.checksum_type
            # use epoch from file header because createrepo puts epoch="0" to
            # primary.xml even for packages with epoch=''
            package['epoch'] = pack.a_pkg.header['epoch']
        else:
            # RPM not available but package metadata are in DB, reuse these values
            package['checksum'] = pack.checksum
            package['checksum_type'] = pack.checksum_type
            package['epoch'] = pack.epoch
        package['channels'] = [{'label': self.channel_label,
                                'id': self.channel['id']}]
        package['org_id'] = self.org_id

        return importLib.IncompletePackage().populate(package)

    def disassociate_package(self, checksum_type, checksum):
        log(3, "Disassociating package with checksum: %s (%s)" % (checksum, checksum_type))
        h = rhnSQL.prepare("""
            delete from rhnChannelPackage cp
             where cp.channel_id = :channel_id
               and cp.package_id in (select p.id
                                       from rhnPackage p
                                       join rhnChecksumView c
                                         on p.checksum_id = c.id
                                      where c.checksum = :checksum
                                        and c.checksum_type = :checksum_type
                                    )
        """)
        h.execute(channel_id=self.channel['id'],
                  checksum_type=checksum_type, checksum=checksum)

    def disassociate_erratum(self, advisory_name):
        log(3, "Disassociating erratum: %s" % advisory_name)
        h = rhnSQL.prepare("""
            delete from rhnChannelErrata ce
             where ce.channel_id = :channel_id
               and ce.errata_id in (select e.id
                                      from rhnErrata e
                                     where e.advisory_name = :advisory_name
                                   )
        """)
        h.execute(channel_id=self.channel['id'], advisory_name=advisory_name)

    def load_channel(self):
        return rhnChannel.channel_info(self.channel_label)

    @staticmethod
    def _to_db_date(date):
        if date is None:
            return None
        ret = ""
        if date.isdigit():
            ret = datetime.fromtimestamp(float(date)).isoformat(' ')
        else:
            # we expect to get an ISO formatted date
            if len(date) == 10:  # YYYY-MM-DD
                date += " 00:00:00"
            ret = date
        return ret[:19]  # return first 19 characters of date, therefore preventing ORA-01830 caused by fractions of seconds

    @staticmethod
    def fix_notice(notice):
        # pylint: disable=W0212
        if "." in notice['version']:
            new_version = 0
            for n in notice['version'].split('.'):
                new_version = (new_version + int(n)) * 100
            notice['version'] = str(new_version / 100)
        return notice

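    # A worked example of the version flattening above (illustrative values):
    # for notice['version'] == '1.2.3' the loop computes
    #     (0 + 1) * 100      ->     100
    #     (100 + 2) * 100    ->   10200
    #     (10200 + 3) * 100  -> 1020300
    # and 1020300 / 100 (integer division on Python 2) yields '10203', i.e.
    # each dotted component after the first is packed into two decimal digits.
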
    def get_errata(self, update_id):
        h = rhnSQL.prepare("""select
            e.id, e.advisory, e.advisory_name, e.advisory_rel
            from rhnerrata e
            where e.advisory_name = :name
              and (e.org_id = :org_id or (e.org_id is null and :org_id is null))
        """)
        h.execute(name=update_id, org_id=self.org_id)
        ret = h.fetchone_dict() or None
        if not ret:
            return None

        h = rhnSQL.prepare("""select distinct c.label
            from rhnchannelerrata ce
            join rhnchannel c on c.id = ce.channel_id
            where ce.errata_id = :eid
        """)
        h.execute(eid=ret['id'])
        channels = h.fetchall_dict() or []

        ret['channels'] = channels
        ret['packages'] = []

        h = rhnSQL.prepare("""
            select p.id as package_id,
                   pn.name,
                   pevr.epoch,
                   pevr.version,
                   pevr.release,
                   pa.label as arch,
                   p.org_id,
                   cv.checksum,
                   cv.checksum_type
              from rhnerratapackage ep
              join rhnpackage p on p.id = ep.package_id
              join rhnpackagename pn on pn.id = p.name_id
              join rhnpackageevr pevr on pevr.id = p.evr_id
              join rhnpackagearch pa on pa.id = p.package_arch_id
              join rhnchecksumview cv on cv.id = p.checksum_id
             where ep.errata_id = :eid
        """)
        h.execute(eid=ret['id'])
        packages = h.fetchall_dict() or []
        for pkg in packages:
            ipackage = importLib.IncompletePackage().populate(pkg)
            ipackage['epoch'] = pkg.get('epoch', '')

            ipackage['checksums'] = {ipackage['checksum_type']: ipackage['checksum']}
            ret['packages'].append(ipackage)

        return ret

    def list_update_dates(self):
        """List update_date for advisories in channel"""
        h = rhnSQL.prepare("""select e.advisory_name,
                                     e.update_date
                                from rhnChannelErrata ce
                               inner join rhnErrata e on e.id = ce.errata_id
                               where ce.channel_id = :cid
        """)
        h.execute(cid=self.channel['id'])
        advisories = dict((row['advisory_name'], row['update_date'].strftime("%Y-%m-%d %H:%M:%S"))
                          for row in h.fetchall_dict() or [])
        return advisories

    def list_errata(self):
        """List advisory names present in channel"""
        h = rhnSQL.prepare("""select e.advisory_name
                                from rhnChannelErrata ce
                               inner join rhnErrata e on e.id = ce.errata_id
                               where ce.channel_id = :cid
        """)
        h.execute(cid=self.channel['id'])
        advisories = [row['advisory_name'] for row in h.fetchall_dict() or []]
        return advisories

    def import_kickstart(self, plug, repo_label, is_non_local_repo):
        log(0, '')
        log(0, ' Importing kickstarts.')
        ks_path = 'rhn/kickstart/'
        ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
        if len(ks_tree_label) < 4:
            ks_tree_label += "_repo"

        # construct ks_path and check we already have this KS tree synced
        id_request = """
            select id
              from rhnKickstartableTree
             where channel_id = :channel_id and label = :label
        """

        if self.org_id:
            ks_path += str(self.org_id) + '/' + ks_tree_label
            # Trees synced from external repositories are expected to have full path in database
            db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
            row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'],
                                       label=ks_tree_label, org_id=self.org_id)
        else:
            ks_path += ks_tree_label
            db_path = ks_path
            row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'],
                                       label=ks_tree_label)

        treeinfo_path = ['treeinfo', '.treeinfo']
        treeinfo_parser = None
        for path in treeinfo_path:
            log(1, "Trying " + path)
            treeinfo = plug.get_file(path, os.path.join(plug.repo.basecachedir, plug.name))
            if treeinfo:
                try:
                    treeinfo_parser = TreeInfoParser(treeinfo)
                    treeinfo_path = path
                    break
                except TreeInfoError:
                    pass

        if not treeinfo_parser:
            log(0, " Kickstartable tree not detected (no valid treeinfo file)")
            return

        if self.ks_install_type is None:
            family = treeinfo_parser.get_family()
            if family == 'Fedora':
                self.ks_install_type = 'fedora18'
            elif family == 'CentOS':
                self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
            else:
                self.ks_install_type = 'generic_rpm'

        fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))
        # Make sure images are included
        to_download = set()
        to_download.add(treeinfo_path)
        for repo_path in treeinfo_parser.get_images():
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
            # TODO: better check
            if not os.path.exists(local_path) or self.force_kickstart:
                to_download.add(repo_path)

        if row:
            log(0, " Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
            ks_id = row['id']
        else:
            row = rhnSQL.fetchone_dict("""
                select sequence_nextval('rhn_kstree_id_seq') as id from dual
            """)
            ks_id = row['id']

            rhnSQL.execute("""
                insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                                  install_type, last_modified, created, modified)
                values (:id, :org_id, :label, :base_path, :channel_id,
                        ( select id from rhnKSTreeType where label = :ks_tree_type),
                        ( select id from rhnKSInstallType where label = :ks_install_type),
                        current_timestamp, current_timestamp, current_timestamp)""", id=ks_id,
                           org_id=self.org_id, label=ks_tree_label, base_path=db_path,
                           channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type,
                           ks_install_type=self.ks_install_type)

            log(0, " Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

        insert_h = rhnSQL.prepare("""
            insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
                                       modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                                       epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
        """)

        delete_h = rhnSQL.prepare("""
            delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
        """)

        # Downloading/Updating content of KS Tree
        dirs_queue = ['']
        log(0, " Gathering all files in kickstart repository...")
        while dirs_queue:
            cur_dir_name = dirs_queue.pop(0)
            if is_non_local_repo:
                parser = KSDirHtmlParser(plug, cur_dir_name)
            else:
                parser = KSDirLocalParser(plug.repo.urls[0].replace("file://", ""), cur_dir_name)
            for ks_file in parser.get_content():
                repo_path = cur_dir_name + ks_file['name']
                if ks_file['type'] == 'DIR':
                    dirs_queue.append(repo_path)
                    continue

                if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) or self.force_kickstart:
                    to_download.add(repo_path)

        for addon_dir in treeinfo_parser.get_addons():
            repomd_url = str(addon_dir + '/repodata/repomd.xml')
            repomd_file = plug.get_file(repomd_url, os.path.join(plug.repo.basecachedir, plug.name))

            if repomd_file:
                # find location of primary.xml
                repomd_xml = minidom.parse(repomd_file)
                for i in repomd_xml.getElementsByTagName('data'):
                    if i.attributes['type'].value == 'primary':
                        primary_url = str(addon_dir + '/' +
                                          i.getElementsByTagName('location')[0].attributes['href'].value)
                        break

                primary_zip = plug.get_file(primary_url, os.path.join(plug.repo.basecachedir, plug.name))
                if primary_zip:
                    primary_xml = gzip.open(primary_zip, 'r')
                    xmldoc = minidom.parse(primary_xml)
                    for i in xmldoc.getElementsByTagName('package'):
                        package = i.getElementsByTagName('location')[0].attributes['href'].value
                        repo_path = str(os.path.normpath(os.path.join(addon_dir, package)))
                        if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) \
                                or self.force_kickstart:
                            to_download.add(repo_path)

        if to_download:
            log(0, " Downloading %d kickstart files." % len(to_download))
            progress_bar = ProgressBarLogger(" Downloading kickstarts:", len(to_download))
            downloader = ThreadedDownloader(force=self.force_kickstart)
            for item in to_download:
                params = {}
                plug.set_download_parameters(params, item, os.path.join(CFG.MOUNT_POINT, ks_path, item))
                downloader.add(params)
            downloader.set_log_obj(progress_bar)
            downloader.run()
            log2background(0, "Download finished.")
            for item in to_download:
                st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                # update entity about current file in a database
                delete_h.execute(id=ks_id, path=item)
                insert_h.execute(id=ks_id, path=item,
                                 checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                                 st_size=st.st_size, st_time=st.st_mtime)
        else:
            log(0, "No new kickstart files to download.")

        rhnSQL.commit()

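
# A minimal sketch of how this module is typically driven (for example by the
# spacewalk-repo-sync command); the channel label and repository URL below are
# placeholders, not real values:
#
#     repo_sync = RepoSync('my-channel', repo_type='yum',
#                          url=['https://example.com/pub/my-repo/'])
#     elapsed_time, sync_error = repo_sync.sync()
#     send_mail(sync_type="Repo")  # optional e-mail summary of the run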