
Source Code for Module backend.satellite_tools.disk_dumper.iss

#
# Copyright (c) 2008--2018 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#

import os
import os.path
import sys
import time
import gzip
import shutil
import gettext
try:
    # python 2
    import cStringIO
except ImportError:
    # python 3
    import io as cStringIO
import dumper
from spacewalk.common.usix import raise_with_tb
from spacewalk.common import rhnMail
from spacewalk.common.rhnConfig import CFG, initCFG
from spacewalk.common.rhnTB import Traceback, exitWithTraceback
from spacewalk.common.checksum import getFileChecksum
from spacewalk.server import rhnSQL
from spacewalk.server.rhnSQL import SQLError, SQLSchemaError, SQLConnectError
from spacewalk.satellite_tools.exporter import xmlWriter
from spacewalk.satellite_tools import xmlDiskSource, diskImportLib, progress_bar
from spacewalk.satellite_tools.syncLib import initEMAIL_LOG, dumpEMAIL_LOG, log2email, log2stderr, log2stdout
from iss_ui import UI
from iss_actions import ActionDeps
import iss_isos
t = gettext.translation('spacewalk-backend-server', fallback=True)
# Python 3's gettext has no ugettext; fall back to gettext there.
_ = getattr(t, 'ugettext', t.gettext)

# bare-except and broad-except
# pylint: disable=W0702,W0703

class ISSError(Exception):

    def __init__(self, msg, tb):
        Exception.__init__(self)
        self.msg = msg
        self.tb = tb

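# A sketch of the error-reporting idiom used throughout this module: the
# traceback is rendered into a string buffer at the raise site and carried
# inside the ISSError, so main() can report it without re-deriving it
# (illustrative only; do_work is a stand-in for the real work):
#
#     try:
#         do_work()
#     except Exception:
#         e = sys.exc_info()[1]
#         tbout = cStringIO.StringIO()
#         Traceback(mail=0, ostream=tbout, with_locals=1)
#         raise_with_tb(ISSError("%s caught." % e.__class__.__name__,
#                                tbout.getvalue()), sys.exc_info()[2])
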
# xmlDiskSource doesn't have a class for short channel packages, so I added one here.
# I named _getFile that way so it's similar to the stuff in xmlDiskSource.
# I grabbed the value of pathkey from dump_channel_packages_short in dumper.py.
class ISSChannelPackageShortDiskSource:

    def __init__(self, mount_point, channel_name=None):
        self.mp = mount_point
        self.channelid = channel_name
        self.pathkey = "xml-channel-packages/rhn-channel-%d.data"

    def setChannel(self, channel_id):
        self.channelid = channel_id

    def _getFile(self):
        return os.path.join(self.mp, self.pathkey % (self.channelid,))

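# Usage sketch (hypothetical mount point): for channel id 42 this source
# resolves to <mount>/xml-channel-packages/rhn-channel-42.data:
#
#     src = ISSChannelPackageShortDiskSource('/var/satellite-export')
#     src.setChannel(42)
#     src._getFile()  # -> '/var/satellite-export/xml-channel-packages/rhn-channel-42.data'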

class FileMapper:

    """ This class maps dumps to files. In other words, you give it
    the type of dump you're doing and it gives you the file to
    write it to.
    """

    def __init__(self, mount_point):
        self.mp = mount_point
        self.filemap = {
            'arches': xmlDiskSource.ArchesDiskSource(self.mp),
            'arches-extra': xmlDiskSource.ArchesExtraDiskSource(self.mp),
            'blacklists': xmlDiskSource.BlacklistsDiskSource(self.mp),
            'channelfamilies': xmlDiskSource.ChannelFamilyDiskSource(self.mp),
            'orgs': xmlDiskSource.OrgsDiskSource(self.mp),
            'channels': xmlDiskSource.ChannelDiskSource(self.mp),
            'channel-pkg-short': ISSChannelPackageShortDiskSource(self.mp),
            'packages-short': xmlDiskSource.ShortPackageDiskSource(self.mp),
            'packages': xmlDiskSource.PackageDiskSource(self.mp),
            'sourcepackages': xmlDiskSource.SourcePackageDiskSource(self.mp),
            'errata': xmlDiskSource.ErrataDiskSource(self.mp),
            'kickstart_trees': xmlDiskSource.KickstartDataDiskSource(self.mp),
            'kickstart_files': xmlDiskSource.KickstartFileDiskSource(self.mp),
            'binary_rpms': xmlDiskSource.BinaryRPMDiskSource(self.mp),
            'comps': xmlDiskSource.ChannelCompsDiskSource(self.mp),
            'modules': xmlDiskSource.ChannelModulesDiskSource(self.mp),
            'productnames': xmlDiskSource.ProductnamesDiskSource(self.mp),
        }

    # This will make sure that all of the directories leading up to the
    # xml file actually exist.
    @staticmethod
    def setup_file(ofile):
        # Split the path. The filename is [1], and the directories are in [0].
        dirs_to_make = os.path.split(ofile)[0]

        # Make the directories if they don't already exist.
        if not os.path.exists(dirs_to_make):
            os.makedirs(dirs_to_make)

        return ofile
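
    # Illustration (hypothetical path): setup_file('/exports/arches/arch.xml')
    # creates /exports and /exports/arches if needed (os.makedirs builds the
    # whole chain) and hands the unchanged path back to the caller.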

    # The get*File methods will return the full path to the xml file that the dumps are placed in.
    # pylint: disable=W0212
    def getArchesFile(self):
        return self.setup_file(self.filemap['arches']._getFile())

    def getArchesExtraFile(self):
        return self.setup_file(self.filemap['arches-extra']._getFile())

    def getBlacklistsFile(self):
        return self.setup_file(self.filemap['blacklists']._getFile())

    def getOrgsFile(self):
        return self.setup_file(self.filemap['orgs']._getFile())

    def getChannelFamiliesFile(self):
        return self.setup_file(self.filemap['channelfamilies']._getFile())

    def getBinaryRPMFile(self):
        return self.setup_file(self.filemap['binary_rpms']._getFile())

    def getChannelsFile(self, channelname):
        self.filemap['channels'].setChannel(channelname)
        return self.setup_file(self.filemap['channels']._getFile())

    def getChannelCompsFile(self, channelname):
        self.filemap['comps'].setChannel(channelname)
        return self.setup_file(self.filemap['comps']._getFile())

    def getChannelModulesFile(self, channelname):
        self.filemap['modules'].setChannel(channelname)
        return self.setup_file(self.filemap['modules']._getFile())

    def getChannelPackageShortFile(self, channel_id):
        self.filemap['channel-pkg-short'].setChannel(channel_id)
        return self.setup_file(self.filemap['channel-pkg-short']._getFile())

    def getPackagesFile(self, packageid):
        self.filemap['packages'].setID(packageid)
        return self.setup_file(self.filemap['packages']._getFile())

    def getShortPackagesFile(self, packageid):
        self.filemap['packages-short'].setID(packageid)
        return self.setup_file(self.filemap['packages-short']._getFile())

    def getSourcePackagesFile(self, sp_id):
        self.filemap['sourcepackages'].setID(sp_id)
        return self.setup_file(self.filemap['sourcepackages']._getFile())

    def getErrataFile(self, errataid):
        self.filemap['errata'].setID(errataid)
        return self.setup_file(self.filemap['errata']._getFile())

    def getKickstartTreeFile(self, ks_id):
        self.filemap['kickstart_trees'].setID(ks_id)
        return self.setup_file(self.filemap['kickstart_trees']._getFile())

    def getKickstartFileFile(self, ks_label, relative_path):
        self.filemap['kickstart_files'].setID(ks_label)
        self.filemap['kickstart_files'].set_relative_path(relative_path)
        return self.setup_file(self.filemap['kickstart_files']._getFile())

    def getProductNamesFile(self):
        return self.setup_file(self.filemap['productnames']._getFile())

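# Usage sketch (hypothetical output directory): each get*File call both
# computes the target path and creates its parent directories, so callers
# can open the result immediately:
#
#     fm = FileMapper('/var/satellite-export')
#     fm.getArchesFile()                     # e.g. an arches/... path under the mount point
#     fm.getChannelsFile('my-base-channel')  # per-channel xml path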

class Dumper(dumper.XML_Dumper):

    """ This class subclasses the XML_Dumper class. It overrides
    the _get_xml_writer method and adds a set_filename method,
    which lets it write to a file instead of over the wire.
    """
    def __init__(self, outputdir, channel_labels, org_ids, hardlinks,
                 start_date, end_date, use_rhn_date, whole_errata):
        dumper.XML_Dumper.__init__(self)
        self.fm = FileMapper(outputdir)
        self.mp = outputdir
        self.pb_label = "Exporting: "
        self.pb_length = 20  # progress bar length
        self.pb_complete = " - Done!"  # string that's printed when progress bar is done.
        self.pb_char = "#"  # the string used as each unit in the progress bar.
        self.hardlinks = hardlinks
        self.filename = None
        self.outstream = None

        self.start_date = start_date
        self.end_date = end_date
        self.use_rhn_date = use_rhn_date
        self.whole_errata = whole_errata

        if self.start_date:
            dates = {'start_date': self.start_date,
                     'end_date': self.end_date, }
        else:
            dates = {}

        # The queries here are a little weird. They grab just enough information
        # to satisfy the dumper objects, which will use the information to look up
        # any additional information that they need. That's why they don't seem to
        # grab all of the information that you'd think would be necessary to sync stuff.

        ### CHANNEL INFO ###
        try:
            query = """
                select ch.id channel_id, label,
                       TO_CHAR(last_modified, 'YYYYMMDDHH24MISS') last_modified
                from rhnChannel ch
                where ch.label = :label
            """
            self.channel_query = rhnSQL.Statement(query)
            ch_data = rhnSQL.prepare(self.channel_query)

            comps_query = """
                select relative_filename
                from rhnChannelComps
                where channel_id = :channel_id
                and comps_type_id = 1
                order by id desc
            """

            modules_query = """
                select relative_filename
                from rhnChannelComps
                where channel_id = :channel_id
                and comps_type_id = 2
                order by id desc
            """

            self.channel_comps_query = rhnSQL.Statement(comps_query)
            channel_comps_sth = rhnSQL.prepare(self.channel_comps_query)
            self.channel_modules_query = rhnSQL.Statement(modules_query)
            channel_modules_sth = rhnSQL.prepare(self.channel_modules_query)

            # self.channel_ids contains the list of dictionaries that hold the channel
            # information. The keys are 'channel_id', 'label', and 'last_modified'.
            self.channel_comps = {}
            self.channel_modules = {}

            self.set_exportable_orgs(org_ids)

            # channel_labels should be the list of channels passed into
            # rhn-satellite-exporter by the user.
            log2stdout(1, "Gathering channel info...")
            for ids in channel_labels:
                ch_data.execute(label=ids)
                ch_info = ch_data.fetchall_dict()

                if not ch_info:
                    raise ISSError("Error: Channel %s not found." % ids, "")

                self.channel_ids = self.channel_ids + ch_info

                channel_comps_sth.execute(channel_id=ch_info[0]['channel_id'])
                comps_info = channel_comps_sth.fetchone_dict()
                channel_modules_sth.execute(channel_id=ch_info[0]['channel_id'])
                modules_info = channel_modules_sth.fetchone_dict()

                if comps_info is not None:
                    self.channel_comps[ch_info[0]['channel_id']] = comps_info['relative_filename']
                if modules_info is not None:
                    self.channel_modules[ch_info[0]['channel_id']] = modules_info['relative_filename']

            # For the list of channel families we also want to list those relevant for
            # channels that are already on disk, so that we do not lose those families
            # with "incremental" dumps. So gather the channel ids for channels already
            # in the dump.
            channel_labels_for_families = self.fm.filemap['channels'].list()
            print("Appending channels %s" % (channel_labels_for_families))
            for ids in channel_labels_for_families:
                ch_data.execute(label=ids)
                ch_info = ch_data.fetchall_dict()
                if ch_info:
                    self.channel_ids_for_families = self.channel_ids_for_families + ch_info

        except ISSError:
            # Don't want calls to sys.exit to show up as a "bad" error.
            raise
        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting channel info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### BINARY RPM INFO ###
        try:
            if self.whole_errata and self.start_date:
                query = """
                    select rcp.package_id id, rp.path path
                    from rhnChannelPackage rcp, rhnPackage rp
                    left join rhnErrataPackage rep on rp.id = rep.package_id
                    left join rhnErrata re on rep.errata_id = re.id
                    where rcp.package_id = rp.id
                    and rcp.channel_id = :channel_id
                """
            else:
                query = """
                    select rcp.package_id id, rp.path path
                    from rhnChannelPackage rcp, rhnPackage rp
                    where rcp.package_id = rp.id
                    and rcp.channel_id = :channel_id
                """

            if self.start_date:
                if self.whole_errata:
                    if self.use_rhn_date:
                        query += """ and
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (rep.package_id is NULL
                              and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ and
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (rep.package_id is NULL
                              and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.brpm_query = rhnSQL.Statement(query)
            brpm_data = rhnSQL.prepare(self.brpm_query)

            # self.brpms is a list of binary rpm info. It is a list of dictionaries,
            # where each dictionary has 'id' and 'path' as the keys.
            self.brpms = []
            log2stdout(1, "Gathering binary RPM info...")
            for ch in self.channel_ids:
                brpm_data.execute(channel_id=ch['channel_id'], **dates)
                self.brpms = self.brpms + (brpm_data.fetchall_dict() or [])
        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting binary rpm info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### PACKAGE INFO ###
        # This will grab channel package information for a given channel.
        try:
            if self.whole_errata and self.start_date:
                query = """
                    select rp.id package_id,
                           TO_CHAR(rp.last_modified, 'YYYYMMDDHH24MISS') last_modified
                    from rhnChannelPackage rcp, rhnPackage rp
                    left join rhnErrataPackage rep on rp.id = rep.package_id
                    left join rhnErrata re on rep.errata_id = re.id
                    where rcp.channel_id = :channel_id
                    and rcp.package_id = rp.id
                """
            else:
                query = """
                    select rp.id package_id,
                           TO_CHAR(rp.last_modified, 'YYYYMMDDHH24MISS') last_modified
                    from rhnPackage rp, rhnChannelPackage rcp
                    where rcp.channel_id = :channel_id
                    and rcp.package_id = rp.id
                """
            if self.start_date:
                if self.whole_errata:
                    if self.use_rhn_date:
                        query += """ and
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (rep.package_id is NULL
                              and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ and
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (rep.package_id is NULL
                              and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and (rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                    """
            self.package_query = rhnSQL.Statement(query)
            package_data = rhnSQL.prepare(self.package_query)

            # self.pkg_info will be a list of dictionaries containing channel package
            # information. The keys are 'package_id' and 'last_modified'.
            self.pkg_info = []

            # This fills in the pkg_info list with channel package information from the
            # channels in self.channel_ids.
            log2stdout(1, "Gathering package info...")
            for channel_id in self.channel_ids:
                package_data.execute(channel_id=channel_id['channel_id'], **dates)
                a_package = package_data.fetchall_dict() or []

                # Don't bother placing None into self.pkg_info.
                if a_package:
                    self.pkg_info = self.pkg_info + a_package

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting package info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### SOURCE PACKAGE INFO ###
        try:
            query = """
                select ps.id package_id,
                       TO_CHAR(ps.last_modified, 'YYYYMMDDHH24MISS') last_modified,
                       ps.source_rpm_id source_rpm_id
                from rhnPackageSource ps
            """
            if self.start_date:
                if self.whole_errata:
                    query += """
                        left join rhnErrataFilePackageSource refps on refps.package_id = ps.id
                        left join rhnErrataFile ref on refps.errata_file_id = ref.id
                        left join rhnErrata re on ref.errata_id = re.id
                    """
                    if self.use_rhn_date:
                        query += """ where
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (refps.package_id is NULL
                              and ps.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and ps.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ where
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                             ) or (refps.package_id is NULL
                              and ps.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                              and ps.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        where ps.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ps.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        where ps.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ps.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.source_package_query = rhnSQL.Statement(query)
            source_package_data = rhnSQL.prepare(self.source_package_query)
            source_package_data.execute(**dates)

            # self.src_pkg_info is a list of dictionaries containing the source package
            # information. The keys for each dictionary are 'package_id',
            # 'last_modified', and 'source_rpm_id'.
            self.src_pkg_info = source_package_data.fetchall_dict() or []

            # Again, don't bother placing None into the list.
            if not self.src_pkg_info:
                self.src_pkg_info = []

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting source package info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### ERRATA INFO ###
        try:
            query = """
                select e.id errata_id,
                       TO_CHAR(e.last_modified, 'YYYYMMDDHH24MISS') last_modified,
                       e.advisory_name "advisory-name"
                from rhnChannelErrata ce, rhnErrata e
                where ce.channel_id = :channel_id
                and ce.errata_id = e.id
            """
            if self.start_date:
                if self.use_rhn_date:
                    query += """
                        and e.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and e.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and ce.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ce.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.errata_query = rhnSQL.Statement(query)
            errata_data = rhnSQL.prepare(self.errata_query)

            # self.errata_info will be a list of dictionaries containing errata info for
            # the channels that the user listed. The keys are 'errata_id' and
            # 'last_modified'.
            self.errata_info = []
            log2stdout(1, "Gathering errata info...")
            for channel_id in self.channel_ids:
                errata_data.execute(channel_id=channel_id['channel_id'], **dates)
                an_errata = errata_data.fetchall_dict() or []
                if an_errata:
                    self.errata_info = self.errata_info + an_errata

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting errata info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### KICKSTART DATA/TREES INFO ###
        try:
            query = """
                select kt.id kstree_id, kt.label kickstart_label,
                       TO_CHAR(kt.last_modified, 'YYYYMMDDHH24MISS') last_modified
                from rhnKickstartableTree kt
                where kt.channel_id = :channel_id
            """
            if self.start_date:
                if self.use_rhn_date:
                    query += """
                        and kt.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and kt.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                        and kt.org_id is Null
                    """
                else:
                    query += """
                        and kt.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and kt.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                        and kt.org_id is Null
                    """
            self.kickstart_trees_query = rhnSQL.Statement(query)
            kickstart_data = rhnSQL.prepare(self.kickstart_trees_query)
            self.kickstart_trees = []
            log2stdout(1, "Gathering kickstart data...")
            for channel_id in self.channel_ids:
                kickstart_data.execute(channel_id=channel_id['channel_id'],
                                       **dates)
                a_tree = kickstart_data.fetchall_dict() or []
                if a_tree:
                    self.kickstart_trees = self.kickstart_trees + a_tree

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting kickstart data info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

        ### KICKSTART FILES INFO ###
        try:
            query = """
                select rktf.relative_filename "relative-path",
                       c.checksum_type "checksum-type", c.checksum,
                       rktf.file_size "file-size",
                       TO_CHAR(rktf.last_modified, 'YYYYMMDDHH24MISS') "last-modified",
                       rkt.base_path "base-path",
                       rkt.label "label",
                       TO_CHAR(rkt.modified, 'YYYYMMDDHH24MISS') "modified"
                from rhnKSTreeFile rktf, rhnKickstartableTree rkt,
                     rhnChecksumView c
                where rktf.kstree_id = :kstree_id
                and rkt.id = rktf.kstree_id
                and rktf.checksum_id = c.id
            """
            if self.start_date:
                if self.use_rhn_date:
                    query += """
                        and rkt.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rkt.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and rkt.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rkt.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.kickstart_files_query = rhnSQL.Statement(query)
            kickstart_files = rhnSQL.prepare(self.kickstart_files_query)
            self.kickstart_files = []
            log2stdout(1, "Gathering kickstart files info...")
            for kstree in self.kickstart_trees:
                kickstart_files.execute(kstree_id=kstree['kstree_id'], **dates)
                a_file = kickstart_files.fetchall_dict() or []
                if a_file:
                    self.kickstart_files = self.kickstart_files + a_file

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting kickstart files info." %
                                   e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

    # The close method overrides the parent class's close method. This implementation
    # closes self.outstream, which is an addition defined in this subclass. See
    # set_filename and _get_xml_writer for more info.
    def close(self):
        self.outstream.close()

    # This is an addition that allows the caller to set the filename for the output stream.
    def set_filename(self, filename):
        self.filename = filename

    # This method overrides the parent class's version of this method. This version
    # allows the output stream to be a file, which should have been set prior to this
    # via the set_filename method.
    # TODO: Add error-checking. Either give self.outstream a sane default or have it
    # throw an error if it hasn't been set yet.
    def _get_xml_writer(self):
        self.outstream = open(self.filename, "w")
        return xmlWriter.XMLWriter(stream=self.outstream)
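
    # How the pieces fit together (sketch; the path is hypothetical): each
    # dump_* method first calls set_filename() with its target path, the
    # base-class dump then calls back into _get_xml_writer(), which opens that
    # file, and close() finally releases the stream:
    #
    #     self.set_filename('/exports/arches/arch.xml')
    #     writer = self._get_xml_writer()   # opens the file
    #     ...                               # base class writes the xml
    #     self.close()                      # closes self.outstream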

    # The dump_* methods aren't really overrides because they don't preserve the method
    # signature, but they are meant as replacements for the methods defined in the base
    # class that have the same name. They will set up the file for the dump, collect info
    # necessary for the dumps to take place, and then call the base class version of the
    # method to do the actual dumping.
    def _dump_simple(self, filename, dump_func, startmsg, endmsg, exceptmsg):
        try:
            print("\n")
            log2stdout(1, startmsg)
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          1,
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            self.set_filename(filename)
            dump_func(self)

            pb.addTo(1)
            pb.printIncrement()
            pb.printComplete()
            log2stdout(4, endmsg % filename)

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError(exceptmsg % e.__class__.__name__, tbout.getvalue()),
                          sys.exc_info()[2])

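    # Note on _dump_simple: dump_func is passed as the base-class method object
    # (e.g. dumper.XML_Dumper.dump_arches) and invoked as dump_func(self), which
    # is equivalent to calling the base-class implementation on this instance.
    # The wrappers below all delegate through it with their own filenames and
    # messages.
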
    def dump_arches(self, rpm_arch_type_only=0):
        self._dump_simple(self.fm.getArchesFile(), dumper.XML_Dumper.dump_arches,
                          "Exporting arches...",
                          "Arches exported to %s",
                          "%s caught in dump_arches.")

    # This dumps arches_extra
    def dump_server_group_type_server_arches(self, rpm_arch_type_only=0, virt_filter=0):
        self._dump_simple(self.fm.getArchesExtraFile(),
                          dumper.XML_Dumper.dump_server_group_type_server_arches,
                          "Exporting arches extra...",
                          "Arches Extra exported to %s",
                          "%s caught in dump_server_group_type_server_arches.")

    def dump_blacklist_obsoletes(self):
        self._dump_simple(self.fm.getBlacklistsFile(),
                          dumper.XML_Dumper.dump_blacklist_obsoletes,
                          "Exporting blacklists...",
                          "Blacklists exported to %s",
                          "%s caught in dump_blacklist_obsoletes.")

    def dump_channel_families(self):
        self._dump_simple(self.fm.getChannelFamiliesFile(),
                          dumper.XML_Dumper.dump_channel_families,
                          "Exporting channel families...",
                          "Channel Families exported to %s",
                          "%s caught in dump_channel_families.")

    def dump_product_names(self):
        self._dump_simple(self.fm.getProductNamesFile(),
                          dumper.XML_Dumper.dump_product_names,
                          "Exporting product names...",
                          "Product names exported to %s",
                          "%s caught in dump_product_names.")

    def dump_orgs(self):
        self._dump_simple(self.fm.getOrgsFile(),
                          dumper.XML_Dumper.dump_orgs,
                          "Exporting orgs...",
                          "Orgs exported to %s",
                          "%s caught in dump_orgs.")

    def copy_repomd(self, repomds, channel, get_file_func):
        if channel['channel_id'] in repomds:
            full_filename = os.path.join(CFG.MOUNT_POINT, repomds[channel['channel_id']])
            target_filename = get_file_func(channel['label'])
            log2email(3, "Need to copy %s to %s" % (full_filename, target_filename))

            if self.hardlinks:
                os.link(full_filename, target_filename)
            else:
                shutil.copyfile(full_filename, target_filename)

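    # Design note: os.link() only works within a single filesystem, so the
    # hard-links mode assumes the export directory lives on the same filesystem
    # as CFG.MOUNT_POINT; shutil.copyfile is the safe fallback (dump_rpms below
    # turns the cross-device failure into an ISSError).
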
    def dump_channels(self, channel_labels=None, start_date=None, end_date=None,
                      use_rhn_date=True, whole_errata=False):
        try:
            print("\n")
            log2stdout(1, "Exporting channel info...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.channel_ids),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for channel in self.channel_ids:
                self.set_filename(self.fm.getChannelsFile(channel['label']))
                dumper.XML_Dumper.dump_channels(self, [channel],
                                                self.start_date, self.end_date,
                                                self.use_rhn_date, self.whole_errata)

                log2email(4, "Channel: %s" % channel['label'])
                log2email(5, "Channel exported to %s" % self.fm.getChannelsFile(channel['label']))

                self.copy_repomd(self.channel_comps, channel, self.fm.getChannelCompsFile)
                self.copy_repomd(self.channel_modules, channel, self.fm.getChannelModulesFile)

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stderr(3, "Number of channels exported: %s" % str(len(self.channel_ids)))

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_channels." % e.__class__.__name__,
                                   tbout.getvalue()), sys.exc_info()[2])

    def dump_channel_packages_short(self, channel_label=None, last_modified=None, filepath=None,
                                    validate_channels=False, send_headers=False,
                                    open_stream=True):
        try:
            print("\n")
            for ch_id in self.channel_ids:
                filepath = self.fm.getChannelPackageShortFile(ch_id['channel_id'])
                self.set_filename(filepath)
                dumper.XML_Dumper.dump_channel_packages_short(self, ch_id,
                                                              ch_id['last_modified'],
                                                              filepath)

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_channel_packages_short." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

    def dump_packages(self, packages=None):
        try:
            print("\n")
            log2stdout(1, "Exporting packages...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.pkg_info),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for pkg_info in self.pkg_info:
                package_name = "rhn-package-" + str(pkg_info['package_id'])
                self.set_filename(self.fm.getPackagesFile(package_name))
                dumper.XML_Dumper.dump_packages(self, [pkg_info])

                log2email(4, "Package: %s" % package_name)
                log2email(5, "Package exported to %s" % self.fm.getPackagesFile(package_name))

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stdout(3, "Number of packages exported: %s" % str(len(self.pkg_info)))

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_packages." % e.__class__.__name__,
                                   tbout.getvalue()), sys.exc_info()[2])

    def dump_packages_short(self, packages=None):
        try:
            print("\n")
            log2stdout(1, "Exporting short packages...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.pkg_info),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for pkg_info in self.pkg_info:
                package_name = "rhn-package-" + str(pkg_info['package_id'])
                self.set_filename(self.fm.getShortPackagesFile(package_name))
                dumper.XML_Dumper.dump_packages_short(self, [pkg_info])

                log2email(4, "Short Package: %s" % package_name)
                log2email(5, "Short Package exported to %s" %
                          self.fm.getShortPackagesFile(package_name))

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stdout(3, "Number of short packages exported: %s" % str(len(self.pkg_info)))

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_packages_short." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

    def dump_source_packages(self, packages=None):
        try:
            print("\n")
            for pkg_info in self.src_pkg_info:
                self.set_filename(self.fm.getSourcePackagesFile(
                    "rhn-source-package-" + str(pkg_info['package_id'])))
                dumper.XML_Dumper.dump_source_packages(self, [pkg_info])

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_source_packages." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

    def dump_errata(self, errata=None, verify_errata=False):
        try:
            print("\n")
            log2stdout(1, "Exporting errata...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.errata_info),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for errata_info in self.errata_info:
                erratum_name = "rhn-erratum-" + str(errata_info['errata_id'])
                self.set_filename(self.fm.getErrataFile(erratum_name))
                dumper.XML_Dumper.dump_errata(self, [errata_info])

                log2email(4, "Erratum: %s" % str(errata_info['advisory-name']))
                log2email(5, "Erratum exported to %s" % self.fm.getErrataFile(erratum_name))

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stdout(3, "Number of errata exported: %s" % str(len(self.errata_info)))

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_errata." % e.__class__.__name__,
                                   tbout.getvalue()), sys.exc_info()[2])

    def dump_kickstart_data(self):
        try:
            print("\n")
            log2stdout(1, "Exporting kickstart data...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.kickstart_trees),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for kickstart_tree in self.kickstart_trees:
                self.set_filename(self.fm.getKickstartTreeFile(kickstart_tree['kickstart_label']))
                dumper.XML_Dumper.dump_kickstartable_trees(self, [kickstart_tree])

                log2email(5, "KS Data: %s" % str(kickstart_tree['kickstart_label']))

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stdout(3, "Amount of kickstart data exported: %s" % str(len(self.kickstart_trees)))

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_kickstart_data." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

    def dump_kickstart_files(self):
        try:
            print("\n")
            log2stdout(1, "Exporting kickstart files...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.kickstart_files),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for kickstart_file in self.kickstart_files:
                # get the path to the kickstart files under the satellite's mount point
                path_to_files = os.path.join(CFG.MOUNT_POINT,
                                             kickstart_file['base-path'],
                                             kickstart_file['relative-path'])

                # Make sure the path actually exists
                if not os.path.exists(path_to_files):
                    raise ISSError("Missing kickstart file under satellite mount-point: %s"
                                   % (path_to_files,), "")

                # generate the path to the kickstart files under the export directory.
                path_to_export_file = self.fm.getKickstartFileFile(
                    kickstart_file['label'],
                    kickstart_file['relative-path'])
                if os.path.exists(path_to_export_file):
                    # already exists, skip ks file
                    continue

                # Get the dirs to the file under the export directory.
                dirs_to_file = os.path.split(path_to_export_file)[0]

                # create the directory to the kickstart files under the export
                # directory, if necessary.
                if not os.path.exists(dirs_to_file):
                    os.makedirs(dirs_to_file)

                try:
                    if self.hardlinks:
                        # Make hardlinks
                        try:
                            os.link(path_to_files, path_to_export_file)
                        except OSError:
                            pass
                    else:
                        # Copy file from satellite to export dir.
                        shutil.copyfile(path_to_files, path_to_export_file)
                except IOError:
                    e = sys.exc_info()[1]
                    tbout = cStringIO.StringIO()
                    Traceback(mail=0, ostream=tbout, with_locals=1)
                    raise_with_tb(ISSError("Error: Error copying file: %s: %s" %
                                           (path_to_files, e.__class__.__name__),
                                           tbout.getvalue()), sys.exc_info()[2])

                log2email(5, "Kickstart File: %s" %
                          os.path.join(kickstart_file['base-path'],
                                       kickstart_file['relative-path']))

                pb.addTo(1)
                pb.printIncrement()

            pb.printComplete()
            log2stdout(3, "Number of kickstart files exported: %s" % str(len(self.kickstart_files)))
        except ISSError:
            raise
        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_kickstart_files." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

    # RPM and SRPM dumping code
    def dump_rpms(self):
        try:
            print("\n")
            log2stdout(1, "Exporting binary RPMs...")
            pb = progress_bar.ProgressBar(self.pb_label,
                                          self.pb_complete,
                                          len(self.brpms),
                                          self.pb_length,
                                          self.pb_char)
            pb.printAll(1)
            for rpm in self.brpms:
                # generate path to the rpms under the mount point
                path_to_rpm = diskImportLib.rpmsPath("rhn-package-%s" % str(rpm['id']), self.mp)

                # get the dirs to the rpm
                dirs_to_rpm = os.path.split(path_to_rpm)[0]

                if not rpm['path']:
                    raise ISSError("Error: Missing RPM under the satellite mount point. "
                                   "(Package id: %s)" % rpm['id'], "")

                # get the path to the rpm from under the satellite's mountpoint
                satellite_path = os.path.join(CFG.MOUNT_POINT, rpm['path'])

                if not os.path.exists(satellite_path):
                    raise ISSError("Error: Missing RPM under the satellite mount point: %s" %
                                   (satellite_path,), "")

                # create the directory for the rpm, if necessary.
                if not os.path.exists(dirs_to_rpm):
                    os.makedirs(dirs_to_rpm)

                # check if the path to rpm hardlink already exists
                if os.path.exists(path_to_rpm):
                    continue

                try:
                    # copy the file to the path under the mountpoint.
                    if self.hardlinks:
                        os.link(satellite_path, path_to_rpm)
                    else:
                        shutil.copyfile(satellite_path, path_to_rpm)
                except IOError:
                    e = sys.exc_info()[1]
                    tbout = cStringIO.StringIO()
                    Traceback(mail=0, ostream=tbout, with_locals=1)
                    raise_with_tb(ISSError("Error: Error copying file %s: %s" %
                                           (os.path.join(CFG.MOUNT_POINT, rpm['path']),
                                            e.__class__.__name__),
                                           tbout.getvalue()), sys.exc_info()[2])
                except OSError:
                    e = sys.exc_info()[1]
                    tbout = cStringIO.StringIO()
                    Traceback(mail=0, ostream=tbout, with_locals=1)
                    raise_with_tb(ISSError("Error: Could not make hard link %s: %s "
                                           "(different filesystems?)" %
                                           (os.path.join(CFG.MOUNT_POINT, rpm['path']),
                                            e.__class__.__name__),
                                           tbout.getvalue()), sys.exc_info()[2])

                log2email(5, "RPM: %s" % rpm['path'])

                pb.addTo(1)
                pb.printIncrement()
            pb.printComplete()
            log2stdout(3, "Number of RPMs exported: %s" % str(len(self.brpms)))
        except ISSError:
            raise

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught in dump_rpms." % e.__class__.__name__,
                                   tbout.getvalue()), sys.exc_info()[2])


def get_report():
    body = dumpEMAIL_LOG()
    return body


def print_report():
    # The body of print_report was collapsed in the original listing; this is a
    # minimal, assumed reconstruction. main() below only needs it to print the
    # accumulated email log.
    print("")
    print("REPORT:")
    print(get_report())


# Stolen and modified from satsync.py
def sendMail():
    # Send email summary
    body = dumpEMAIL_LOG()
    if body:
        print("+++ sending log as an email +++")
        headers = {
            'Subject': 'Spacewalk Management Satellite Export report from %s' % os.uname()[1],
        }
        # sndr = CFG.get('traceback_mail', 'rhn-satellite')
        sndr = 'rhn-satellite@%s' % os.uname()[1]
        rhnMail.send(headers, body, sender=sndr)
    else:
        print("+++ email requested, but there is nothing to send +++")


def handle_error(message, traceback):
    log2stderr(-1, "\n" + message)
    log2email(-1, traceback)


# This class is a mess.
class ExporterMain:

    def __init__(self):
        initCFG('server.iss')

        # pylint: disable=E1101
        self.options = UI()
        self.action_deps = ActionDeps(self.options)
        self.action_order, self.actions = self.action_deps.get_actions()
        if self.options.debug_level:
            debug_level = int(self.options.debug_level)
        else:
            debug_level = int(CFG.DEBUG)

        CFG.set("TRACEBACK_MAIL", self.options.traceback_mail or CFG.TRACEBACK_MAIL)
        CFG.set("DEBUG", debug_level)
        CFG.set("ISSEMAIL", self.options.email)

        initEMAIL_LOG()

        # This was taken straight from satsync.py.
        try:
            rhnSQL.initDB()
        except SQLConnectError:
            print('SQLERROR: There was an error connecting to the Database.')
            sys.exit(-1)
        except (SQLError, SQLSchemaError):
            e = sys.exc_info()[1]
            # An SQL error is fatal... crash and burn
            exitWithTraceback(e, 'SQL ERROR during xml processing', -1)

        # This was cribbed from satsync.py.
        if self.options.print_configuration:
            CFG.show()
            sys.exit(0)

        if self.options.list_channels:
            self.print_list_channels(self.list_channels())
            sys.exit(0)

        if self.options.list_orgs:
            self.print_orgs(self.list_orgs())
            sys.exit(0)

        # From this point on everything should assume a list of channels, so it
        # needs to be a list even if there's only one entry.
        if self.options.all_channels:
            channel_dict = self.list_channels()
            self.options.channel = []
            for pc in channel_dict:
                self.options.channel.append(pc)
                self.options.channel.extend(channel_dict[pc])
        elif self.options.channel:
            if not isinstance(self.options.channel, type([])):
                self.options.channel = [self.options.channel]
        else:
            sys.stdout.write("--channel not included!\n")
            sys.exit(0)

        # Same as above but for orgs
        if self.options.all_orgs:
            orgs = self.list_orgs()
            self.options.org = []
            for org in orgs:
                self.options.org.append(org['id'])
        elif self.options.org:
            if not isinstance(self.options.org, type([])):
                self.options.org = [self.options.org]
            orgs = {}
            for org in self.list_orgs():
                orgs[org['name']] = str(org['id'])
            using_orgs = []
            for org in self.options.org:
                # User might have specified org name or org id, try both
                if org in list(orgs.values()):  # ids
                    using_orgs.append(org)
                elif org in list(orgs.keys()):  # names
                    using_orgs.append(orgs[org])
                else:
                    sys.stdout.write("Org not found: %s\n" % org)
                    exit(0)
            self.options.org = using_orgs
        else:
            self.options.org = []
        self.options.org = [str(x) for x in self.options.org]

        # Since everything gets dumped to a directory it wouldn't make
        # much sense if it wasn't required.
        if self.options.dir:
            self.isos_dir = os.path.join(self.options.dir, "satellite-isos")
            self.outputdir = self.options.dir
        else:
            sys.stdout.write("--dir not included!\n")
            sys.exit(0)

        if self.options.use_sync_date and self.options.use_rhn_date:
            sys.stderr.write("--use-rhn-date and --use-sync-date are mutually exclusive.\n")
            sys.exit(1)
        elif self.options.use_sync_date:
            self.options.use_rhn_date = False
        else:
            self.options.use_rhn_date = True

        if self.options.end_date and not self.options.start_date:
            sys.stderr.write("--end-date must be used with --start-date.\n")
            sys.exit(1)

        if self.options.end_date and len(self.options.end_date) < 8:
            sys.stdout.write(_("format of %s should be at least YYYYMMDD.\n") % '--end-date')
            sys.exit(1)

        if self.options.start_date and len(self.options.start_date) < 8:
            sys.stdout.write(_("format of %s should be at least YYYYMMDD.\n") % '--start-date')
            sys.exit(1)

        if self.options.start_date:
            if self.options.end_date is None:
                self.end_date = time.strftime("%Y%m%d%H%M%S")
            else:
                self.end_date = self.options.end_date.ljust(14, '0')

            self.start_date = self.options.start_date.ljust(14, '0')
            print("start date limit: %s" % self.start_date)
            print("end date limit: %s" % self.end_date)
        else:
            self.start_date = None
            self.end_date = None

        if self.start_date and self.options.whole_errata:
            self.whole_errata = self.options.whole_errata

        # verify mountpoint
        if os.access(self.outputdir, os.F_OK | os.R_OK | os.W_OK):
            if os.path.isdir(self.outputdir):
                self.dumper = Dumper(self.outputdir,
                                     self.options.channel,
                                     self.options.org,
                                     self.options.hard_links,
                                     start_date=self.start_date,
                                     end_date=self.end_date,
                                     use_rhn_date=self.options.use_rhn_date,
                                     whole_errata=self.options.whole_errata)
                self.actionmap = {
                    'arches': {'dump': self.dumper.dump_arches},
                    'arches-extra': {'dump': self.dumper.dump_server_group_type_server_arches},
                    'blacklists': {'dump': self.dumper.dump_blacklist_obsoletes},
                    'channel-families': {'dump': self.dumper.dump_channel_families},
                    'channels': {'dump': self.dumper.dump_channels},
                    'packages': {'dump': self.dumper.dump_packages},
                    'short': {'dump': self.dumper.dump_packages_short},
                    # 'channel-pkg-short': {'dump': self.dumper.dump_channel_packages_short},
                    # 'source-packages': {'dump': self.dumper.dump_source_packages},
                    'errata': {'dump': self.dumper.dump_errata},
                    'kickstarts': {'dump': [self.dumper.dump_kickstart_data,
                                            self.dumper.dump_kickstart_files]},
                    'rpms': {'dump': self.dumper.dump_rpms},
                    'orgs': {'dump': self.dumper.dump_orgs},
                    'productnames': {'dump': self.dumper.dump_product_names},
                }
            else:
                print("The output directory is not a directory")
                sys.exit(-1)
        else:
            print("can't access output directory")
            sys.exit(-1)
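
    # Note on the date handling above (sketch): --start-date/--end-date accept
    # anything from YYYYMMDD up to a full YYYYMMDDHH24MISS timestamp;
    # ljust(14, '0') pads the short form, e.g.
    #     '20180101'.ljust(14, '0') == '20180101000000'
    # which matches the TO_TIMESTAMP(..., 'YYYYMMDDHH24MISS') format used in
    # the Dumper queries.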

    @staticmethod
    def list_channels():
        """ Return all available channels.

        The returned format is a dictionary with base channel labels as keys;
        the value for each key is a list of the labels of its child channels.
        """
        # The keys for channel_dict are the labels of the base channels.
        # The value associated with each key is a list of the labels of
        # the child channels whose parent channel is the key.
        channel_dict = {}

        # Grab some info on base channels. Base channels
        # have parent_channel set to null.
        base_channel_query = rhnSQL.Statement("""
            select id, label
            from rhnChannel
            where parent_channel is null
        """)
        base_channel_data = rhnSQL.prepare(base_channel_query)
        base_channel_data.execute()
        base_channels = base_channel_data.fetchall_dict()

        # Grab some info on child channels.
        child_channel_query = rhnSQL.Statement("""
            select id, label, parent_channel
            from rhnChannel
            where parent_channel = :id
        """)
        child_channel_data = rhnSQL.prepare(child_channel_query)

        if base_channels:
            for ch in base_channels:
                base_label = ch['label']
                base_id = ch['id']

                # If the base channel isn't in channel_dict yet, create
                # an empty list for it.
                if not base_label in channel_dict:
                    channel_dict[base_label] = []

                # grab the child channel information for this base channel.
                child_channel_data.execute(id=base_id)
                child_channels = child_channel_data.fetchall_dict()

                # If the base channel has some child channels, add them
                # to the list associated with the base channel in channel_dict.
                # Organizing the labels this way makes it a lot easier to print
                # out.
                if child_channels:
                    for child in child_channels:
                        child_label = child['label']
                        channel_dict[base_label].append(child_label)
        return channel_dict

    @staticmethod
    def print_list_channels(channel_dict):
        """ channel_dict is a dictionary with base channel labels as keys;
        each value is a list of labels of that base's child channels.
        """
        if channel_dict:
            # Print the legend.
            print("Channel List:")
            print("B = Base Channel")
            print("C = Child Channel")
            print("")

            base_template = "B %s"
            child_template = "C\t%s"

            # Print channel information.
            for pc in channel_dict.keys():
                print(base_template % (pc,))
                for cc in channel_dict[pc]:
                    print(child_template % (cc,))
                print(" ")
        else:
            print("No Channels available for listing.")

    @staticmethod
    def list_orgs():
        """
        Return a list of all orgs.
        """
        org_query = rhnSQL.Statement("""
            select id, name
            from web_customer
        """)
        org_data = rhnSQL.prepare(org_query)
        org_data.execute()
        return org_data.fetchall_dict()

    @staticmethod
    def print_orgs(orgs):
        if orgs:
            print("Orgs available for export:")
            for org in orgs:
                print("Id: %s, Name: \'%s\'" % (org['id'], org['name']))
        else:
            print("No Orgs available for listing.")

    def main(self):
        # pylint: disable=E1101
        try:
            for action in self.action_order:
                if self.actions[action] != 1:
                    continue

                if not action in self.actionmap:
                    # If we get here there's a programming error. It means that
                    # self.action_order contains an action that isn't defined in
                    # self.actionmap.
                    sys.stderr.write("List of actions doesn't have %s.\n" % (action,))
                    continue

                if isinstance(self.actionmap[action]['dump'], type([])):
                    for dmp in self.actionmap[action]['dump']:
                        dmp()
                else:
                    self.actionmap[action]['dump']()

                # Now compress the dump data
                if action == 'rpms':
                    continue
                elif action == 'arches-extra':
                    action = 'arches'
                elif action == 'short':
                    action = 'packages_short'
                elif action == 'channel-families':
                    action = 'channel_families'
                elif action == 'kickstarts':
                    action = 'kickstart_trees'
                elif action == 'productnames':
                    action = 'product_names'

                os_data_dir = os.path.join(self.outputdir, action)
                if not os.path.exists(os_data_dir):
                    continue

                for fpath, _dirs, files in os.walk(os_data_dir):
                    for f in files:
                        if f.endswith(".xml") or f.endswith(".yaml"):
                            filepath = os.path.join(fpath, f)
                            compress_file(filepath)

            if self.options.make_isos:
                # iso_output = os.path.join(self.isos_dir, self.dump_dir)
                iso_output = self.isos_dir
                if not os.path.exists(iso_output):
                    os.makedirs(iso_output)

                iss_isos.create_isos(self.outputdir, iso_output,
                                     "rhn-export", self.start_date, self.end_date,
                                     iso_type=self.options.make_isos)

                # Generate md5sum digest file for isos
                if os.path.exists(iso_output):
                    f = open(os.path.join(iso_output, 'MD5SUM'), 'w')
                    for iso_file in os.listdir(iso_output):
                        if self.options.make_isos != "dvds" and iso_file != "MD5SUM":
                            md5_val = getFileChecksum('md5', (os.path.join(iso_output, iso_file)))
                            md5str = "%s %s\n" % (md5_val, iso_file)
                            f.write(md5str)
                    f.close()

            if self.options.email:
                sendMail()

            if self.options.print_report:
                print_report()

        except SystemExit:
            sys.exit(0)

        except ISSError:
            isserror = sys.exc_info()[1]
            # I have the tb generated in the function that the error occurred in to
            # minimize the amount of extra crap that shows up in it.
            tb = isserror.tb
            msg = isserror.msg
            handle_error(msg, tb)

            if self.options.email:
                sendMail()
            if self.options.print_report:
                print_report()

            sys.exit(-1)

        except Exception:  # pylint: disable=E0012, W0703
            e = sys.exc_info()[1]
            # This should catch the vast majority of errors that aren't ISSErrors
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            msg = "Error: %s caught!" % e.__class__.__name__
            handle_error(msg, tbout.getvalue())
            if self.options.email:
                sendMail()
            if self.options.print_report:
                print_report()
            sys.exit(-1)


def compress_file(f):
    """
    Gzip the given file and then remove the original.
    """
    # Read and write bytes so this works the same on python 2 and python 3.
    datafile = open(f, 'rb')
    gzipper = gzip.GzipFile(f + '.gz', 'wb', 9)
    gzipper.write(datafile.read())
    gzipper.flush()
    # close opened streams
    gzipper.close()
    datafile.close()
    # remove the old file
    os.unlink(f)

if __name__ == "__main__":
    em = ExporterMain()
    em.main()