1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16 import os
17 import os.path
18 import sys
19 import time
20 import gzip
21 import shutil
22 import gettext
23 try:
24
25 import cStringIO
26 except ImportError:
27
28 import io as cStringIO
29 import dumper
30 from spacewalk.common.usix import raise_with_tb
31 from spacewalk.common import rhnMail
32 from spacewalk.common.rhnConfig import CFG, initCFG
33 from spacewalk.common.rhnTB import Traceback, exitWithTraceback
34 from spacewalk.common.checksum import getFileChecksum
35 from spacewalk.server import rhnSQL
36 from spacewalk.server.rhnSQL import SQLError, SQLSchemaError, SQLConnectError
37 from spacewalk.satellite_tools.exporter import xmlWriter
38 from spacewalk.satellite_tools import xmlDiskSource, diskImportLib, progress_bar
39 from spacewalk.satellite_tools.syncLib import initEMAIL_LOG, dumpEMAIL_LOG, log2email, log2stderr, log2stdout
40 from iss_ui import UI
41 from iss_actions import ActionDeps
42 import iss_isos
43
44 t = gettext.translation('spacewalk-backend-server', fallback=True)
45 _ = t.ugettext
51
53 Exception.__init__(self)
54 self.msg = msg
55 self.tb = tb
56
62
63 - def __init__(self, mount_point, channel_name=None):
64 self.mp = mount_point
65 self.channelid = channel_name
66 self.pathkey = "xml-channel-packages/rhn-channel-%d.data"
67
69 self.channelid = channel_id
70
72 return os.path.join(self.mp, self.pathkey % (self.channelid,))
73
76
77 """ This class maps dumps to files. In other words, you give it
78 the type of dump you're doing and it gives you the file to
79 write it to.
80 """
81
83 self.mp = mount_point
84 self.filemap = {
85 'arches': xmlDiskSource.ArchesDiskSource(self.mp),
86 'arches-extra': xmlDiskSource.ArchesExtraDiskSource(self.mp),
87 'blacklists': xmlDiskSource.BlacklistsDiskSource(self.mp),
88 'channelfamilies': xmlDiskSource.ChannelFamilyDiskSource(self.mp),
89 'orgs': xmlDiskSource.OrgsDiskSource(self.mp),
90 'channels': xmlDiskSource.ChannelDiskSource(self.mp),
91 'channel-pkg-short': ISSChannelPackageShortDiskSource(self.mp),
92 'packages-short': xmlDiskSource.ShortPackageDiskSource(self.mp),
93 'packages': xmlDiskSource.PackageDiskSource(self.mp),
94 'sourcepackages': xmlDiskSource.SourcePackageDiskSource(self.mp),
95 'errata': xmlDiskSource.ErrataDiskSource(self.mp),
96 'kickstart_trees': xmlDiskSource.KickstartDataDiskSource(self.mp),
97 'kickstart_files': xmlDiskSource.KickstartFileDiskSource(self.mp),
98 'binary_rpms': xmlDiskSource.BinaryRPMDiskSource(self.mp),
99 'comps': xmlDiskSource.ChannelCompsDiskSource(self.mp),
100 'modules': xmlDiskSource.ChannelModulesDiskSource(self.mp),
101 'productnames': xmlDiskSource.ProductnamesDiskSource(self.mp),
102 }
103
104
105
106 @staticmethod
108
109 dirs_to_make = os.path.split(ofile)[0]
110
111
112 if not os.path.exists(dirs_to_make):
113 os.makedirs(dirs_to_make)
114
115 return ofile
116
117
118
121
124
127
130
133
136
140
144
148
152
156
158 self.filemap['packages-short'].setID(packageid)
159 return self.setup_file(self.filemap['packages-short']._getFile())
160
162 self.filemap['sourcepackages'].setID(sp_id)
163 return self.setup_file(self.filemap['sourcepackages']._getFile())
164
168
170 self.filemap['kickstart_trees'].setID(ks_id)
171 return self.setup_file(self.filemap['kickstart_trees']._getFile())
172
177
180
181
182 -class Dumper(dumper.XML_Dumper):
183
184 """ This class subclasses the XML_Dumper class. It overrides
185 the _get_xml_writer method and adds a set_stream method,
186 which will let it write to a file instead of over the wire.
187 """
188
    def __init__(self, outputdir, channel_labels, org_ids, hardlinks,
                 start_date, end_date, use_rhn_date, whole_errata):
        """Collect all export metadata for the requested channels up front.

        Runs a series of database queries and stashes their results on the
        instance (channel_ids, brpms, pkg_info, src_pkg_info, errata_info,
        kickstart_trees, kickstart_files, ...) so the dump_* methods can
        later iterate over plain lists of dicts without touching the DB.

        :param outputdir: export directory; becomes self.mp and the
            FileMapper root.
        :param channel_labels: iterable of channel labels to export.
        :param org_ids: org ids forwarded to set_exportable_orgs().
        :param hardlinks: if true, later copies are done with os.link.
        :param start_date: optional date-window start (string, at least
            YYYYMMDD -- padded to 14 chars by the caller; TODO confirm).
        :param end_date: optional date-window end, same format.
        :param use_rhn_date: filter on *.last_modified columns instead of
            the local *.modified columns.
        :param whole_errata: when date-limited, include every package of an
            erratum whose dates fall inside the window.
        :raises ISSError: wrapping any failure in the metadata queries.
        """
        dumper.XML_Dumper.__init__(self)
        self.fm = FileMapper(outputdir)
        self.mp = outputdir

        # Progress-bar cosmetics shared by all dump_* methods.
        self.pb_label = "Exporting: "
        self.pb_length = 20
        self.pb_complete = " - Done!"
        self.pb_char = "#"
        self.hardlinks = hardlinks

        # Current output target; set per-item by set_filename().
        self.filename = None
        self.outstream = None

        self.start_date = start_date
        self.end_date = end_date
        self.use_rhn_date = use_rhn_date
        self.whole_errata = whole_errata

        # Bind variables shared by every date-limited query below.
        if self.start_date:
            dates = {'start_date': self.start_date,
                     'end_date': self.end_date, }
        else:
            dates = {}

        # --- Channel info (plus comps/modules repomd file locations) ---
        try:
            query = """
                select ch.id channel_id, label,
                TO_CHAR(last_modified, 'YYYYMMDDHH24MISS') last_modified
                from rhnChannel ch
                where ch.label = :label
            """
            self.channel_query = rhnSQL.Statement(query)
            ch_data = rhnSQL.prepare(self.channel_query)

            # comps_type_id = 1 -> comps.xml; newest row wins (order by id desc).
            comps_query = """
                select relative_filename
                from rhnChannelComps
                where channel_id = :channel_id
                and comps_type_id = 1
                order by id desc
            """

            # comps_type_id = 2 -> modules.yaml metadata.
            modules_query = """
                select relative_filename
                from rhnChannelComps
                where channel_id = :channel_id
                and comps_type_id = 2
                order by id desc
            """

            self.channel_comps_query = rhnSQL.Statement(comps_query)
            channel_comps_sth = rhnSQL.prepare(self.channel_comps_query)
            self.channel_modules_query = rhnSQL.Statement(modules_query)
            channel_modules_sth = rhnSQL.prepare(self.channel_modules_query)

            # channel_id -> relative filename of the newest comps/modules file.
            self.channel_comps = {}
            self.channel_modules = {}

            self.set_exportable_orgs(org_ids)

            log2stdout(1, "Gathering channel info...")
            for ids in channel_labels:
                ch_data.execute(label=ids)
                ch_info = ch_data.fetchall_dict()

                if not ch_info:
                    raise ISSError("Error: Channel %s not found." % ids, "")

                self.channel_ids = self.channel_ids + ch_info

                channel_comps_sth.execute(channel_id=ch_info[0]['channel_id'])
                comps_info = channel_comps_sth.fetchone_dict()
                channel_modules_sth.execute(channel_id=ch_info[0]['channel_id'])
                modules_info = channel_modules_sth.fetchone_dict()

                if comps_info is not None:
                    self.channel_comps[ch_info[0]['channel_id']] = comps_info['relative_filename']
                if modules_info is not None:
                    self.channel_modules[ch_info[0]['channel_id']] = modules_info['relative_filename']

            # Channels already present in the export directory are added so
            # channel-family data stays consistent across incremental runs.
            # NOTE(review): presumably .list() returns labels found on disk
            # -- confirm against xmlDiskSource.ChannelDiskSource.
            channel_labels_for_families = self.fm.filemap['channels'].list()
            print("Appending channels %s" % (channel_labels_for_families))
            for ids in channel_labels_for_families:
                ch_data.execute(label=ids)
                ch_info = ch_data.fetchall_dict()
                if ch_info:
                    self.channel_ids_for_families = self.channel_ids_for_families + ch_info

        except ISSError:
            # Already wrapped with a useful message; propagate untouched.
            raise
        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting channel info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Binary RPM paths for each exported channel ---
        try:
            if self.whole_errata and self.start_date:
                # Left-join errata so packages without errata survive the
                # date filter via the "rep.package_id is NULL" arm below.
                query = """ select rcp.package_id id, rp.path path
                    from rhnChannelPackage rcp, rhnPackage rp
                    left join rhnErrataPackage rep on rp.id = rep.package_id
                    left join rhnErrata re on rep.errata_id = re.id
                    where rcp.package_id = rp.id
                    and rcp.channel_id = :channel_id
                """
            else:
                query = """
                    select rcp.package_id id, rp.path path
                    from rhnChannelPackage rcp, rhnPackage rp
                    where rcp.package_id = rp.id
                    and rcp.channel_id = :channel_id
                """

            if self.start_date:
                if self.whole_errata:
                    if self.use_rhn_date:
                        query += """ and
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (rep.package_id is NULL
                            and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ and
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (rep.package_id is NULL
                            and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.brpm_query = rhnSQL.Statement(query)
            brpm_data = rhnSQL.prepare(self.brpm_query)

            self.brpms = []
            log2stdout(1, "Gathering binary RPM info...")
            for ch in self.channel_ids:
                brpm_data.execute(channel_id=ch['channel_id'], **dates)
                self.brpms = self.brpms + (brpm_data.fetchall_dict() or [])
        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting binary rpm info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Package ids + last-modified stamps per channel ---
        try:
            if self.whole_errata and self.start_date:
                query = """
                    select rp.id package_id,
                    TO_CHAR(rp.last_modified, 'YYYYMMDDHH24MISS') last_modified
                    from rhnChannelPackage rcp, rhnPackage rp
                    left join rhnErrataPackage rep on rp.id = rep.package_id
                    left join rhnErrata re on rep.errata_id = re.id
                    where rcp.channel_id = :channel_id
                    and rcp.package_id = rp.id
                """
            else:
                query = """
                    select rp.id package_id,
                    TO_CHAR(rp.last_modified, 'YYYYMMDDHH24MISS') last_modified
                    from rhnPackage rp, rhnChannelPackage rcp
                    where rcp.channel_id = :channel_id
                    and rcp.package_id = rp.id
                """
            if self.start_date:
                if self.whole_errata:
                    if self.use_rhn_date:
                        query += """ and
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (rep.package_id is NULL
                            and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ and
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (rep.package_id is NULL
                            and rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        and rp.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rp.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and (rcp.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rcp.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                    """
            self.package_query = rhnSQL.Statement(query)
            package_data = rhnSQL.prepare(self.package_query)

            self.pkg_info = []

            log2stdout(1, "Gathering package info...")
            for channel_id in self.channel_ids:
                package_data.execute(channel_id=channel_id['channel_id'], **dates)
                a_package = package_data.fetchall_dict() or []

                if a_package:
                    self.pkg_info = self.pkg_info + a_package

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting package info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Source packages (not channel-scoped) ---
        try:
            query = """
                select ps.id package_id,
                TO_CHAR(ps.last_modified,'YYYYMMDDHH24MISS') last_modified,
                ps.source_rpm_id source_rpm_id
                from rhnPackageSource ps
            """
            if self.start_date:
                if self.whole_errata:
                    query += """
                        left join rhnErrataFilePackageSource refps on refps.package_id = ps.id
                        left join rhnErrataFile ref on refps.errata_file_id = ref.id
                        left join rhnErrata re on ref.errata_id = re.id
                    """
                    # NOTE(review): the branches below append "and ..." but no
                    # "where" was emitted for the whole_errata path, which looks
                    # like invalid SQL -- confirm against a live schema.
                    if self.use_rhn_date:
                        query += """ and
                            ((re.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (refps.package_id is NULL
                            and ps.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and ps.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                    else:
                        query += """ and
                            ((re.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and re.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                            ) or (refps.package_id is NULL
                            and ps.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                            and ps.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS'))
                            )
                        """
                elif self.use_rhn_date:
                    query += """
                        where ps.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ps.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        where ps.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ps.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.source_package_query = rhnSQL.Statement(query)
            source_package_data = rhnSQL.prepare(self.source_package_query)
            source_package_data.execute(**dates)

            self.src_pkg_info = source_package_data.fetchall_dict() or []

            # Defensive normalization; redundant with the "or []" above.
            if not self.src_pkg_info:
                self.src_pkg_info = []

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting source package info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Errata per channel ---
        try:
            query = """
                select e.id errata_id,
                TO_CHAR(e.last_modified,'YYYYMMDDHH24MISS') last_modified,
                e.advisory_name "advisory-name"
                from rhnChannelErrata ce, rhnErrata e
                where ce.channel_id = :channel_id
                and ce.errata_id = e.id
            """
            if self.start_date:
                if self.use_rhn_date:
                    query += """
                        and e.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and e.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and ce.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and ce.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.errata_query = rhnSQL.Statement(query)
            errata_data = rhnSQL.prepare(self.errata_query)

            self.errata_info = []
            log2stdout(1, "Gathering errata info...")
            for channel_id in self.channel_ids:
                errata_data.execute(channel_id=channel_id['channel_id'], **dates)
                an_errata = errata_data.fetchall_dict() or []
                if an_errata:
                    self.errata_info = self.errata_info + an_errata

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting errata info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Kickstartable trees per channel ---
        try:
            query = """
                select kt.id kstree_id, kt.label kickstart_label,
                TO_CHAR(kt.last_modified, 'YYYYMMDDHH24MISS') last_modified
                from rhnKickstartableTree kt
                where kt.channel_id = :channel_id
            """
            if self.start_date:
                # Date-limited exports only ship org-less (vendor) trees.
                if self.use_rhn_date:
                    query += """
                        and kt.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and kt.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                        and kt.org_id is Null
                    """
                else:
                    query += """
                        and kt.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and kt.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                        and kt.org_id is Null
                    """
            self.kickstart_trees_query = rhnSQL.Statement(query)
            kickstart_data = rhnSQL.prepare(self.kickstart_trees_query)
            self.kickstart_trees = []
            log2stdout(1, "Gathering kickstart data...")
            for channel_id in self.channel_ids:
                kickstart_data.execute(channel_id=channel_id['channel_id'],
                                       **dates)
                a_tree = kickstart_data.fetchall_dict() or []
                if a_tree:
                    self.kickstart_trees = self.kickstart_trees + a_tree

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting kickstart data info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])

        # --- Individual kickstart files for each tree found above ---
        try:
            query = """
                select rktf.relative_filename "relative-path",
                c.checksum_type "checksum-type", c.checksum,
                rktf.file_size "file-size",
                TO_CHAR(rktf.last_modified, 'YYYYMMDDHH24MISS') "last-modified",
                rkt.base_path "base-path",
                rkt.label "label",
                TO_CHAR(rkt.modified, 'YYYYMMDDHH24MISS') "modified"
                from rhnKSTreeFile rktf, rhnKickstartableTree rkt,
                rhnChecksumView c
                where rktf.kstree_id = :kstree_id
                and rkt.id = rktf.kstree_id
                and rktf.checksum_id = c.id
            """
            if self.start_date:
                if self.use_rhn_date:
                    query += """
                        and rkt.last_modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rkt.last_modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
                else:
                    query += """
                        and rkt.modified >= TO_TIMESTAMP(:start_date, 'YYYYMMDDHH24MISS')
                        and rkt.modified <= TO_TIMESTAMP(:end_date, 'YYYYMMDDHH24MISS')
                    """
            self.kickstart_files_query = rhnSQL.Statement(query)
            kickstart_files = rhnSQL.prepare(self.kickstart_files_query)
            self.kickstart_files = []
            log2stdout(1, "Gathering kickstart files info...")
            for kstree in self.kickstart_trees:
                kickstart_files.execute(kstree_id=kstree['kstree_id'], **dates)
                a_file = kickstart_files.fetchall_dict() or []
                if a_file:
                    self.kickstart_files = self.kickstart_files + a_file

        except Exception:
            e = sys.exc_info()[1]
            tbout = cStringIO.StringIO()
            Traceback(mail=0, ostream=tbout, with_locals=1)
            raise_with_tb(ISSError("%s caught while getting kickstart files info." %
                                   e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])
625
626
627
628
630 self.outstream.close()
631
632
635
636
637
638
639
643
644
645
646
647
648
649 - def _dump_simple(self, filename, dump_func, startmsg, endmsg, exceptmsg):
650 try:
651 print("\n")
652 log2stdout(1, startmsg)
653 pb = progress_bar.ProgressBar(self.pb_label,
654 self.pb_complete,
655 1,
656 self.pb_length,
657 self.pb_char)
658 pb.printAll(1)
659 self.set_filename(filename)
660 dump_func(self)
661
662 pb.addTo(1)
663 pb.printIncrement()
664 pb.printComplete()
665 log2stdout(4, endmsg % filename)
666
667 except Exception:
668 e = sys.exc_info()[1]
669 tbout = cStringIO.StringIO()
670 Traceback(mail=0, ostream=tbout, with_locals=1)
671 raise_with_tb(ISSError(exceptmsg % e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])
672
678
679
686
693
700
707
714
715 - def copy_repomd(self, repomds, channel, get_file_func):
716 if channel['channel_id'] in repomds:
717 full_filename = os.path.join(CFG.MOUNT_POINT, repomds[channel['channel_id']])
718 target_filename = get_file_func(channel['label'])
719 log2email(3, "Need to copy %s to %s" % (full_filename, target_filename))
720
721 if self.hardlinks:
722 os.link(full_filename, target_filename)
723 else:
724 shutil.copyfile(full_filename, target_filename)
725
726
727 - def dump_channels(self, channel_labels=None, start_date=None, end_date=None,
728 use_rhn_date=True, whole_errata=False):
729 try:
730 print("\n")
731 log2stdout(1, "Exporting channel info...")
732 pb = progress_bar.ProgressBar(self.pb_label,
733 self.pb_complete,
734 len(self.channel_ids),
735 self.pb_length,
736 self.pb_char)
737 pb.printAll(1)
738 for channel in self.channel_ids:
739 self.set_filename(self.fm.getChannelsFile(channel['label']))
740 dumper.XML_Dumper.dump_channels(self, [channel],
741 self.start_date, self.end_date,
742 self.use_rhn_date, self.whole_errata)
743
744 log2email(4, "Channel: %s" % channel['label'])
745 log2email(5, "Channel exported to %s" % self.fm.getChannelsFile(channel['label']))
746
747 self.copy_repomd(self.channel_comps, channel, self.fm.getChannelCompsFile)
748 self.copy_repomd(self.channel_modules, channel, self.fm.getChannelModulesFile)
749
750 pb.addTo(1)
751 pb.printIncrement()
752 pb.printComplete()
753 log2stderr(3, "Number of channels exported: %s" % str(len(self.channel_ids)))
754
755 except Exception:
756 e = sys.exc_info()[1]
757 tbout = cStringIO.StringIO()
758 Traceback(mail=0, ostream=tbout, with_locals=1)
759 raise_with_tb(ISSError("%s caught in dump_channels." % e.__class__.__name__,
760 tbout.getvalue()), sys.exc_info()[2])
761
762 - def dump_channel_packages_short(self, channel_label=None, last_modified=None, filepath=None,
763 validate_channels=False, send_headers=False,
764 open_stream=True):
778
780 try:
781 print("\n")
782 log2stdout(1, "Exporting packages...")
783 pb = progress_bar.ProgressBar(self.pb_label,
784 self.pb_complete,
785 len(self.pkg_info),
786 self.pb_length,
787 self.pb_char)
788 pb.printAll(1)
789 for pkg_info in self.pkg_info:
790 package_name = "rhn-package-" + str(pkg_info['package_id'])
791 self.set_filename(self.fm.getPackagesFile(package_name))
792 dumper.XML_Dumper.dump_packages(self, [pkg_info])
793
794 log2email(4, "Package: %s" % package_name)
795 log2email(5, "Package exported to %s" % self.fm.getPackagesFile(package_name))
796
797 pb.addTo(1)
798 pb.printIncrement()
799 pb.printComplete()
800 log2stdout(3, "Number of packages exported: %s" % str(len(self.pkg_info)))
801
802 except Exception:
803 e = sys.exc_info()[1]
804 tbout = cStringIO.StringIO()
805 Traceback(mail=0, ostream=tbout, with_locals=1)
806 raise_with_tb(ISSError("%s caught in dump_packages." % e.__class__.__name__,
807 tbout.getvalue()), sys.exc_info()[2])
808
810 try:
811 print("\n")
812 log2stdout(1, "Exporting short packages...")
813 pb = progress_bar.ProgressBar(self.pb_label,
814 self.pb_complete,
815 len(self.pkg_info),
816 self.pb_length,
817 self.pb_char)
818 pb.printAll(1)
819 for pkg_info in self.pkg_info:
820 package_name = "rhn-package-" + str(pkg_info['package_id'])
821 self.set_filename(self.fm.getShortPackagesFile(package_name))
822 dumper.XML_Dumper.dump_packages_short(self, [pkg_info])
823
824 log2email(4, "Short Package: %s" % package_name)
825 log2email(5, "Short Package exported to %s" % package_name)
826 pb.addTo(1)
827 pb.printIncrement()
828 pb.printComplete()
829 log2stdout(3, "Number of short packages exported: %s" % str(len(self.pkg_info)))
830
831 except Exception:
832 e = sys.exc_info()[1]
833 tbout = cStringIO.StringIO()
834 Traceback(mail=0, ostream=tbout, with_locals=1)
835 raise_with_tb(ISSError("%s caught in dump_packages_short." %
836 e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])
837
851
852 - def dump_errata(self, errata=None, verify_errata=False):
853 try:
854 print("\n")
855 log2stdout(1, "Exporting errata...")
856 pb = progress_bar.ProgressBar(self.pb_label,
857 self.pb_complete,
858 len(self.errata_info),
859 self.pb_length,
860 self.pb_char)
861 pb.printAll(1)
862 for errata_info in self.errata_info:
863 erratum_name = "rhn-erratum-" + str(errata_info['errata_id'])
864 self.set_filename(self.fm.getErrataFile(erratum_name))
865 dumper.XML_Dumper.dump_errata(self, [errata_info])
866
867 log2email(4, "Erratum: %s" % str(errata_info['advisory-name']))
868 log2email(5, "Erratum exported to %s" % self.fm.getErrataFile(erratum_name))
869
870 pb.addTo(1)
871 pb.printIncrement()
872 pb.printComplete()
873 log2stdout(3, "Number of errata exported: %s" % str(len(self.errata_info)))
874
875 except Exception:
876 e = sys.exc_info()[1]
877 tbout = cStringIO.StringIO()
878 Traceback(mail=0, ostream=tbout, with_locals=1)
879 raise_with_tb(ISSError("%s caught in dump_errata." % e.__class__.__name__,
880 tbout.getvalue()), sys.exc_info()[2])
881
883 try:
884 print("\n")
885 log2stdout(1, "Exporting kickstart data...")
886 pb = progress_bar.ProgressBar(self.pb_label,
887 self.pb_complete,
888 len(self.kickstart_trees),
889 self.pb_length,
890 self.pb_char)
891 pb.printAll(1)
892 for kickstart_tree in self.kickstart_trees:
893 self.set_filename(self.fm.getKickstartTreeFile(kickstart_tree['kickstart_label']))
894 dumper.XML_Dumper.dump_kickstartable_trees(self, [kickstart_tree])
895
896 log2email(5, "KS Data: %s" % str(kickstart_tree['kickstart_label']))
897
898 pb.addTo(1)
899 pb.printIncrement()
900 pb.printComplete()
901 log2stdout(3, "Amount of kickstart data exported: %s" % str(len(self.kickstart_trees)))
902
903 except Exception:
904 e = sys.exc_info()[1]
905 tbout = cStringIO.StringIO()
906 Traceback(mail=0, ostream=tbout, with_locals=1)
907 raise_with_tb(ISSError("%s caught in dump_kickstart_data." %
908 e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])
909
911 try:
912 print("\n")
913 log2stdout(1, "Exporting kickstart files...")
914 pb = progress_bar.ProgressBar(self.pb_label,
915 self.pb_complete,
916 len(self.kickstart_files),
917 self.pb_length,
918 self.pb_char)
919 pb.printAll(1)
920 for kickstart_file in self.kickstart_files:
921
922 path_to_files = os.path.join(CFG.MOUNT_POINT,
923 kickstart_file['base-path'],
924 kickstart_file['relative-path'])
925
926
927 if not os.path.exists(path_to_files):
928 raise ISSError("Missing kickstart file under satellite mount-point: %s" % (path_to_files,), "")
929
930
931 path_to_export_file = self.fm.getKickstartFileFile(
932 kickstart_file['label'],
933 kickstart_file['relative-path'])
934
935 if os.path.exists(path_to_export_file):
936
937 continue
938
939 dirs_to_file = os.path.split(path_to_export_file)[0]
940
941
942 if not os.path.exists(dirs_to_file):
943 os.makedirs(dirs_to_file)
944 try:
945 if self.hardlinks:
946
947 try:
948 os.link(path_to_files, path_to_export_file)
949 except OSError:
950 pass
951 else:
952
953 shutil.copyfile(path_to_files, path_to_export_file)
954 except IOError:
955 e = sys.exc_info()[1]
956 tbout = cStringIO.StringIO()
957 Traceback(mail=0, ostream=tbout, with_locals=1)
958 raise_with_tb(ISSError("Error: Error copying file: %s: %s" %
959 (path_to_files, e.__class__.__name__), tbout.getvalue()), sys.exc_info()[2])
960
961 log2email(5, "Kickstart File: %s" %
962 os.path.join(kickstart_file['base-path'],
963 kickstart_file['relative-path']))
964
965 pb.addTo(1)
966 pb.printIncrement()
967
968 pb.printComplete()
969 log2stdout(3, "Number of kickstart files exported: %s" % str(len(self.kickstart_files)))
970 except ISSError:
971 raise
972 except Exception:
973 e = sys.exc_info()[1]
974 tbout = cStringIO.StringIO()
975 Traceback(mail=0, ostream=tbout, with_locals=1)
976 raise_with_tb(ISSError("%s caught in dump_kickstart_files." %
977 e.__class__.__name__, tbout.getvalue()), sys.exc_info()[2])
978
979
981 try:
982 print("\n")
983 log2stdout(1, "Exporting binary RPMs...")
984 pb = progress_bar.ProgressBar(self.pb_label,
985 self.pb_complete,
986 len(self.brpms),
987 self.pb_length,
988 self.pb_char)
989 pb.printAll(1)
990 for rpm in self.brpms:
991
992 path_to_rpm = diskImportLib.rpmsPath("rhn-package-%s" % str(rpm['id']), self.mp)
993
994
995 dirs_to_rpm = os.path.split(path_to_rpm)[0]
996
997 if (not rpm['path']):
998 raise ISSError("Error: Missing RPM under the satellite mount point. (Package id: %s)" %
999 rpm['id'], "")
1000
1001 satellite_path = os.path.join(CFG.MOUNT_POINT, rpm['path'])
1002
1003 if not os.path.exists(satellite_path):
1004 raise ISSError("Error: Missing RPM under the satellite mount point: %s" % (satellite_path,), "")
1005
1006
1007 if not os.path.exists(dirs_to_rpm):
1008 os.makedirs(dirs_to_rpm)
1009
1010
1011 if os.path.exists(path_to_rpm):
1012 continue
1013
1014 try:
1015
1016 if self.hardlinks:
1017 os.link(satellite_path, path_to_rpm)
1018 else:
1019 shutil.copyfile(satellite_path, path_to_rpm)
1020 except IOError:
1021 e = sys.exc_info()[1]
1022 tbout = cStringIO.StringIO()
1023 Traceback(mail=0, ostream=tbout, with_locals=1)
1024 raise_with_tb(ISSError("Error: Error copying file %s: %s" %
1025 (os.path.join(CFG.MOUNT_POINT, rpm['path']), e.__class__.__name__),
1026 tbout.getvalue()), sys.exc_info()[2])
1027 except OSError:
1028 e = sys.exc_info()[1]
1029 tbout = cStringIO.StringIO()
1030 Traceback(mail=0, ostream=tbout, with_locals=1)
1031 raise_with_tb(ISSError("Error: Could not make hard link %s: %s (different filesystems?)" %
1032 (os.path.join(CFG.MOUNT_POINT, rpm['path']), e.__class__.__name__),
1033 tbout.getvalue()), sys.exc_info()[2])
1034
1035 log2email(5, "RPM: %s" % rpm['path'])
1036
1037 pb.addTo(1)
1038 pb.printIncrement()
1039 pb.printComplete()
1040 log2stdout(3, "Number of RPMs exported: %s" % str(len(self.brpms)))
1041 except ISSError:
1042 raise
1043
1044 except Exception:
1045 e = sys.exc_info()[1]
1046 tbout = cStringIO.StringIO()
1047 Traceback(mail=0, ostream=tbout, with_locals=1)
1048 raise_with_tb(ISSError("%s caught in dump_rpms." % e.__class__.__name__,
1049 tbout.getvalue()), sys.exc_info()[2])
1050
1055
1058 print("")
1059 print("REPORT:")
1060 report_string = get_report()
1061 sys.stdout.write(str(report_string))
1062
1066
1067 body = dumpEMAIL_LOG()
1068 if body:
1069 print("+++ sending log as an email +++")
1070 headers = {
1071 'Subject': 'Spacewalk Management Satellite Export report from %s' % os.uname()[1],
1072 }
1073
1074 sndr = 'rhn-satellite@%s' % os.uname()[1]
1075 rhnMail.send(headers, body, sender=sndr)
1076 else:
1077 print("+++ email requested, but there is nothing to send +++")
1078
1083
1084
1085
1086 -class ExporterMain:
1087
1088 - def __init__(self):
1089 initCFG('server.iss')
1090
1091
1092 self.options = UI()
1093 self.action_deps = ActionDeps(self.options)
1094 self.action_order, self.actions = self.action_deps.get_actions()
1095 if self.options.debug_level:
1096 debug_level = int(self.options.debug_level)
1097 else:
1098 debug_level = int(CFG.DEBUG)
1099
1100 CFG.set("TRACEBACK_MAIL", self.options.traceback_mail or CFG.TRACEBACK_MAIL)
1101 CFG.set("DEBUG", debug_level)
1102 CFG.set("ISSEMAIL", self.options.email)
1103
1104 initEMAIL_LOG()
1105
1106
1107 try:
1108 rhnSQL.initDB()
1109 except SQLConnectError:
1110 print('SQLERROR: There was an error connecting to the Database.')
1111 sys.exit(-1)
1112 except (SQLError, SQLSchemaError):
1113 e = sys.exc_info()[1]
1114
1115 exitWithTraceback(e, 'SQL ERROR during xml processing', -1)
1116
1117
1118 if self.options.print_configuration:
1119 CFG.show()
1120 sys.exit(0)
1121
1122 if self.options.list_channels:
1123 self.print_list_channels(self.list_channels())
1124 sys.exit(0)
1125
1126 if self.options.list_orgs:
1127 self.print_orgs(self.list_orgs())
1128 sys.exit(0)
1129
1130
1131
1132 if self.options.all_channels:
1133 channel_dict = self.list_channels()
1134 self.options.channel = []
1135 for pc in channel_dict:
1136 self.options.channel.append(pc)
1137 self.options.channel.extend(channel_dict[pc])
1138 elif self.options.channel:
1139 if not isinstance(self.options.channel, type([])):
1140 self.options.channel = [self.options.channel]
1141 else:
1142 sys.stdout.write("--channel not included!\n")
1143 sys.exit(0)
1144
1145
1146 if self.options.all_orgs:
1147 orgs = self.list_orgs()
1148 self.options.org = []
1149 for org in orgs:
1150 self.options.org.append(org['id'])
1151 elif self.options.org:
1152 if not type(self.options.org, type([])):
1153 self.options.org = [self.options.org]
1154 orgs = {}
1155 for org in self.list_orgs():
1156 orgs[org['name']] = str(org['id'])
1157 using_orgs = []
1158 for org in self.options.org:
1159
1160 if org in list(orgs.values()):
1161 using_orgs.append(org)
1162 elif org in list(orgs.keys()):
1163 using_orgs.append(orgs[org])
1164 else:
1165 sys.stdout.write("Org not found: %s\n" % org)
1166 exit(0)
1167 self.options.org = using_orgs
1168 else:
1169 self.options.org = []
1170 self.options.org = [str(x) for x in self.options.org]
1171
1172
1173
1174 if self.options.dir:
1175 self.isos_dir = os.path.join(self.options.dir, "satellite-isos")
1176 self.outputdir = self.options.dir
1177 else:
1178 sys.stdout.write("--dir not included!\n")
1179 sys.exit(0)
1180
1181 if self.options.use_sync_date and self.options.use_rhn_date:
1182 sys.stderr.write("--use-rhn-date and --use-sync-date are mutually exclusive.\n")
1183 sys.exit(1)
1184 elif self.options.use_sync_date:
1185 self.options.use_rhn_date = False
1186 else:
1187 self.options.use_rhn_date = True
1188
1189 if self.options.end_date and not self.options.start_date:
1190 sys.stderr.write("--end-date must be used with --start-date.\n")
1191 sys.exit(1)
1192
1193 if self.options.end_date and len(self.options.end_date) < 8:
1194 sys.stdout.write(_("format of %s should be at least YYYYMMDD.\n") % '--end-date')
1195 sys.exit(1)
1196
1197 if self.options.start_date and len(self.options.start_date) < 8:
1198 sys.stdout.write(_("format of %s should be at least YYYYMMDD.\n") % '--start-date')
1199 sys.exit(1)
1200
1201 if self.options.start_date:
1202 if self.options.end_date is None:
1203 self.end_date = time.strftime("%Y%m%d%H%M%S")
1204 else:
1205 self.end_date = self.options.end_date.ljust(14, '0')
1206
1207 self.start_date = self.options.start_date.ljust(14, '0')
1208 print("start date limit: %s" % self.start_date)
1209 print("end date limit: %s" % self.end_date)
1210 else:
1211 self.start_date = None
1212 self.end_date = None
1213
1214 if self.start_date and self.options.whole_errata:
1215 self.whole_errata = self.options.whole_errata
1216
1217
1218 if os.access(self.outputdir, os.F_OK | os.R_OK | os.W_OK):
1219 if os.path.isdir(self.outputdir):
1220 self.dumper = Dumper(self.outputdir,
1221 self.options.channel,
1222 self.options.org,
1223 self.options.hard_links,
1224 start_date=self.start_date,
1225 end_date=self.end_date,
1226 use_rhn_date=self.options.use_rhn_date,
1227 whole_errata=self.options.whole_errata)
1228 self.actionmap = {
1229 'arches': {'dump': self.dumper.dump_arches},
1230 'arches-extra': {'dump': self.dumper.dump_server_group_type_server_arches},
1231 'blacklists': {'dump': self.dumper.dump_blacklist_obsoletes},
1232 'channel-families': {'dump': self.dumper.dump_channel_families},
1233 'channels': {'dump': self.dumper.dump_channels},
1234 'packages': {'dump': self.dumper.dump_packages},
1235 'short': {'dump': self.dumper.dump_packages_short},
1236
1237
1238 'errata': {'dump': self.dumper.dump_errata},
1239 'kickstarts': {'dump': [self.dumper.dump_kickstart_data,
1240 self.dumper.dump_kickstart_files]},
1241 'rpms': {'dump': self.dumper.dump_rpms},
1242 'orgs': {'dump': self.dumper.dump_orgs},
1243 'productnames': {'dump': self.dumper.dump_product_names},
1244 }
1245 else:
1246 print("The output directory is not a directory")
1247 sys.exit(-1)
1248 else:
1249 print("can't access output directory")
1250 sys.exit(-1)
1251
1252 @staticmethod
1254 """ return all available channels
1255
1256 the returned format is dictionary containing base_label as keys and value is list
1257 of labels of child channels
1258 """
1259
1260
1261
1262 channel_dict = {}
1263
1264
1265
1266 base_channel_query = rhnSQL.Statement("""
1267 select id, label
1268 from rhnChannel
1269 where parent_channel is null
1270 """)
1271 base_channel_data = rhnSQL.prepare(base_channel_query)
1272 base_channel_data.execute()
1273 base_channels = base_channel_data.fetchall_dict()
1274
1275
1276 child_channel_query = rhnSQL.Statement("""
1277 select id, label, parent_channel
1278 from rhnChannel
1279 where parent_channel = :id
1280 """)
1281 child_channel_data = rhnSQL.prepare(child_channel_query)
1282
1283 if base_channels:
1284 for ch in base_channels:
1285 base_label = ch['label']
1286 base_id = ch['id']
1287
1288
1289
1290 if not base_label in channel_dict:
1291 channel_dict[base_label] = []
1292
1293
1294 child_channel_data.execute(id=base_id)
1295 child_channels = child_channel_data.fetchall_dict()
1296
1297
1298
1299
1300
1301 if child_channels:
1302 for child in child_channels:
1303 child_label = child['label']
1304 channel_dict[base_label].append(child_label)
1305 return channel_dict
1306
1307 @staticmethod
1308 - def print_list_channels(channel_dict):
1309 """ channel_dict is dictionary containing base_label as keys and value is list
1310 of labels of child channels
1311 """
1312 if channel_dict:
1313
1314 print("Channel List:")
1315 print("B = Base Channel")
1316 print("C = Child Channel")
1317 print("")
1318
1319 base_template = "B %s"
1320 child_template = "C\t%s"
1321
1322
1323 for pc in channel_dict.keys():
1324 print(base_template % (pc,))
1325 for cc in channel_dict[pc]:
1326 print(child_template % (cc,))
1327 print(" ")
1328 else:
1329 print("No Channels available for listing.")
1330
1331 @staticmethod
1333 """
1334 Return a list of all orgs.
1335 """
1336 org_query = rhnSQL.Statement("""
1337 select id, name
1338 from web_customer
1339 """)
1340 org_data = rhnSQL.prepare(org_query)
1341 org_data.execute()
1342 return org_data.fetchall_dict()
1343
1344 @staticmethod
1345 - def print_orgs(orgs):
1346 if orgs:
1347 print("Orgs available for export:")
1348 for org in orgs:
1349 print("Id: %s, Name: \'%s\'" % (org['id'], org['name']))
1350 else:
1351 print("No Orgs available for listing.")
1352
1354
1355 try:
1356 for action in self.action_order:
1357 if self.actions[action] != 1:
1358 continue
1359
1360 if not action in self.actionmap:
1361
1362
1363 sys.stderr.write("List of actions doesn't have %s.\n" % (action,))
1364 continue
1365
1366 if isinstance(self.actionmap[action]['dump'], type([])):
1367 for dmp in self.actionmap[action]['dump']:
1368 dmp()
1369 else:
1370 self.actionmap[action]['dump']()
1371
1372
1373 if action == 'rpms':
1374 continue
1375 elif action == 'arches-extra':
1376 action = 'arches'
1377 elif action == 'short':
1378 action = 'packages_short'
1379 elif action == 'channel-families':
1380 action = 'channel_families'
1381 elif action == 'kickstarts':
1382 action = 'kickstart_trees'
1383 elif action == 'productnames':
1384 action = 'product_names'
1385
1386 os_data_dir = os.path.join(self.outputdir, action)
1387 if not os.path.exists(os_data_dir):
1388 continue
1389
1390 for fpath, _dirs, files in os.walk(os_data_dir):
1391 for f in files:
1392 if f.endswith(".xml") or f.endswith(".yaml"):
1393 filepath = os.path.join(fpath, f)
1394 compress_file(filepath)
1395
1396 if self.options.make_isos:
1397
1398 iso_output = self.isos_dir
1399 if not os.path.exists(iso_output):
1400 os.makedirs(iso_output)
1401
1402 iss_isos.create_isos(self.outputdir, iso_output,
1403 "rhn-export", self.start_date, self.end_date,
1404 iso_type=self.options.make_isos)
1405
1406
1407 if os.path.exists(iso_output):
1408 f = open(os.path.join(iso_output, 'MD5SUM'), 'w')
1409 for iso_file in os.listdir(iso_output):
1410 if self.options.make_isos != "dvds" and iso_file != "MD5SUM":
1411 md5_val = getFileChecksum('md5', (os.path.join(iso_output, iso_file)))
1412 md5str = "%s %s\n" % (md5_val, iso_file)
1413 f.write(md5str)
1414 f.close()
1415
1416 if self.options.email:
1417 sendMail()
1418
1419 if self.options.print_report:
1420 print_report()
1421
1422 except SystemExit:
1423 sys.exit(0)
1424
1425 except ISSError:
1426 isserror = sys.exc_info()[1]
1427
1428
1429 tb = isserror.tb
1430 msg = isserror.msg
1431 handle_error(msg, tb)
1432
1433 if self.options.email:
1434 sendMail()
1435 if self.options.print_report:
1436 print_report()
1437
1438 sys.exit(-1)
1439
1440 except Exception:
1441 e = sys.exc_info()[1]
1442
1443 tbout = cStringIO.StringIO()
1444 Traceback(mail=0, ostream=tbout, with_locals=1)
1445 msg = "Error: %s caught!" % e.__class__.__name__
1446 handle_error(msg, tbout.getvalue())
1447 if self.options.email:
1448 sendMail()
1449 if self.options.print_report:
1450 print_report()
1451 sys.exit(-1)
1452
def compress_file(f):
    """
    Gzip the given file in place (f -> f.gz, best compression) and then
    remove the original file.
    """
    # Binary mode is required: GzipFile writes bytes, so reading the source
    # in text mode would raise TypeError on Python 3 (and needlessly decode
    # the XML/YAML payload). 'with' guarantees both handles are closed even
    # if the copy fails.
    with open(f, 'rb') as datafile:
        with gzip.GzipFile(f + '.gz', 'wb', 9) as gzipper:
            shutil.copyfileobj(datafile, gzipper)

    os.unlink(f)
1467
if __name__ == "__main__":
    # Script entry point: build the exporter and run the full export.
    ExporterMain().main()
1471