
Source Code for Module backend.satellite_tools.repo_plugins.deb_src

#
# Copyright (c) 2016--2018 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#

import sys
import os.path
from shutil import rmtree
import time
import re
import fnmatch
import requests
from spacewalk.common import fileutils
from spacewalk.satellite_tools.download import get_proxies
from spacewalk.satellite_tools.repo_plugins import ContentPackage, CACHE_DIR
from spacewalk.satellite_tools.syncLib import log2
from spacewalk.common.rhnConfig import CFG, initCFG
try:
    # Python 2
    import urlparse
except ImportError:
    # Python 3
    import urllib.parse as urlparse  # pylint: disable=F0401,E0611

RETRIES = 10
RETRY_DELAY = 1
FORMAT_PRIORITY = ['.xz', '.gz', '']


class DebPackage(object):
    def __init__(self):
        self.name = None
        self.epoch = None
        self.version = None
        self.release = None
        self.arch = None
        self.relativepath = None
        self.checksum_type = None
        self.checksum = None

    def __getitem__(self, key):
        return getattr(self, key)

    def __setitem__(self, key, value):
        return setattr(self, key, value)

    def is_populated(self):
        return all([attribute is not None for attribute in (self.name, self.epoch, self.version, self.release,
                                                             self.arch, self.relativepath, self.checksum_type,
                                                             self.checksum)])


class DebRepo(object):
    # url example - http://ftp.debian.org/debian/dists/jessie/main/binary-amd64/
    def __init__(self, url, cache_dir, pkg_dir, proxy_addr="", proxy_user="", proxy_pass=""):
        self.url = url
        parts = url.rsplit('/dists/', 1)
        self.base_url = [parts[0]]
        # Make sure baseurl ends with / and urljoin will work correctly
        if self.base_url[0][-1] != '/':
            self.base_url[0] += '/'
        self.urls = self.base_url
        self.sslclientcert = self.sslclientkey = self.sslcacert = None
        self.proxy = proxy_addr
        self.proxy_username = proxy_user
        self.proxy_password = proxy_pass

        self.basecachedir = cache_dir
        if not os.path.isdir(self.basecachedir):
            fileutils.makedirs(self.basecachedir, user='apache', group='apache')
        self.includepkgs = []
        self.exclude = []
        self.pkgdir = pkg_dir
        self.http_headers = {}

    def _download(self, url):
        for _ in range(0, RETRIES):
            try:
                proxies = {}
                if self.proxy:
                    proxies = {
                        'http': 'http://' + self.proxy,
                        'https': 'http://' + self.proxy
                    }
                    if self.proxy_username and self.proxy_password:
                        proxies = {
                            'http': 'http://' + self.proxy_username + ":" + self.proxy_password + "@" + self.proxy,
                            'https': 'http://' + self.proxy_username + ":" + self.proxy_password + "@" + self.proxy,
                        }
                data = requests.get(url, proxies=proxies, cert=(self.sslclientcert, self.sslclientkey),
                                    verify=self.sslcacert)
                if not data.ok:
                    return ''
                filename = self.basecachedir + '/' + os.path.basename(url)
                fd = open(filename, 'wb')
                try:
                    for chunk in data.iter_content(chunk_size=1024):
                        fd.write(chunk)
                finally:
                    if fd is not None:
                        fd.close()
                return filename
            except requests.exceptions.RequestException:
                print("ERROR: requests.exceptions.RequestException occurred")
                time.sleep(RETRY_DELAY)

        return ''

    def get_package_list(self):
        decompressed = None
        packages_raw = []
        to_return = []

        for extension in FORMAT_PRIORITY:
            url = self.url + '/Packages' + extension
            filename = self._download(url)
            if filename:
                decompressed = fileutils.decompress_open(filename)
                break

        if decompressed:
            for pkg in decompressed.read().split("\n\n"):
                packages_raw.append(pkg)
            decompressed.close()
        else:
            print("ERROR: Download of package list failed.")

        # Parse and format package metadata
        for chunk in packages_raw:
            package = DebPackage()
            package.epoch = ""
            lines = chunk.split("\n")
            checksums = {}
            for line in lines:
                pair = line.split(" ", 1)
                if pair[0] == "Package:":
                    package.name = pair[1]
                elif pair[0] == "Architecture:":
                    package.arch = pair[1] + '-deb'
                elif pair[0] == "Version:":
                    package['epoch'] = ''
                    version = pair[1]
                    if version.find(':') != -1:
                        package['epoch'], version = version.split(':')
                    if version.find('-') != -1:
                        tmp = version.split('-')
                        package['version'] = '-'.join(tmp[:-1])
                        package['release'] = tmp[-1]
                    else:
                        package['version'] = version
                        package['release'] = 'X'
                elif pair[0] == "Filename:":
                    package.relativepath = pair[1]
                elif pair[0] == "SHA256:":
                    checksums['sha256'] = pair[1]
                elif pair[0] == "SHA1:":
                    checksums['sha1'] = pair[1]
                elif pair[0] == "MD5sum:":
                    checksums['md5'] = pair[1]

            # Pick best available checksum
            if 'sha256' in checksums:
                package.checksum_type = 'sha256'
                package.checksum = checksums['sha256']
            elif 'sha1' in checksums:
                package.checksum_type = 'sha1'
                package.checksum = checksums['sha1']
            elif 'md5' in checksums:
                package.checksum_type = 'md5'
                package.checksum = checksums['md5']

            if package.is_populated():
                to_return.append(package)

        return to_return
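
    # Illustration (not part of the original module): a hypothetical stanza from a
    # Debian Packages index and the DebPackage fields get_package_list() would
    # derive from it. The package name, version and checksum below are made up.
    #
    #   Package: bash
    #   Version: 1:4.3-11+deb8u1
    #   Architecture: amd64
    #   Filename: pool/main/b/bash/bash_4.3-11+deb8u1_amd64.deb
    #   SHA256: d3b07384d113edec49eaa6238ad5ff00...
    #
    # parses to name='bash', epoch='1', version='4.3', release='11+deb8u1',
    # arch='amd64-deb', relativepath='pool/main/b/bash/bash_4.3-11+deb8u1_amd64.deb',
    # checksum_type='sha256' (SHA256 is preferred over SHA1 and MD5sum).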


class ContentSource(object):

    def __init__(self, url, name, org=1, channel_label="", ca_cert_file=None, client_cert_file=None,
                 client_key_file=None):
        # pylint: disable=W0613
        self.url = url
        self.name = name
        if org:
            self.org = org
        else:
            self.org = "NULL"

        # read the proxy configuration in /etc/rhn/rhn.conf
        initCFG('server.satellite')
        self.proxy_addr = CFG.http_proxy
        self.proxy_user = CFG.http_proxy_username
        self.proxy_pass = CFG.http_proxy_password
        self.authtoken = None

        self.repo = DebRepo(url, os.path.join(CACHE_DIR, self.org, name),
                            os.path.join(CFG.MOUNT_POINT, CFG.PREPENDED_DIR, self.org, 'stage'),
                            self.proxy_addr, self.proxy_user, self.proxy_pass)

        self.num_packages = 0
        self.num_excluded = 0

        # keep authtokens for mirroring
        (_scheme, _netloc, _path, query, _fragid) = urlparse.urlsplit(url)
        if query:
            self.authtoken = query

    def list_packages(self, filters, latest):
        """ list packages"""

        pkglist = self.repo.get_package_list()
        self.num_packages = len(pkglist)
        if latest:
            # TODO
            pass
        pkglist.sort(self._sort_packages)

        if not filters:
            # if there's no include/exclude filter on command line or in database
            for p in self.repo.includepkgs:
                filters.append(('+', [p]))
            for p in self.repo.exclude:
                filters.append(('-', [p]))

        if filters:
            pkglist = self._filter_packages(pkglist, filters)
            self.num_excluded = self.num_packages - len(pkglist)

        to_return = []
        for pack in pkglist:
            new_pack = ContentPackage()
            new_pack.setNVREA(pack.name, pack.version, pack.release,
                              pack.epoch, pack.arch)
            new_pack.unique_id = pack
            new_pack.checksum_type = pack.checksum_type
            new_pack.checksum = pack.checksum
            to_return.append(new_pack)
        return to_return

    @staticmethod
    def _sort_packages(pkg1, pkg2):
        """cmp-style comparison of two deb packages by name, used to sort the package list"""
        if pkg1.name > pkg2.name:
            return 1
        elif pkg1.name == pkg2.name:
            return 0
        else:
            return -1

    @staticmethod
    def _filter_packages(packages, filters):
        """ implement include / exclude logic
            filters are: [ ('+', includelist1), ('-', excludelist1),
                           ('+', includelist2), ... ]
        """
        if filters is None:
            return []

        selected = []
        excluded = []
        allmatched_include = []
        allmatched_exclude = []
        if filters[0][0] == '-':
            # first filter is exclude, start with full package list
            # and then exclude from it
            selected = packages
        else:
            excluded = packages

        for filter_item in filters:
            sense, pkg_list = filter_item
            regex = fnmatch.translate(pkg_list[0])
            reobj = re.compile(regex)
            if sense == '+':
                # include
                for excluded_pkg in excluded:
                    if reobj.match(excluded_pkg['name']):
                        allmatched_include.insert(0, excluded_pkg)
                        selected.insert(0, excluded_pkg)
                for pkg in allmatched_include:
                    if pkg in excluded:
                        excluded.remove(pkg)
            elif sense == '-':
                # exclude
                for selected_pkg in selected:
                    if reobj.match(selected_pkg['name']):
                        allmatched_exclude.insert(0, selected_pkg)
                        excluded.insert(0, selected_pkg)

                for pkg in allmatched_exclude:
                    if pkg in selected:
                        selected.remove(pkg)
                excluded = (excluded + allmatched_exclude)
            else:
                raise IOError("Filters are malformed")
        return selected
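
    # Illustration (not from the original source): given
    #     filters = [('+', ['lib*']), ('-', ['libssl*'])]
    # _filter_packages() first selects every package whose name matches the
    # glob 'lib*', then drops the ones matching 'libssl*' from that selection.
    # Note that only pkg_list[0] of each filter entry is turned into a pattern;
    # any additional patterns in the same list are ignored by this implementation.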

    def clear_cache(self, directory=None):
        if directory is None:
            directory = os.path.join(CACHE_DIR, self.org, self.name)
        # remove content in directory
        for item in os.listdir(directory):
            path = os.path.join(directory, item)
            if os.path.isfile(path):
                os.unlink(path)
            elif os.path.isdir(path):
                rmtree(path)

    @staticmethod
    def get_updates():
        # There isn't any update info in the repository
        return []

    @staticmethod
    def get_groups():
        # There aren't any
        return None

    # Get download parameters for threaded downloader
    def set_download_parameters(self, params, relative_path, target_file, checksum_type=None, checksum_value=None,
                                bytes_range=None):
        # Create directories if needed
        target_dir = os.path.dirname(target_file)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir, int('0755', 8))

        params['urls'] = self.repo.urls
        params['relative_path'] = relative_path
        params['authtoken'] = self.authtoken
        params['target_file'] = target_file
        params['ssl_ca_cert'] = self.repo.sslcacert
        params['ssl_client_cert'] = self.repo.sslclientcert
        params['ssl_client_key'] = self.repo.sslclientkey
        params['checksum_type'] = checksum_type
        params['checksum'] = checksum_value
        params['bytes_range'] = bytes_range
        params['proxy'] = self.proxy_addr
        params['proxy_username'] = self.proxy_user
        params['proxy_password'] = self.proxy_pass
        params['http_headers'] = self.repo.http_headers
        # Older urlgrabber compatibility
        params['proxies'] = get_proxies(self.repo.proxy, self.repo.proxy_username, self.repo.proxy_password)

    @staticmethod
    def get_file(path, local_base=None):
        # pylint: disable=W0613
        # Called from import_kickstarts, not working for deb repo
        log2(0, 0, "Unable to download path %s from deb repo." % path, stream=sys.stderr)
        return None
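
This plugin is normally driven by spacewalk-repo-sync rather than imported directly. As a rough, hypothetical sketch of direct use (not taken from the source above): it assumes a configured Spacewalk/Satellite backend (a readable /etc/rhn/rhn.conf and writable CACHE_DIR), a Python 2 interpreter (list_packages relies on a cmp-style sort), and a made-up repository URL and channel name; the org id is passed as a string because it is joined into the cache path.

    from spacewalk.satellite_tools.repo_plugins.deb_src import ContentSource

    # Hypothetical repository URL and channel name, for illustration only.
    source = ContentSource('http://ftp.debian.org/debian/dists/jessie/main/binary-amd64/',
                           'debian-jessie-main-amd64', org='1')

    # Download and parse Packages(.xz/.gz); apply no include/exclude filters.
    for pkg in source.list_packages(filters=[], latest=False):
        deb = pkg.unique_id  # the parsed DebPackage defined in this module
        print("%s %s-%s %s %s:%s" % (deb.name, deb.version, deb.release,
                                     deb.arch, deb.checksum_type, deb.checksum))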