backend/satellite_exporter/Makefile | 2
backend/satellite_exporter/exporter/Makefile | 10
backend/satellite_exporter/exporter/__init__.py | 16
backend/satellite_exporter/exporter/dumper.py | 984 -----------------
backend/satellite_exporter/exporter/string_buffer.py | 52
backend/satellite_exporter/handlers/non_auth_dumper.py | 6
backend/satellite_tools/disk_dumper/dumper.py | 405 ++++++
backend/spacewalk-backend.spec | 10
rel-eng/packages/spacewalk-backend | 2
9 files changed, 409 insertions(+), 1078 deletions(-)
New commits:
commit 7bc4ee42f72baf9a0bed7a24a94aa338e7a43782
Author: Michael Mraka <michael.mraka@redhat.com>
Date: Tue Jan 5 13:21:04 2010 +0100
Automatic commit of package [spacewalk-backend] release [0.8.15-1].
diff --git a/backend/spacewalk-backend.spec b/backend/spacewalk-backend.spec
index c607d22..23ac2a2 100644
--- a/backend/spacewalk-backend.spec
+++ b/backend/spacewalk-backend.spec
@@ -8,7 +8,7 @@ Name: spacewalk-backend
Summary: Common programs needed to be installed on the Spacewalk servers/proxies
Group: Applications/Internet
License: GPLv2
-Version: 0.8.14
+Version: 0.8.15
Release: 1%{?dist}
URL: https://fedorahosted.org/spacewalk
Source0: https://fedorahosted.org/releases/s/p/spacewalk/%{name}-%{version}.tar.gz
@@ -588,6 +588,9 @@ rm -f %{rhnconf}/rhnSecret.py*
# $Id$
%changelog
+* Tue Jan 05 2010 Michael Mraka <michael.mraka@redhat.com> 0.8.15-1
+- merged satellite_exporter/exporter into satellite_tools/disk_dumper
+
* Mon Jan 04 2010 Michael Mraka <michael.mraka@redhat.com> 0.8.14-1
- more fixes in SHA256 implementation
diff --git a/rel-eng/packages/spacewalk-backend b/rel-eng/packages/spacewalk-backend
index 48d6f9c..1abd8db 100644
--- a/rel-eng/packages/spacewalk-backend
+++ b/rel-eng/packages/spacewalk-backend
@@ -1 +1 @@
-0.8.14-1 backend/
+0.8.15-1 backend/
commit 4483d65f0177c827af0b80a3220fd3d0287ad0b5
Author: Michael Mraka <michael.mraka@redhat.com>
Date: Tue Jan 5 13:00:32 2010 +0100
removed satellite_exporter/exporter from spec
diff --git a/backend/satellite_exporter/Makefile b/backend/satellite_exporter/Makefile
index c01d832..2689598 100644
--- a/backend/satellite_exporter/Makefile
+++ b/backend/satellite_exporter/Makefile
@@ -1,7 +1,7 @@
# Makefile for the backend directory
#
-CODE_DIRS = handlers exporter
+CODE_DIRS = handlers
SUBDIR = satellite_exporter
FILES = __init__ constants satexport
diff --git a/backend/spacewalk-backend.spec b/backend/spacewalk-backend.spec
index cad1a9d..c607d22 100644
--- a/backend/spacewalk-backend.spec
+++ b/backend/spacewalk-backend.spec
@@ -445,11 +445,6 @@ rm -f %{rhnconf}/rhnSecret.py*
%{rhnroot}/satellite_exporter/satexport.py*
%{rhnroot}/satellite_exporter/constants.py*
-%dir %{rhnroot}/satellite_exporter/exporter
-%{rhnroot}/satellite_exporter/exporter/__init__.py*
-%{rhnroot}/satellite_exporter/exporter/dumper.py*
-%{rhnroot}/satellite_exporter/exporter/string_buffer.py*
-
%dir %{rhnroot}/satellite_exporter/handlers
%{rhnroot}/satellite_exporter/handlers/__init__.py*
%{rhnroot}/satellite_exporter/handlers/non_auth_dumper.py*
commit 93204d74ad5bf08365afb7168d01c0fc231f28d0
Author: Michael Mraka <michael.mraka@redhat.com>
Date: Tue Jan 5 12:57:49 2010 +0100
deleted the rest of satellite_exporter/exporter directory
diff --git a/backend/satellite_exporter/exporter/Makefile b/backend/satellite_exporter/exporter/Makefile
deleted file mode 100644
index 0245eaa..0000000
--- a/backend/satellite_exporter/exporter/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-# Makefile for pkgupload
-#
-# $Id: Makefile 118393 2007-12-27 17:38:45Z jbowes@REDHAT.COM $
-
-TOP = ../..
-
-SUBDIR = satellite_exporter/exporter
-FILES = __init__ dumper string_buffer
-
-include $(TOP)/Makefile.defs
diff --git a/backend/satellite_exporter/exporter/__init__.py b/backend/satellite_exporter/exporter/__init__.py
deleted file mode 100644
index 717a6a4..0000000
--- a/backend/satellite_exporter/exporter/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2008 Red Hat, Inc.
-# Init module
-#
-# This software is licensed to you under the GNU General Public License,
-# version 2 (GPLv2). There is NO WARRANTY for this software, express or
-# implied, including the implied warranties of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
-# along with this software; if not, see
-# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
-#
-# Red Hat trademarks are not licensed under GPLv2. No permission is
-# granted to use or replicate Red Hat trademarks that are incorporated
-# in this software or its documentation.
-#
-
-__all__ = []
commit 801d835c2e49b65a732d5d8d766632d834adeb2a
Author: Michael Mraka <michael.mraka@redhat.com>
Date: Tue Jan 5 11:29:47 2010 +0100
merged satellite_exporter/exporter/dumper.py into satellite_tools/disk_dumper/dumper.py
diff --git a/backend/satellite_exporter/exporter/dumper.py b/backend/satellite_exporter/exporter/dumper.py
deleted file mode 100644
index 093d96c..0000000
--- a/backend/satellite_exporter/exporter/dumper.py
+++ /dev/null
@@ -1,984 +0,0 @@
-#!/usr/bin/python
-# Copyright (C) 2008 Red Hat, Inc.
-#
-# The actual dumper
-#
-# This software is licensed to you under the GNU General Public License,
-# version 2 (GPLv2). There is NO WARRANTY for this software, express or
-# implied, including the implied warranties of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
-# along with this software; if not, see
-# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
-#
-# Red Hat trademarks are not licensed under GPLv2. No permission is
-# granted to use or replicate Red Hat trademarks that are incorporated
-# in this software or its documentation.
-#
-
-import time
-import math
-import gzip
-import tempfile
-from types import ListType
-from cStringIO import StringIO
-
-from common import log_debug, log_error, rhnFault, UserDictCase, rhnCache, \
- CFG, rhnLib
-from server import rhnSQL, rhnDatabaseCache
-from satellite_tools.exporter import exportLib, xmlWriter
-from satellite_exporter import constants
-from satellite_exporter.exporter.string_buffer import StringBuffer
-
-# A wrapper class for a database statement
-class DatabaseStatement:
- def __init__(self, **kwparams):
- self.statement = None
- self.init_params = kwparams
-
- def add_params(self, **kwparams):
- self.init_params.update(kwparams)
-
- def set_statement(self, statement):
- self.statement = statement
- return self
-
- def execute(self, **kwparams):
- kwparams.update(self.init_params)
- return self.statement.execute(**kwparams)
-
- def next(self):
- return self.statement.fetchone_dict()
-
- def __getattr__(self, name):
- return getattr(self.statement, name)
-
-class XML_Dumper:
- def __init__(self, req):
- self.compress_level = 5
- self.headers_out = UserDictCase()
- self._raw_stream = req
- self._raw_stream.content_type = 'application/octet-stream'
- # State machine
- self._headers_sent = 0
- self._is_closed = 0
- self._compressed_stream = None
- # Redefine in subclasses
- self._channel_family_query = """
- select pcf.channel_family_id, to_number(null) quantity
- from rhnPublicChannelFamily pcf
- """
-
- def _send_headers(self, error=0, init_compressed_stream=1):
- log_debug(4, "is_closed", self._is_closed)
- if self._is_closed:
- raise Exception, "Trying to write to a closed connection"
- if self._headers_sent:
- return
- self._headers_sent = 1
- if self.compress_level:
- self.headers_out['Content-Encoding'] = 'gzip'
- # Send the headers
- if error:
- # No compression
- self.compress_level = 0
- self._raw_stream.content_type = 'text/xml'
- for h, v in self.headers_out.items():
- self._raw_stream.headers_out[h] = str(v)
- self._raw_stream.send_http_header()
- # If need be, start gzipping
- if self.compress_level and init_compressed_stream:
- log_debug(4, "Compressing with factor %s" % self.compress_level)
- self._compressed_stream = gzip.GzipFile(None, "wb",
- self.compress_level, self._raw_stream)
-
- def send(self, data):
- log_debug(3, "Sending %d bytes" % len(data))
- try:
- self._send_headers()
- if self._compressed_stream:
- log_debug(4, "Sending through a compressed stream")
- self._compressed_stream.write(data)
- else:
- self._raw_stream.write(data)
- except IOError:
- log_error("Client appears to have closed connection")
- self.close()
- raise ClosedConnectionError
- log_debug(5, "Bytes sent", len(data))
-
- write = send
-
- def close(self):
- log_debug(2, "Closing")
- if self._is_closed:
- log_debug(3, "Already closed")
- return
-
- if self._compressed_stream:
- log_debug(5, "Closing a compressed stream")
- try:
- self._compressed_stream.close()
- except IOError, e:
- # Remote end has closed connection already
- log_error("Error closing the stream", str(e))
- pass
- self._compressed_stream = None
- self._is_closed = 1
- log_debug(3, "Closed")
-
- # Override the get_*_statement methods in subclasses
- def get_channel_families_statement(self):
- query = """
- select cf.*, scf.quantity max_members
- from rhnChannelFamily cf,
- (%s
- ) scf
- where scf.channel_family_id = cf.id
- """ % self._channel_family_query
- return DatabaseStatement().set_statement(rhnSQL.prepare(query))
-
- def get_channels_statement(self):
- query = """
- select c.id channel_id, c.label,
- TO_CHAR(c.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnChannel c, rhnChannelFamilyMembers cfm,
- (%s
- ) scf
- where scf.channel_family_id = cfm.channel_family_id
- and cfm.channel_id = c.id
- """ % self._channel_family_query
- return DatabaseStatement().set_statement(rhnSQL.prepare(query))
-
- def get_packages_statement(self):
- query = """
- select p.id package_id,
- TO_CHAR(p.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnChannelPackage cp, rhnPackage p,
- rhnChannelFamilyMembers cfm,
- (%s
- ) scf
- where scf.channel_family_id = cfm.channel_family_id
- and cfm.channel_id = cp.channel_id
- and cp.package_id = :package_id
- and p.id = :package_id
- """ % self._channel_family_query
- return DatabaseStatement().set_statement(rhnSQL.prepare(query))
-
- def get_source_packages_statement(self):
- query = """
- select ps.id package_id,
- TO_CHAR(ps.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnChannelPackage cp, rhnPackage p, rhnPackageSource ps,
- rhnChannelFamilyMembers cfm,
- (%s
- ) scf
- where scf.channel_family_id = cfm.channel_family_id
- and cfm.channel_id = cp.channel_id
- and cp.package_id = p.id
- and p.source_rpm_id = ps.source_rpm_id
- and ((p.org_id is null and ps.org_id is null) or
- p.org_id = ps.org_id)
- and ps.id = :package_id
- """ % self._channel_family_query
- return DatabaseStatement().set_statement(rhnSQL.prepare(query))
-
- def get_errata_statement(self):
- query = """
- select e.id errata_id,
- TO_CHAR(e.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnChannelErrata ce, rhnErrata e,
- rhnChannelFamilyMembers cfm,
- (%s
- ) scf
- where scf.channel_family_id = cfm.channel_family_id
- and cfm.channel_id = ce.channel_id
- and ce.errata_id = :errata_id
- and e.id = :errata_id
- """ % self._channel_family_query
- return DatabaseStatement().set_statement(rhnSQL.prepare(query))
-
- def _get_xml_writer(self):
- return xmlWriter.XMLWriter(stream=StringBuffer(self))
-
- # Dumper functions here
- def dump_blacklist_obsoletes(self):
- log_debug(2)
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- exportLib.BlacklistObsoletesDumper(writer))
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_arches(self, rpm_arch_type_only=0):
- log_debug(2)
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- exportLib.ChannelArchesDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- exportLib.PackageArchesDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- exportLib.ServerArchesDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- exportLib.CPUArchesDumper(writer),
- exportLib.ServerPackageArchCompatDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- exportLib.ServerChannelArchCompatDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- exportLib.ChannelPackageArchCompatDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
- )
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_server_group_type_server_arches(self, rpm_arch_type_only=0,
- virt_filter=0):
- log_debug(2)
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- exportLib.ServerGroupTypeServerArchCompatDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only, virt_filter=virt_filter),
- )
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_channel_families(self, virt_filter=0):
- log_debug(2)
-
- h = self.get_channel_families_statement()
- h.execute()
-
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- exportLib.ChannelFamiliesDumper(writer,
- data_iterator=h, null_max_members=0, virt_filter=virt_filter),)
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_channels(self, channel_labels=None):
- log_debug(2)
- channels = self._validate_channels(channel_labels=channel_labels)
-
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer, ChannelsDumper(writer,
- channels=channels.values()))
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_channel_packages_short(self, channel_label, last_modified):
- log_debug(2, channel_label)
- channels = self._validate_channels(channel_labels=[channel_label])
- channel_obj = channels[channel_label]
- db_last_modified = int(rhnLib.timestamp(channel_obj['last_modified']))
- last_modified = int(rhnLib.timestamp(last_modified))
- log_debug(3, "last modified", last_modified, "db last modified",
- db_last_modified)
- if last_modified != db_last_modified:
- raise rhnFault(3013, "The requested channel version does not match"
- " the upstream version", explain=0)
- channel_id = channel_obj['channel_id']
- key = "xml-channel-packages/rhn-channel-%d.data" % channel_id
- # Try to get everything off of the cache
- val = rhnCache.get(key, compressed=0, raw=1, modified=last_modified)
- if val is None:
- # Not generated yet
- log_debug(4, "Cache MISS for %s (%s)" % (channel_label,
- channel_id))
- stream = self._cache_channel_packages_short(channel_id, key,
- last_modified)
- else:
- log_debug(4, "Cache HIT for %s (%s)" % (channel_label,
- channel_id))
- temp_stream = tempfile.TemporaryFile()
- temp_stream.write(val)
- temp_stream.flush()
- stream = self._normalize_compressed_stream(temp_stream)
-
- # Copy the results to the output stream
- # They should be already compressed if they were requested to be
- # compressed
- buffer_size = 16384
- # Send the HTTP headers - but don't init the compressed stream since
- # we send the data ourselves
- self._send_headers(init_compressed_stream=0)
- while 1:
- buff = stream.read(buffer_size)
- if not buff:
- break
- try:
- self._raw_stream.write(buff)
- except IOError:
- log_error("Client disconnected prematurely")
- self.close()
- raise ClosedConnectionError
- # We're done
- return 0
-
- _query_get_channel_packages = rhnSQL.Statement("""
- select cp.package_id,
- TO_CHAR(p.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnChannelPackage cp,
- rhnPackage p
- where cp.channel_id = :channel_id
- and cp.package_id = p.id
- """)
-
- def _cache_channel_packages_short(self, channel_id, key, last_modified):
- # Caches the short package entries for channel_id
- # Create a temporary file
- temp_stream = tempfile.TemporaryFile()
- # Always compress the result
- compress_level = 5
- stream = gzip.GzipFile(None, "wb", compress_level, temp_stream)
- writer = xmlWriter.XMLWriter(stream=stream)
-
- # Fetch packages
- h = rhnSQL.prepare(self._query_get_channel_packages)
- h.execute(channel_id=channel_id)
- package_ids = h.fetchall_dict() or []
- # Sort packages
- package_ids.sort(lambda a, b: cmp(a['package_id'], b['package_id']))
-
- dumper = SatelliteDumper(writer,
- ShortPackagesDumper(writer, package_ids))
- dumper.dump()
- writer.flush()
- # We're done with the stream object
- stream.close()
- del stream
- temp_stream.seek(0, 0)
- # Set the value in the cache. We don't recompress the result since
- # it's already compressed
- rhnCache.set(key, temp_stream.read(), modified=last_modified,
- compressed=0, raw=1)
- return self._normalize_compressed_stream(temp_stream)
-
- def _normalize_compressed_stream(self, stream):
- # Given a compressed stream, will either return the stream, or will
- # decompress it and return it, depending on the compression level
- # self.compress_level
- stream.seek(0, 0)
- if self.compress_level:
- # Output should be compressed; nothing else to do
- return stream
- # Argh, have to decompress
- return gzip.GzipFile(None, "rb", 0, stream)
-
- def dump_packages(self, packages):
- log_debug(2)
- return self._packages(packages, prefix='rhn-package-',
- dump_class=PackagesDumper)
-
- def dump_packages_short(self, packages):
- log_debug(2)
- return self._packages(packages, prefix='rhn-package-',
- dump_class=ShortPackagesDumper)
-
- def dump_source_packages(self, packages):
- log_debug(2)
- return self._packages(packages, prefix='rhn-source-package-',
- dump_class=SourcePackagesDumper, sources=1)
-
- def _packages(self, packages, prefix, dump_class, sources=0):
- if sources:
- h = self.get_source_packages_statement()
- else:
- h = self.get_packages_statement()
-
- packages_hash = {}
- for package in packages:
- package = str(package)
- if package[:len(prefix)] != prefix:
- raise rhnFault(3002, "Invalid package name %s" % package)
- package_id = package[len(prefix):]
- try:
- package_id = int(package_id)
- except ValueError:
- raise rhnFault(3002, "Invalid package name %s" % package)
- if packages_hash.has_key(package_id):
- # Already verified
- continue
- h.execute(package_id=package_id)
- row = h.fetchone_dict()
- if not row:
- # XXX Silently ignore it?
- raise rhnFault(3003, "No such package %s" % package)
- # Saving the row, it's handy later when we create the iterator
- packages_hash[package_id] = row
-
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- dump_class(writer, packages_hash.values()))
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_errata(self, errata):
- log_debug(2)
-
- h = self.get_errata_statement()
-
- errata_hash = {}
- prefix = 'rhn-erratum-'
- for erratum in errata:
- erratum = str(erratum)
- if erratum[:len(prefix)] != prefix:
- raise rhnFault(3004, "Wrong erratum name %s" % erratum)
- errata_id = erratum[len(prefix):]
- try:
- errata_id = int(errata_id)
- except ValueError:
- raise rhnFault(3004, "Wrong erratum name %s" % erratum)
- if errata_hash.has_key(errata_id):
- # Already verified
- continue
- h.execute(errata_id=errata_id)
- row = h.fetchone_dict()
- if not row:
- # XXX Silently ignore it?
- raise rhnFault(3005, "No such erratum %s" % erratum)
- # Saving the row, it's handy later when we create the iterator
- errata_hash[errata_id] = row
-
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- ErrataDumper(writer, errata_hash.values()))
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_kickstartable_trees(self, kickstart_labels=None):
- log_debug(2)
- kickstarts = self._validate_kickstarts(
- kickstart_labels=kickstart_labels)
-
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer,
- KickstartableTreesDumper(writer, kickstarts=kickstarts))
- dumper.dump()
- writer.flush()
- log_debug(4, "OK")
- self.close()
- return 0
-
- def dump_product_names(self):
- log_debug(4)
- writer = self._get_xml_writer()
- dumper = SatelliteDumper(writer, exportLib.ProductNamesDumper(writer))
- dumper.dump()
- writer.flush()
- self.close()
- return 0
-
- def _validate_channels(self, channel_labels=None):
- log_debug(4)
- # Sanity check
- if channel_labels:
- if not isinstance(channel_labels, ListType):
- raise rhnFault(3000,
- "Expected list of channels, got %s" %
type(channel_labels))
-
- h = self.get_channels_statement()
- h.execute()
- # Hash the list of all available channels based on the label
- all_channels_hash = {}
- while 1:
- row = h.fetchone_dict()
- if not row:
- break
- all_channels_hash[row['label']] = row
-
- # Intersect the list of channels they've sent to us
- if not channel_labels:
- channels = all_channels_hash
- else:
- channels = {}
- for label in channel_labels:
- if not all_channels_hash.has_key(label):
- raise rhnFault(3001, "Could not retrieve channel %s" %
- label)
- channels[label] = all_channels_hash[label]
-
- return channels
-
- _query_validate_kickstarts = rhnSQL.Statement("""
- select kt.label kickstart_label,
- TO_CHAR(kt.modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnKickstartableTree kt
- where kt.channel_id = :channel_id
- and kt.org_id is null
- """)
- def _validate_kickstarts(self, kickstart_labels):
- log_debug(4)
- # Sanity check
- if kickstart_labels:
- if not isinstance(kickstart_labels, ListType):
- raise rhnFault(3000,
- "Expected list of kickstart labels, got %s" %
- type(kickstart_labels))
-
- all_ks_hash = {}
-
- h = self.get_channels_statement()
- h.execute()
-
- hk = rhnSQL.prepare(self._query_validate_kickstarts)
- while 1:
- channel = h.fetchone_dict()
- if not channel:
- break
-
- hk.execute(channel_id=channel['channel_id'])
- while 1:
- row = hk.fetchone_dict()
- if not row:
- break
- all_ks_hash[row['kickstart_label']] = row
-
- if not kickstart_labels:
- return all_ks_hash.values()
-
- result = []
- for l in kickstart_labels:
- if all_ks_hash.has_key(l):
- result.append(all_ks_hash[l])
-
- return result
-
-class SatelliteDumper(exportLib.SatelliteDumper):
- # Overriding with our own version
- def set_attributes(self):
- attributes = exportLib.SatelliteDumper.set_attributes(self)
- attributes['version'] = constants.PROTOCOL_VERSION
- attributes['generation'] = CFG.SAT_CERT_GENERATION
- return attributes
-
-
-# A query iterator successively applies the list of params as execute() to the
-# statement that was passed in, and presents the union of the result sets as a
-# single result set.
-# Params is a list of dictionaries that would fill the named bound variables
-# from the statement
-class QueryIterator:
- def __init__(self, statement, params):
- self._statement = statement
- self._params = params
- # Position in the params list
- self._params_pos = -1
- self._result_set_exhausted = 1
-
- def fetchone_dict(self):
- log_debug(4)
- while 1:
- if self._result_set_exhausted:
- # Nothing to do here, move to the next set of params
- pos = self._params_pos
- pos = pos + 1
- self._params_pos = pos
- if pos == len(self._params):
- # End of the list, we're done
- return None
- # Execute the statement
- log_debug(5, "Using param", pos, self._params[pos])
- self._statement.execute(**self._params[pos])
- self._result_set_exhausted = 0
- # Go back into the loop
- continue
-
- # Result set not exhausted yet
- row = self._statement.fetchone_dict()
- if row:
- return row
-
- self._result_set_exhausted = 1
-
- def _execute_next(self):
- log_debug(4)
- self._params_pos = self._params_pos + 1
- if self._params_pos == len(self._params):
- log_debug(5, "Done")
- self._statement = None
- return None
- self._statement.execute(**self._params[self._params_pos])
-
-# This class will attempt to retrieve information, either from the database or
-# from a local cache
-#
-# Note that we expect at most one result set per database query - this can be
-# easily fixed if we need more
-class CachedQueryIterator:
- def __init__(self, statement, params, cache_get):
- self._statement = statement
- # XXX params has to be a list of hashes, containing at least a
- # last_modified - which is stripped before the execution of the
- # statement
- self._params = params
- self._params_pos = 0
- self._cache_get = cache_get
-
- def fetchone_dict(self):
- log_debug(4)
- while 1:
- if self._params_pos == len(self._params):
- log_debug(4, "End of iteration")
- self.close()
- return None
- log_debug(4, "Fetching set for param", self._params_pos)
- # Get the last modified attribute
- params = self._params[self._params_pos]
- self._params_pos = self._params_pos + 1
-
- # Look up the object in the cache
- val = self._cache_get(params)
- if val is not None:
- # Entry is cached
- log_debug(2, "Cache HIT for %s" % params)
- return val
-
- log_debug(4, "Cache MISS for %s" % params)
- start = time.time()
- self._execute(params)
- row = self._statement.fetchone_dict()
-
- if row:
- log_debug(5, "Timer: %.2f" % (time.time() - start))
- return (params, row)
-
- # Dummy return
- return None
-
- def _execute(self, params):
- log_debug(4, params)
- self._statement.execute(**params)
-
- def close(self):
- # Make sure we remove references to these objects, or circular
- # references can occur
- log_debug(3, "Closing the iterator")
- self._statement = None
- self._cache_get = None
- self._params = None
-
-
-class CachedDumper(exportLib.BaseDumper):
- def __init__(self, writer, statement, params):
- iterator = CachedQueryIterator(statement, params,
- cache_get=self.cache_get)
- exportLib.BaseDumper.__init__(self, writer, data_iterator=iterator)
- self.use_database_cache = CFG.USE_DATABASE_CACHE
- log_debug(1, "Use database cache", self.use_database_cache)
-
- def _get_last_modified(self, params):
- # To be overwritten
- return params['last_modified']
-
- def _get_key(self, params):
- raise NotImplementedError
-
- def cache_get(self, params):
- log_debug(4, params)
- key = self._get_key(params)
- last_modified = self._get_last_modified(params)
- if not self.use_database_cache:
- return rhnCache.get(key, modified=last_modified, raw=1)
- return rhnDatabaseCache.get(key, modified=last_modified, raw=1,
- compressed=1)
-
- def cache_set(self, params, value):
- log_debug(4, params)
- last_modified = self._get_last_modified(params)
- key = self._get_key(params)
- if not self.use_database_cache:
- return rhnCache.set(key, value, modified=last_modified, raw=1)
- return rhnDatabaseCache.set(key, value, modified=last_modified, raw=1,
- compressed=1)
-
- def _dump_subelement(self, data):
- # To be overridden in subclasses
- pass
-
- def dump_subelement(self, data):
- log_debug(2)
- # CachedQueryIterator returns (params, row) as data
- params, row = data
- s = StringIO()
- # Back up the old writer and replace it with a StringIO-based one
- ow = self.get_writer()
- # Write into a tee stream (which writes to both streams at the same
- # time)
- tee_stream = TeeStream(s, ow.stream)
- self.set_writer(xmlWriter.XMLWriter(stream=tee_stream, skip_xml_decl=1))
-
- start = time.time()
- self._dump_subelement(row)
- log_debug(5,
- "Timer for _dump_subelement: %.2f" % (time.time() - start))
-
- # Restore the old writer
- self.set_writer(ow)
-
- self.cache_set(params, s.getvalue())
-
-
-class ChannelsDumper(CachedDumper, exportLib.ChannelsDumper):
- _query_list_channels = rhnSQL.Statement("""
- select c.id, c.label, ca.label channel_arch, c.basedir, c.name,
- c.summary, c.description, c.gpg_key_url, c.org_id,
- TO_CHAR(c.last_modified, 'YYYYMMDDHH24MISS') last_modified,
- c.channel_product_id,
- pc.label parent_channel,
- cp.product channel_product,
- cp.version channel_product_version,
- cp.beta channel_product_beta,
- c.receiving_updates
- from rhnChannel c, rhnChannelArch ca, rhnChannel pc, rhnChannelProduct cp
- where c.id = :channel_id
- and c.channel_arch_id = ca.id
- and c.parent_channel = pc.id (+)
- and c.channel_product_id = cp.id (+)
- """)
- def __init__(self, writer, channels):
- h = rhnSQL.prepare(self._query_list_channels)
- CachedDumper.__init__(self, writer, statement=h, params=channels)
-
- def _get_key(self, params):
- channel_id = params['channel_id']
- return "xml-channels/rhn-channel-%d.xml" % channel_id
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.ChannelsDumper.dump_subelement(self, data)
-
-class ShortPackagesDumper(CachedDumper, exportLib.ShortPackagesDumper):
- def __init__(self, writer, packages):
- h = rhnSQL.prepare("""
- select
- p.id,
- p.org_id,
- pn.name,
- pe.evr.version version,
- pe.evr.release release,
- pe.evr.epoch epoch,
- pa.label package_arch,
- c.checksum md5sum,
- p.package_size,
- TO_CHAR(p.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnPackage p, rhnPackageName pn, rhnPackageEVR pe,
- rhnPackageArch pa, rhnChecksum c
- where p.id = :package_id
- and p.name_id = pn.id
- and p.evr_id = pe.id
- and p.package_arch_id = pa.id
- and p.checksum_id = c.id
- """)
- CachedDumper.__init__(self, writer, statement=h, params=packages)
-
- def _get_key(self, params):
- package_id = params['package_id']
- hash_val = hash_object_id(package_id, 100)
- return "xml-short-packages/%s/rhn-package-short-%s.xml" % (
- hash_val, package_id)
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.ShortPackagesDumper.dump_subelement(self, data)
-
-class PackagesDumper(CachedDumper, exportLib.PackagesDumper):
- def __init__(self, writer, packages):
- h = rhnSQL.prepare("""
- select
- p.id,
- pn.name,
- p.org_id,
- pe.evr.version version,
- pe.evr.release release,
- pe.evr.epoch epoch,
- pa.label package_arch,
- pg.name package_group,
- p.rpm_version,
- p.description,
- p.summary,
- p.package_size,
- p.payload_size,
- p.build_host,
- TO_CHAR(p.build_time, 'YYYYMMDDHH24MISS') build_time,
- sr.name source_rpm,
- c.checksum md5sum,
- p.vendor,
- p.payload_format,
- p.compat,
- p.header_sig,
- p.header_start,
- p.header_end,
- p.copyright,
- p.cookie,
- TO_CHAR(p.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnPackage p, rhnPackageName pn, rhnPackageEVR pe,
- rhnPackageArch pa, rhnPackageGroup pg, rhnSourceRPM sr,
- rhnChecksum c
- where p.id = :package_id
- and p.name_id = pn.id
- and p.evr_id = pe.id
- and p.package_arch_id = pa.id
- and p.package_group = pg.id
- and p.source_rpm_id = sr.id
- and p.checksum_id = c.id
- """)
- CachedDumper.__init__(self, writer, statement=h, params=packages)
-
- def _get_key(self, params):
- package_id = params['package_id']
- hash_val = hash_object_id(package_id, 100)
- return "xml-packages/%s/rhn-package-%s.xml" % (hash_val, package_id)
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.PackagesDumper.dump_subelement(self, data)
-
-class SourcePackagesDumper(CachedDumper, exportLib.SourcePackagesDumper):
- def __init__(self, writer, packages):
- h = rhnSQL.prepare("""
- select
- ps.id,
- sr.name source_rpm,
- pg.name package_group,
- ps.rpm_version,
- ps.payload_size,
- ps.build_host,
- TO_CHAR(ps.build_time, 'YYYYMMDDHH24MISS') build_time,
- sig.checksum sigmd5,
- ps.vendor,
- ps.cookie,
- ps.package_size,
- c.checksum md5sum,
- TO_CHAR(ps.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnPackageSource ps, rhnPackageGroup pg, rhnSourceRPM sr,
- rhnChecksum c, rhnChecksum sig
- where ps.id = :package_id
- and ps.package_group = pg.id
- and ps.source_rpm_id = sr.id
- and ps.checksum_id = c.id
- and ps.sigchecksum_id = sig.id
- """)
- CachedDumper.__init__(self, writer, statement=h, params=packages)
-
- def _get_key(self, params):
- package_id = params['package_id']
- hash_val = hash_object_id(package_id, 100)
- return "xml-packages/%s/rhn-source-package-%s.xml" % (hash_val,
- package_id)
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.SourcePackagesDumper.dump_subelement(self, data)
-
-
-class ErrataDumper(CachedDumper, exportLib.ErrataSynopsisDumper):
- def __init__(self, writer, errata):
- h = rhnSQL.prepare("""
- select
- e.id,
- e.advisory_name,
- e.advisory,
- e.advisory_type,
- e.advisory_rel,
- e.product,
- e.description,
- e.synopsis,
- e.topic,
- e.solution,
- TO_CHAR(e.issue_date, 'YYYYMMDDHH24MISS') issue_date,
- TO_CHAR(e.update_date, 'YYYYMMDDHH24MISS') update_date,
- TO_CHAR(e.last_modified, 'YYYYMMDDHH24MISS') last_modified,
- e.refers_to,
- e.notes
- from rhnErrata e
- where e.id = :errata_id
- """)
- CachedDumper.__init__(self, writer, statement=h, params=errata)
-
- def _get_key(self, params):
- errata_id = params['errata_id']
- hash_val = hash_object_id(errata_id, 10)
- return "xml-errata/%s/rhn-erratum-%s.xml" % (hash_val, errata_id)
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.ErrataSynopsisDumper.dump_subelement(self, data)
-
-class KickstartableTreesDumper(CachedDumper,
- exportLib.KickstartableTreesDumper):
- _query_lookup_ks_tree = rhnSQL.Statement("""
- select kt.id,
- c.label channel,
- kt.base_path "base-path",
- kt.label,
- kt.boot_image "boot-image",
- ktt.name "kstree-type-name",
- ktt.label "kstree-type-label",
- kit.name "install-type-name",
- kit.label "install-type-label",
- TO_CHAR(kt.last_modified, 'YYYYMMDDHH24MISS') "last-modified"
- from rhnKickstartableTree kt,
- rhnKSTreeType ktt,
- rhnKSInstallType kit,
- rhnChannel c
- where kt.channel_id = c.id
- and ktt.id = kt.kstree_type
- and kit.id = kt.install_type
- and kt.org_id is NULL
- and kt.label = :kickstart_label
- """)
- def __init__(self, writer, kickstarts):
- h = rhnSQL.prepare(self._query_lookup_ks_tree)
- CachedDumper.__init__(self, writer, statement=h, params=kickstarts)
-
- def _get_key(self, params):
- kickstart_label = params['kickstart_label']
- return "xml-kickstartable-tree/%s.xml" % kickstart_label
-
- def _dump_subelement(self, data):
- log_debug(6, data)
- return exportLib.KickstartableTreesDumper.dump_subelement(self, data)
-
-# This looks complicated; it returns the (integer part of)
-# obj_id, modulo factor
-# The string is left padded with as many 0 chars as necessary to
-# match factor
-def hash_object_id(obj_id, factor):
- # Make sure obj_id is a string
- obj_id = str(obj_id)
- format = int(math.ceil(math.log10(factor)))
- format = "%%0%sd" % format
- return format % (int(obj_id.split('-')[-1]) % factor, )
-
-class ClosedConnectionError(Exception):
- pass
-
-class TeeStream:
- """Writes to multiple streams at the same time"""
- def __init__(self, *streams):
- self.streams = streams
-
- def write(self, data):
- log_debug(6, "Writing %s bytes" % len(data))
- for stream in self.streams:
- stream.write(data)
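For reference, hash_object_id() above is what fans the cached XML fragments out across subdirectories: it takes the numeric tail of the object id modulo a factor and left-pads it to the digit width of the factor. A minimal standalone sketch of the same logic (the names and example values are illustrative, not part of the patch):

    import math

    def hash_object_id(obj_id, factor):
        # Bucket width: digits needed for factor, e.g.
        # factor=100 -> 2-digit buckets "00".."99".
        width = int(math.ceil(math.log10(factor)))
        # Both "1234" and "rhn-package-1234" hash on the numeric tail.
        tail = int(str(obj_id).split('-')[-1])
        return ("%%0%dd" % width) % (tail % factor)

    # hash_object_id(1234, 100) == "34", so the cache key becomes
    # "xml-packages/34/rhn-package-1234.xml" and no single directory
    # has to hold an entry for every package.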
diff --git a/backend/satellite_exporter/exporter/string_buffer.py b/backend/satellite_exporter/exporter/string_buffer.py
deleted file mode 100644
index e864485..0000000
--- a/backend/satellite_exporter/exporter/string_buffer.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/python
-# Copyright (C) 2008 Red Hat, Inc.
-#
-# Simple string buffer that wraps around streams to speed up writes
-#
-# This software is licensed to you under the GNU General Public License,
-# version 2 (GPLv2). There is NO WARRANTY for this software, express or
-# implied, including the implied warranties of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
-# along with this software; if not, see
-# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
-#
-# Red Hat trademarks are not licensed under GPLv2. No permission is
-# granted to use or replicate Red Hat trademarks that are incorporated
-# in this software or its documentation.
-#
-
-class StringBuffer:
- def __init__(self, stream):
- self.stream = stream
- self.buffer_size = 65536
- self.buffer = ""
-
- def write(self, data):
- self.buffer = self.buffer + data
- if len(self.buffer) < self.buffer_size:
- return
- # The buffer is full, send it
- self.stream.write(self.buffer[:self.buffer_size])
- self.buffer = self.buffer[self.buffer_size:]
-
- def flush(self):
- if self.buffer:
- self.stream.write(self.buffer)
- self.buffer = ""
-
- def close(self):
- self.flush()
-
- def __del__(self):
- self.close()
-
-
-if __name__ == '__main__':
- import sys
- import time
- sb = StringBuffer(sys.stdout)
- sb.buffer_size = 10
-
- while 1:
- sb.write('a')
- time.sleep(.2)
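This buffer is what XML_Dumper._get_xml_writer() above wraps around itself (XMLWriter(stream=StringBuffer(self))), so the gzip layer sees 64 KB blocks rather than one tiny write per XML tag. A rough sketch of the same stacking, with an ordinary file standing in for the HTTP request stream (file name and loop are illustrative):

    import gzip

    # assumes the StringBuffer class deleted above
    raw = open('dump.xml.gz', 'wb')         # stand-in for the request stream
    gz = gzip.GzipFile(None, 'wb', 5, raw)  # compress_level 5, as in XML_Dumper
    out = StringBuffer(gz)                  # coalesce writes into 64 KB chunks

    for i in range(100000):
        out.write('<rhn-package-short id="%d"/>' % i)
    out.flush()                             # push the partial last chunk
    gz.close()
    raw.close()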
diff --git a/backend/satellite_exporter/handlers/non_auth_dumper.py b/backend/satellite_exporter/handlers/non_auth_dumper.py
index f0431f0..24adaf3 100644
--- a/backend/satellite_exporter/handlers/non_auth_dumper.py
+++ b/backend/satellite_exporter/handlers/non_auth_dumper.py
@@ -27,7 +27,7 @@ from server.importlib.backendLib import localtime
from common.rhnTranslate import _
from satellite_tools.exporter import exportLib
-from satellite_exporter.exporter import dumper
+from satellite_tools.disk_dumper import dumper
class InvalidPackageError(Exception):
pass
@@ -38,10 +38,10 @@ class NullPathPackageError(Exception):
class MissingPackageError(Exception):
pass
-class NonAuthenticatedDumper(rhnHandler, dumper.XML_Dumper):
+class NonAuthenticatedDumper(rhnHandler, dumper.XML_DumperEx):
def __init__(self, req):
rhnHandler.__init__(self)
- dumper.XML_Dumper.__init__(self, req)
+ dumper.XML_DumperEx.__init__(self, req)
# Don't check for abuse
self.check_for_abuse = 0
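The net effect of the hunk above: the handler now pulls the streaming dumper from the merged satellite_tools.disk_dumper module and derives from XML_DumperEx instead of the removed exporter class. An annotated sketch of the resulting class (rhnHandler's import is outside this hunk; the comments summarize XML_DumperEx.__init__ as shown in the diff below):

    from satellite_tools.disk_dumper import dumper

    class NonAuthenticatedDumper(rhnHandler, dumper.XML_DumperEx):
        def __init__(self, req):
            rhnHandler.__init__(self)
            # Binds req as the raw output stream, sets content type
            # application/octet-stream and gzip compress_level 5.
            dumper.XML_DumperEx.__init__(self, req)
            # Anonymous endpoint: skip the abuse check.
            self.check_for_abuse = 0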
diff --git a/backend/satellite_tools/disk_dumper/dumper.py b/backend/satellite_tools/disk_dumper/dumper.py
index f4d0b0b..aad3c2d 100644
--- a/backend/satellite_tools/disk_dumper/dumper.py
+++ b/backend/satellite_tools/disk_dumper/dumper.py
@@ -19,7 +19,7 @@ import math
import gzip
import string
import tempfile
-from types import ListType, DictType
+from types import ListType
from cStringIO import StringIO
from common import log_debug, log_error, rhnFault, UserDictCase, rhnCache, \
    CFG, rhnLib
@@ -57,6 +57,333 @@ class DatabaseStatement:
def __getattr__(self, name):
return getattr(self.statement, name)
+class XML_DumperEx(XML_Dumper):
+ def __init__(self, req):
+ self.compress_level = 5
+ self.headers_out = UserDictCase()
+ self._raw_stream = req
+ self._raw_stream.content_type = 'application/octet-stream'
+ # State machine
+ self._headers_sent = 0
+ self._is_closed = 0
+ self._compressed_stream = None
+ # Redefine in subclasses
+ self._channel_family_query = """
+ select pcf.channel_family_id, to_number(null) quantity
+ from rhnPublicChannelFamily pcf
+ """
+
+ def _send_headers(self, error=0, init_compressed_stream=1):
+ log_debug(4, "is_closed", self._is_closed)
+ if self._is_closed:
+ raise Exception, "Trying to write to a closed connection"
+ if self._headers_sent:
+ return
+ self._headers_sent = 1
+ if self.compress_level:
+ self.headers_out['Content-Encoding'] = 'gzip'
+ # Send the headers
+ if error:
+ # No compression
+ self.compress_level = 0
+ self._raw_stream.content_type = 'text/xml'
+ for h, v in self.headers_out.items():
+ self._raw_stream.headers_out[h] = str(v)
+ self._raw_stream.send_http_header()
+ # If need be, start gzipping
+ if self.compress_level and init_compressed_stream:
+ log_debug(4, "Compressing with factor %s" % self.compress_level)
+ self._compressed_stream = gzip.GzipFile(None, "wb",
+ self.compress_level, self._raw_stream)
+
+ def send(self, data):
+ log_debug(3, "Sending %d bytes" % len(data))
+ try:
+ self._send_headers()
+ if self._compressed_stream:
+ log_debug(4, "Sending through a compressed stream")
+ self._compressed_stream.write(data)
+ else:
+ self._raw_stream.write(data)
+ except IOError:
+ log_error("Client appears to have closed connection")
+ self.close()
+ raise ClosedConnectionError
+ log_debug(5, "Bytes sent", len(data))
+
+ write = send
+
+ def close(self):
+ log_debug(2, "Closing")
+ if self._is_closed:
+ log_debug(3, "Already closed")
+ return
+
+ if self._compressed_stream:
+ log_debug(5, "Closing a compressed stream")
+ try:
+ self._compressed_stream.close()
+ except IOError, e:
+ # Remote end has closed connection already
+ log_error("Error closing the stream", str(e))
+ pass
+ self._compressed_stream = None
+ self._is_closed = 1
+ log_debug(3, "Closed")
+
+ # Dumper functions here
+ def dump_blacklist_obsoletes(self):
+ log_debug(2)
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ exportLib.BlacklistObsoletesDumper(writer))
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_arches(self, rpm_arch_type_only=0):
+ log_debug(2)
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ exportLib.ChannelArchesDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ exportLib.PackageArchesDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ exportLib.ServerArchesDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ exportLib.CPUArchesDumper(writer),
+ exportLib.ServerPackageArchCompatDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ exportLib.ServerChannelArchCompatDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ exportLib.ChannelPackageArchCompatDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only),
+ )
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_server_group_type_server_arches(self, rpm_arch_type_only=0,
+ virt_filter=0):
+ log_debug(2)
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ exportLib.ServerGroupTypeServerArchCompatDumper(writer,
+ rpm_arch_type_only=rpm_arch_type_only, virt_filter=virt_filter),
+ )
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_channel_families(self, virt_filter=0):
+ log_debug(2)
+
+ h = self.get_channel_families_statement()
+ h.execute()
+
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ exportLib.ChannelFamiliesDumper(writer,
+ data_iterator=h, null_max_members=0, virt_filter=virt_filter),)
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_channels(self, channel_labels=None):
+ log_debug(2)
+ channels = self._validate_channels(channel_labels=channel_labels)
+
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer, ChannelsDumperEx(writer,
+ channels=channels.values()))
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_channel_packages_short(self, channel_label, last_modified):
+ log_debug(2, channel_label)
+ channels = self._validate_channels(channel_labels=[channel_label])
+ channel_obj = channels[channel_label]
+ db_last_modified = int(rhnLib.timestamp(channel_obj['last_modified']))
+ last_modified = int(rhnLib.timestamp(last_modified))
+ log_debug(3, "last modified", last_modified, "db last modified",
+ db_last_modified)
+ if last_modified != db_last_modified:
+ raise rhnFault(3013, "The requested channel version does not match"
+ " the upstream version", explain=0)
+ channel_id = channel_obj['channel_id']
+ key = "xml-channel-packages/rhn-channel-%d.data" % channel_id
+ # Try to get everything off of the cache
+ val = rhnCache.get(key, compressed=0, raw=1, modified=last_modified)
+ if val is None:
+ # Not generated yet
+ log_debug(4, "Cache MISS for %s (%s)" % (channel_label,
+ channel_id))
+ stream = self._cache_channel_packages_short(channel_id, key,
+ last_modified)
+ else:
+ log_debug(4, "Cache HIT for %s (%s)" % (channel_label,
+ channel_id))
+ temp_stream = tempfile.TemporaryFile()
+ temp_stream.write(val)
+ temp_stream.flush()
+ stream = self._normalize_compressed_stream(temp_stream)
+
+ # Copy the results to the output stream
+ # They should be already compressed if they were requested to be
+ # compressed
+ buffer_size = 16384
+ # Send the HTTP headers - but don't init the compressed stream since
+ # we send the data ourselves
+ self._send_headers(init_compressed_stream=0)
+ while 1:
+ buff = stream.read(buffer_size)
+ if not buff:
+ break
+ try:
+ self._raw_stream.write(buff)
+ except IOError:
+ log_error("Client disconnected prematurely")
+ self.close()
+ raise ClosedConnectionError
+ # We're done
+ return 0
+
+ def _cache_channel_packages_short(self, channel_id, key, last_modified):
+ """ Caches the short package entries for channel_id
"""
+ # Create a temporary file
+ temp_stream = tempfile.TemporaryFile()
+ # Always compress the result
+ compress_level = 5
+ stream = gzip.GzipFile(None, "wb", compress_level, temp_stream)
+ writer = xmlWriter.XMLWriter(stream=stream)
+
+ # Fetch packages
+ h = rhnSQL.prepare(self._query_get_channel_packages)
+ h.execute(channel_id=channel_id)
+ package_ids = h.fetchall_dict() or []
+ # Sort packages
+ package_ids.sort(lambda a, b: cmp(a['package_id'], b['package_id']))
+
+ dumper = SatelliteDumperEx(writer,
+ ShortPackagesDumper(writer, package_ids))
+ dumper.dump()
+ writer.flush()
+ # We're done with the stream object
+ stream.close()
+ del stream
+ temp_stream.seek(0, 0)
+ # Set the value in the cache. We don't recompress the result since
+ # it's already compressed
+ rhnCache.set(key, temp_stream.read(), modified=last_modified,
+ compressed=0, raw=1)
+ return self._normalize_compressed_stream(temp_stream)
+
+ def _packages(self, packages, prefix, dump_class, sources=0):
+ if sources:
+ h = self.get_source_packages_statement()
+ else:
+ h = self.get_packages_statement()
+
+ packages_hash = {}
+ for package in packages:
+ package = str(package)
+ if package[:len(prefix)] != prefix:
+ raise rhnFault(3002, "Invalid package name %s" % package)
+ package_id = package[len(prefix):]
+ try:
+ package_id = int(package_id)
+ except ValueError:
+ raise rhnFault(3002, "Invalid package name %s" % package)
+ if packages_hash.has_key(package_id):
+ # Already verified
+ continue
+ h.execute(package_id=package_id)
+ row = h.fetchone_dict()
+ if not row:
+ # XXX Silently ignore it?
+ raise rhnFault(3003, "No such package %s" % package)
+ # Saving the row, it's handy later when we create the iterator
+ packages_hash[package_id] = row
+
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ dump_class(writer, packages_hash.values()))
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_errata(self, errata):
+ log_debug(2)
+
+ h = self.get_errata_statement()
+
+ errata_hash = {}
+ prefix = 'rhn-erratum-'
+ for erratum in errata:
+ erratum = str(erratum)
+ if erratum[:len(prefix)] != prefix:
+ raise rhnFault(3004, "Wrong erratum name %s" % erratum)
+ errata_id = erratum[len(prefix):]
+ try:
+ errata_id = int(errata_id)
+ except ValueError:
+ raise rhnFault(3004, "Wrong erratum name %s" % erratum)
+ if errata_hash.has_key(errata_id):
+ # Already verified
+ continue
+ h.execute(errata_id=errata_id)
+ row = h.fetchone_dict()
+ if not row:
+ # XXX Silently ignore it?
+ raise rhnFault(3005, "No such erratum %s" % erratum)
+ # Saving the row, it's handy later when we create the iterator
+ errata_hash[errata_id] = row
+
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ ErrataDumperEx(writer, errata_hash.values()))
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_kickstartable_trees(self, kickstart_labels=None):
+ log_debug(2)
+ kickstarts = self._validate_kickstarts(
+ kickstart_labels=kickstart_labels)
+
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer,
+ KickstartableTreesDumper(writer, kickstarts=kickstarts))
+ dumper.dump()
+ writer.flush()
+ log_debug(4, "OK")
+ self.close()
+ return 0
+
+ def dump_product_names(self):
+ log_debug(4)
+ writer = self._get_xml_writer()
+ dumper = SatelliteDumperEx(writer, exportLib.ProductNamesDumper(writer))
+ dumper.dump()
+ writer.flush()
+ self.close()
+ return 0
+
class XML_Dumper:
def __init__(self):
self.compress_level = 5
@@ -194,12 +521,13 @@ class XML_Dumper:
self.close()
return 0
- def dump_server_group_type_server_arches(self, rpm_arch_type_only=0):
+ def dump_server_group_type_server_arches(self, rpm_arch_type_only=0,
+ virt_filter=0):
log_debug(2)
writer = self._get_xml_writer()
dumper = SatelliteDumper(writer,
exportLib.ServerGroupTypeServerArchCompatDumper(writer,
- rpm_arch_type_only=rpm_arch_type_only),
+ rpm_arch_type_only=rpm_arch_type_only, virt_filter=virt_filter),
)
dumper.dump()
writer.flush()
@@ -526,6 +854,13 @@ class XML_Dumper:
return result
+class SatelliteDumperEx(SatelliteDumper):
+ def set_attributes(self):
+ """ Overriding with our own version """
+ attributes = SatelliteDumper.set_attributes(self)
+ attributes['version'] = constants.PROTOCOL_VERSION
+ return attributes
+
class SatelliteDumper(exportLib.SatelliteDumper):
def set_attributes(self):
""" Overriding with our own version """
@@ -656,7 +991,7 @@ class CachedDumper(exportLib.BaseDumper):
return params['last_modified']
def _get_key(self, params):
- return "dummy"
+ raise NotImplementedError
def cache_get(self, params):
log_debug(4, params)
@@ -704,6 +1039,34 @@ class CachedDumper(exportLib.BaseDumper):
self.cache_set(params, s.getvalue())
+class ChannelsDumperEx(CachedDumper, exportLib.ChannelsDumper):
+ _query_list_channels = rhnSQL.Statement("""
+ select c.id, c.label, ca.label channel_arch, c.basedir, c.name,
+ c.summary, c.description, c.gpg_key_url, c.org_id,
+ TO_CHAR(c.last_modified, 'YYYYMMDDHH24MISS') last_modified,
+ c.channel_product_id,
+ pc.label parent_channel,
+ cp.product channel_product,
+ cp.version channel_product_version,
+ cp.beta channel_product_beta,
+ c.receiving_updates
+ from rhnChannel c, rhnChannelArch ca, rhnChannel pc, rhnChannelProduct cp
+ where c.id = :channel_id
+ and c.channel_arch_id = ca.id
+ and c.parent_channel = pc.id (+)
+ and c.channel_product_id = cp.id (+)
+ """)
+ def __init__(self, writer, channels):
+ h = rhnSQL.prepare(self._query_list_channels)
+ CachedDumper.__init__(self, writer, statement=h, params=channels)
+
+ def _get_key(self, params):
+ channel_id = params['channel_id']
+ return "xml-channels/rhn-channel-%d.xml" % channel_id
+
+ def _dump_subelement(self, data):
+ log_debug(6, data)
+ return exportLib.ChannelsDumper.dump_subelement(self, data)
class ChannelsDumper(CachedDumper, exportLib.ChannelsDumper):
_query_list_channels = rhnSQL.Statement("""
@@ -1018,7 +1381,7 @@ class SourcePackagesDumper(CachedDumper, exportLib.SourcePackagesDumper):
ps.package_size,
c.checksum md5sum,
TO_CHAR(ps.last_modified, 'YYYYMMDDHH24MISS') last_modified
- from rhnPackageSource ps, rhnPackageGroup pg, rhnSourceRPM sr
+ from rhnPackageSource ps, rhnPackageGroup pg, rhnSourceRPM sr,
rhnChecksum c, rhnChecksum sig
where ps.id = :package_id
and ps.package_group = pg.id
@@ -1037,6 +1400,38 @@ class SourcePackagesDumper(CachedDumper, exportLib.SourcePackagesDumper):
log_debug(6, data)
return exportLib.SourcePackagesDumper.dump_subelement(self, data)
+class ErrataDumperEx(CachedDumper, exportLib.ErrataSynopsisDumper):
+ def __init__(self, writer, errata):
+ h = rhnSQL.prepare("""
+ select
+ e.id,
+ e.advisory_name,
+ e.advisory,
+ e.advisory_type,
+ e.advisory_rel,
+ e.product,
+ e.description,
+ e.synopsis,
+ e.topic,
+ e.solution,
+ TO_CHAR(e.issue_date, 'YYYYMMDDHH24MISS') issue_date,
+ TO_CHAR(e.update_date, 'YYYYMMDDHH24MISS') update_date,
+ TO_CHAR(e.last_modified, 'YYYYMMDDHH24MISS') last_modified,
+ e.refers_to,
+ e.notes
+ from rhnErrata e
+ where e.id = :errata_id
+ """)
+ CachedDumper.__init__(self, writer, statement=h, params=errata)
+
+ def _get_key(self, params):
+ errata_id = params['errata_id']
+ hash_val = hash_object_id(errata_id, 10)
+ return "xml-errata/%s/rhn-erratum-%s.xml" % (hash_val, errata_id)
+
+ def _dump_subelement(self, data):
+ log_debug(6, data)
+ return exportLib.ErrataSynopsisDumper.dump_subelement(self, data)
class ErrataDumper(CachedDumper, exportLib.ErrataDumper):
def __init__(self, writer, errata):