mac9416 has proposed merging lp:~mac9416/keryx/devel into lp:keryx/devel.

Requested reviews:
  Keryx Admins (keryx-admins)
Sorry it took so long to get this merged. I had to reinstall Ubuntu, so I lost my PGP/SSH keys and had to get all that set up again. Now (once again) I'm not getting credit for the push because of a bzr setting, but I'll worry about that later. The important thing is that to_strings is working. :-)

-mac
--
https://code.launchpad.net/~mac9416/keryx/devel/+merge/12488
Your team Keryx Development Team is subscribed to branch lp:keryx/devel.
=== modified file 'libkeryx/definitions/dpkg/minideblib/AptRepoClient.py' (properties changed: -x to +x)
--- libkeryx/definitions/dpkg/minideblib/AptRepoClient.py	2009-09-08 05:22:12 +0000
+++ libkeryx/definitions/dpkg/minideblib/AptRepoClient.py	2009-09-27 15:25:17 +0000
@@ -29,10 +29,10 @@
 __revision__ = "r"+"$Revision$"[11:-2]
 __all__ = [ 'AptRepoClient', 'AptRepoException' ]
 
-from DpkgControl import DpkgParagraph
-from DpkgDatalist import DpkgOrderedDatalist
-from DpkgVersion import DpkgVersion, VersionError
-from LoggableObject import LoggableObject
+from minideblib.DpkgControl import DpkgParagraph
+from minideblib.DpkgDatalist import DpkgOrderedDatalist
+from minideblib.DpkgVersion import DpkgVersion, VersionError
+from minideblib.LoggableObject import LoggableObject
 
 import re, urllib2, os, types, time
 try:
@@ -40,6 +40,7 @@
 except NameError:
     from sets import Set as set
 
+
 def _universal_urlopen(url):
     """More robust urlopen. It understands gzip transfer encoding"""
     headers = { 'User-Agent': 'Mozilla/4.0 (compatible; Python/AptRepoClient)',
@@ -47,15 +48,7 @@
                 'Cache-Control': 'no-cache',
                 'Accept-encoding': 'gzip' }
     request = urllib2.Request(url, None, headers)
-    try:
-        import urlgrabber
-        from urlgrabber.progress import TextMeter
-
-        #BUG: TextMeter prints over itself
-        usock = urlgrabber.urlopen(url, http_headers=headers.items(),
-                                   progress_obj=TextMeter())
-    except:
-        usock = urllib2.urlopen(request)
+    usock = urllib2.urlopen(request)
     if usock.headers.get('content-encoding', None) == 'gzip' or url.endswith(".gz"):
         data = usock.read()
         import cStringIO, gzip
@@ -331,7 +324,7 @@
         """Loads repositories into internal data structures from a local cache.
            Replaces previous content if clear = True (default)"""
         self.load_repos(repoline, ignore_errors, clear, folder)
-    # Alias for load_local()
+    # Alias for load_local()d
     update_local = load_local
 
     def make_source_to_binaries_map(self):
@@ -419,6 +412,151 @@
     def get_available_binaries(self, base_url = None):
         return _get_available_pkgs(base_url, self.binaries)
 
+    def to_strings(self, binaries=True, sources=True):
+        """Exports loaded metadata to _Packages/_Sources file formatted strings
+           Outputs both source and binary data by default. Use binaries=False
+           to output only source data and sources=False for only binary data
+        """
+        def convert_to_url(base_url, distro, section, architecture, datatype):
+            """ Converts repository info into index file URL """
+            if datatype == 'Packages':
+                url = '%s_dists_%s_%s_binary-%s_%s' % (base_url, distro, section, architecture, datatype)
+            elif datatype == 'Sources':
+                url = '%s_dists_%s_%s_source_%s' % (base_url, distro, section, datatype)
+            return url
+
+        values = ('Package',
+                  'Essential',
+                  'Binary',
+                  'Priority',
+                  'Section',
+                  'Installed-Size',
+                  'Maintainer',
+                  'Original-Maintainer',
+                  'Build-Depends',
+                  'Build-Depends-Indep',
+                  'Architecture',
+                  'Source',
+                  'Version',
+                  'Replaces',
+                  'Provides',
+                  'Depends',
+                  'Pre-Depends',
+                  'Recommends',
+                  'Suggests',
+                  'Conflicts',
+                  'Breaks',
+                  'Filename',
+                  'Size',
+                  'MD5sum',
+                  'SHA1',
+                  'SHA256',
+                  'Description',
+                  'Enhances',
+                  'Origin',
+                  'Bugs',
+                  'Homepage',
+                  'Python-Version',
+                  'Tag',
+                  'Gstreamer-Decoders',
+                  'Gstreamer-Elements',
+                  'Gstreamer-Encoders',
+                  'Gstreamer-Uri-Sinks',
+                  'Gstreamer-Uri-Sources',
+                  'Gstreamer-Version',
+                  'Npp-Applications',
+                  'Npp-Description',
+                  'Npp-Filename',
+                  'Npp-Mimetype',
+                  'Npp-Name',
+                  'Original-Uploaders',
+                  'Python-Runtime',
+                  'Build-Essential',
+                  'Task',
+                  'Standards-Version',
+                  'Format',
+                  'Directory',
+                  'Files',
+                  'Uploaders',
+                  'Dm-Upload-Allowed',
+                  'Vcs-Bzr',
+                  'Vcs-Browser',
+                  'Vcs-Svn',
+                  'Vcs-Git',
+                  'Vcs-Hg',
+                  'Vcs-Cvs',
+                  'Vcs-Darcs',
+                  'Vcs-Arch',
+                  'Checksums-Sha1',
+                  'Checksums-Sha256',
+                  'Orig-Vcs-Browser',
+                  'Orig-Vcs-Svn',
+                  'Debian-Vcs-Browser',
+                  'Debian-Vcs-Svn',
+                  'Build-Conflicts',
+                  'Original-Vcs-Browser',
+                  'Original-Vcs-Svn',
+                  'Build-Conflicts-Indep',
+                  'Python-Standards-Version',
+                  'Debian-Vcs-Git',
+                  'X-Vcs-Svn',
+                  'X-Collab-Maint',
+                  'Original-Vcs-Bzr',
+                  'Vcs-Browse',
+                  'Original-Vcs-Git',)
+
+        # Special values that take multiple lines. All lower-case.
+        multiline_values = ('description',
+                            'files',
+                            'checksums-sha1',
+                            'checksums-sha256')
+
+        data = {}
+        output = {}
+
+        if binaries:
+            data.update({'Packages': self.binaries})
+        if sources:
+            data.update({'Sources': self.sources})
+        if not binaries and not sources:
+            return {}
+
+        for datatype in data.keys(): # 'Packages' or 'Sources'
+            for section in data[datatype].keys():
+                # Populate output with filename keys and empty values.
+                for arch in self._arch:
+                    filename = convert_to_url(section[0][7:].replace('/', '_'), section[1], section[2], arch, datatype)
+                    output.update({filename: ''})
+                for pkgname in data[datatype][section].keys():
+                    for pkg in data[datatype][section][pkgname]:
+                        string = ''
+                        for value in values:
+                            key = value.lower()
+
+                            if key in multiline_values:
+                                try:
+                                    if pkg[key] == '':
+                                        string += '%s:\n' % (value)
+                                    if type(pkg[key]) == type([]):
+                                        string += '%s: %s\n' % (value, '\n '.join(pkg[key]))
+                                    if type(pkg[key]) == type(''):
+                                        string += '%s: %s\n' % (value, pkg[key])
+                                except KeyError:
+                                    pass
+                            else:
+                                try: string += '%s: %s\n' % (value, pkg[key])
+                                except KeyError: pass
+                        string += '\n' # Put a spacer between this and the next entry.
+
+                        if pkg['architecture'] == 'all':
+                            for arch in self._arch:
+                                filename = convert_to_url(section[0][7:].replace('/', '_'), section[1], section[2], arch, datatype)
+                                output[filename] += string
+                        else:
+                            filename = convert_to_url(section[0][7:].replace('/', '_'), section[1], section[2], pkg['architecture'], datatype)
+                            output[filename] += string
+        return output
+
     def __get_best_version(self, package, base_url, pkgcache):
         """ Should return touple (base_url,package_version) with the best version found in cache.
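
For anyone wanting to exercise the branch, here is a minimal usage sketch of the new to_strings() export. Only load_repos() and to_strings() come from the diff above; the bare AptRepoClient() constructor call, the sources.list-style repository line, and the assumption that load_repos() accepts a single repository line with the remaining arguments defaulted are mine, added purely for illustration.

    # Hypothetical usage sketch -- not part of the merge. Only load_repos()
    # and to_strings() are taken from the diff; the constructor call and the
    # repository line below are assumptions.
    from minideblib.AptRepoClient import AptRepoClient

    client = AptRepoClient()
    # Assumed sources.list-style line; point it at the mirror/release you use.
    client.load_repos("deb http://archive.ubuntu.com/ubuntu karmic main")

    # to_strings() returns a dict mapping Packages/Sources-style index file
    # names to their reassembled contents; write each index back to disk.
    for filename, contents in client.to_strings(binaries=True, sources=False).items():
        with open(filename, "w") as index_file:
            index_file.write(contents)

Passing binaries=True, sources=False keeps only the binary (Packages) side, matching the switches described in the method's docstring.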