diff --git a/debian/README.Debian b/debian/README.Debian
new file mode 100644
index 0000000..6b6e029
--- /dev/null
+++ b/debian/README.Debian
@@ -0,0 +1,4 @@
+The compatibility modules have been removed from the Debian package: they
+exist to bring newer features to old versions of Python, which is unnecessary
+since newer versions are available in our repositories, and their embedded
+copies of stdlib modules made them difficult to support.
diff --git a/debian/changelog b/debian/changelog
new file mode 100644
index 0000000..06617cf
--- /dev/null
+++ b/debian/changelog
@@ -0,0 +1,10 @@
+kitchen (1.1.1-1) unstable; urgency=low
+
+  [ Simon Chopin ]
+  * Initial release. (Closes: #705930)
+  * Patch the test set to use consistent locale naming (forwarded)
+
+  [ Jakub Wilk ]
+  * Use canonical URIs for Vcs-* fields.
+
+ -- Simon Chopin  Fri, 21 Jun 2013 14:24:57 -0400
diff --git a/debian/clean b/debian/clean
new file mode 100644
index 0000000..58f04af
--- /dev/null
+++ b/debian/clean
@@ -0,0 +1 @@
+@test* kitchen.egg-info/*
diff --git a/debian/compat b/debian/compat
new file mode 100644
index 0000000..ec63514
--- /dev/null
+++ b/debian/compat
@@ -0,0 +1 @@
+9
diff --git a/debian/control b/debian/control
new file mode 100644
index 0000000..41d2771
--- /dev/null
+++ b/debian/control
@@ -0,0 +1,30 @@
+Source: kitchen
+Section: python
+Priority: optional
+Maintainer: Debian Python Modules Team
+Uploaders: Simon Chopin
+Build-Depends:
+ debhelper (>= 9),
+ python-all (>= 2.6.6-3~),
+ locales (>= 0),
+ python-nose,
+ python-setuptools
+Standards-Version: 3.9.4
+X-Python-Version: >= 2.3
+Homepage: https://fedorahosted.org/kitchen/
+Vcs-Svn: svn://anonscm.debian.org/python-modules/packages/kitchen/trunk/
+Vcs-Browser: http://anonscm.debian.org/viewvc/python-modules/packages/kitchen/trunk/
+
+Package: python-kitchen
+Architecture: all
+Depends: ${misc:Depends}, ${python:Depends}
+Description: Python library of containers, text and i18n helpers
+ The kitchen Python package aims at gathering in one place all the snippets of
+ code that the author originally carried around from project to project to
+ ease development.
+ .
+ A non-exhaustive list of the features provided by kitchen:
+ * helpers for text and specifically unicode handling, and i18n support
+ * a dictionary that treats unicode and str strings as different values
+ * a helper function to change any non-list value into a single-element list
+ * tools to help deal with the PEP 386 version format
diff --git a/debian/copyright b/debian/copyright
new file mode 100644
index 0000000..329eb7f
--- /dev/null
+++ b/debian/copyright
@@ -0,0 +1,109 @@
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: kitchen
+Source: https://fedorahosted.org/kitchen/
+
+Files: *
+Copyright: 2010-2013 Red Hat, Inc
+License: LGPL
+
+Files: kitchen/text/utf8.py kitchen/text/display.py
+Copyright: 2012 Red Hat, Inc
+ 2010 Ville Skyttä
+ 2009 Tim Lauridsen
+ 2007 Markus Kuhn
+License: LGPL and Kuhn
+
+Files: kitchen/text/misc.py
+Copyright: 2012 Red Hat, Inc
+ 2010 Seth Vidal
+License: LGPL
+
+Files: kitchen/i18n/__init__.py
+Copyright: 2010-2012 Red Hat, Inc
+ 2009 Milos Komarcevic
+ 2008 Tim Lauridsen
+License: LGPL
+
+Files: kitchen/pycompat27/subprocess/_subprocess.py
+Copyright: 2003-2005 Peter Astrand
+License: Python
+
+Files: kitchen/pycompat24/base64/_base64.py
+Copyright: 1995 Jack Jensen
+ 2003 Barry Warsaw
+License: Python
+
+Files: kitchen/pycompat25/collections/_defaultdict.py
+Copyright: 2007 Justin Kirkland
+License: Python
+
+Files: debian/*
+Copyright: 2011-2013 Simon Chopin
+License: LGPL
+
+License: Kuhn
+ Permission to use, copy, modify, and distribute this software
+ for any purpose and without fee is hereby granted. The author
+ disclaims all warranties with regard to this software.
+
+License: Python
+ 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"),
+    and the Individual or Organization ("Licensee") accessing and otherwise
+    using this software ("Python") in source or binary form and its
+    associated documentation.
+ .
+ 2. Subject to the terms and conditions of this License Agreement, PSF hereby
+    grants Licensee a nonexclusive, royalty-free, world-wide license to
+    reproduce, analyze, test, perform and/or display publicly, prepare
+    derivative works, distribute, and otherwise use Python alone or in any
+    derivative version, provided, however, that PSF's License Agreement and
+    PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004,
+    2005, 2006 Python Software Foundation; All Rights Reserved" are retained
+    in Python alone or in any derivative version prepared by Licensee.
+ .
+ 3. In the event Licensee prepares a derivative work that is based on or
+    incorporates Python or any part thereof, and wants to make the derivative
+    work available to others as provided herein, then Licensee hereby agrees
+    to include in any such work a brief summary of the changes made to
+    Python.
+ .
+ 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF
+    MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
+    EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY
+    REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY
+    PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD
+    PARTY RIGHTS.
+ .
+ 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY
+    INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
+    MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE
+    THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+ .
+ 6. This License Agreement will automatically terminate upon a material
+    breach of its terms and conditions.
+ .
+ 7. Nothing in this License Agreement shall be deemed to create any
+    relationship of agency, partnership, or joint venture between PSF and
+    Licensee. This License Agreement does not grant permission to use PSF
+    trademarks or trade name in a trademark sense to endorse or promote
+    products or services of Licensee, or any third party.
+ .
+ 8. By copying, installing or otherwise using Python, Licensee agrees to be
+    bound by the terms and conditions of this License Agreement.
+
+License: LGPL
+ kitchen is free software; you can redistribute it and/or modify it under the
+ terms of the GNU Lesser General Public License as published by the Free
+ Software Foundation; either version 2.1 of the License, or (at your option)
+ any later version.
+ .
+ kitchen is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
+ more details.
+ .
+ You should have received a copy of the GNU Lesser General Public License
+ along with kitchen; if not, see
+ <http://www.gnu.org/licenses/>.
+ On Debian systems, a copy of the LGPLv2.1 can be found at
+ /usr/share/common-licenses/LGPL-2.1
diff --git a/debian/patches/explicit_Exception_catching b/debian/patches/explicit_Exception_catching
new file mode 100644
index 0000000..9222ad6
--- /dev/null
+++ b/debian/patches/explicit_Exception_catching
@@ -0,0 +1,36 @@
+From f2639ec4f393da7c790000b29525e331c81f2789 Mon Sep 17 00:00:00 2001
+From: Simon Chopin
+Date: Tue, 30 Apr 2013 18:27:15 +0200
+Subject: [PATCH] Make kitchen.text.converters.exception_to_* not swallow
+ general errors such as KeyboardInterrupt
+Bug: https://fedorahosted.org/kitchen/ticket/9
+
+---
+ kitchen/text/converters.py |    4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/kitchen/text/converters.py b/kitchen/text/converters.py
+index 8b5aac6..0fb882f 100644
+--- a/kitchen/text/converters.py
++++ b/kitchen/text/converters.py
+@@ -502,7 +502,7 @@ def exception_to_unicode(exc, converters=EXCEPTION_CONVERTERS):
+     for func in converters:
+         try:
+             msg = func(exc)
+-        except:
++        except Exception:
+             pass
+         else:
+             break
+@@ -534,7 +534,7 @@ def exception_to_bytes(exc, converters=EXCEPTION_CONVERTERS):
+     for func in converters:
+         try:
+             msg = func(exc)
+-        except:
++        except Exception:
+             pass
+         else:
+             break
+--
+1.7.10.4
+
diff --git a/debian/patches/fix_typos b/debian/patches/fix_typos
new file mode 100644
index 0000000..b7dbe05
--- /dev/null
+++ b/debian/patches/fix_typos
@@ -0,0 +1,141 @@
+From 9a6c2b3ee1be305e6260b3f1e1fc15abc1249656 Mon Sep 17 00:00:00 2001
+From: Simon Chopin
+Date: Tue, 30 Apr 2013 17:28:27 +0200
+Subject: [PATCH] Fix several typos
+Bug: https://fedorahosted.org/kitchen/ticket/8
+
+---
+ docs/api-pycompat27.rst                        |    2 +-
+ docs/designing-unicode-apis.rst                |    2 +-
+ docs/hacking.rst                               |    2 +-
+ docs/unicode-frustrations.rst                  |    4 ++--
+ kitchen/i18n/__init__.py                       |    6 +++---
+ kitchen/pycompat25/collections/_defaultdict.py |    2 +-
+ kitchen/text/display.py                        |    6 +++---
+ releaseutils.py                                |    2 +-
+ 8 files changed, 13 insertions(+), 13 deletions(-)
+
+--- a/docs/api-pycompat27.rst
++++ b/docs/api-pycompat27.rst
+@@ -31,5 +31,5 @@
+ 
+ ..
seealso:: + +- The stdlib :mod:`subprocess` documenation ++ The stdlib :mod:`subprocess` documentation + For complete documentation on how to use subprocess +--- a/docs/designing-unicode-apis.rst ++++ b/docs/designing-unicode-apis.rst +@@ -581,7 +581,7 @@ + that you may not have thought of. Corner cases in these other places may + mean that processing bytes is desirable. + 2. In python2, byte :class:`str` and :class:`unicode` are often used +- interchangably with each other. That means that people programming against ++ interchangeably with each other. That means that people programming against + your API may have received :class:`str` from some other API and it would be + most convenient for their code if your API accepted it. + +--- a/docs/hacking.rst ++++ b/docs/hacking.rst +@@ -272,7 +272,7 @@ + Criteria for subpackages in kitchen + =================================== + +-Supackages within kitchen should meet these criteria: ++Subpackages within kitchen should meet these criteria: + + * Generally useful or needed for other pieces of kitchen. + +--- a/docs/unicode-frustrations.rst ++++ b/docs/unicode-frustrations.rst +@@ -33,7 +33,7 @@ + with byte :class:`str` as those devices are going to need to deal with + concrete implementations of what bytes represent your abstract characters. + +-In the python2 world many APIs use these two classes interchangably but there ++In the python2 world many APIs use these two classes interchangeably but there + are several important APIs where only one or the other will do the right + thing. When you give the wrong type of string to an API that wants the other + type, you may end up with an exception being raised (:exc:`UnicodeDecodeError` +@@ -122,7 +122,7 @@ + + So that was simple, right? Well... there's one gotcha that makes things a bit + harder to debug sometimes. When you attempt to write non-:term:`ASCII` +-:class:`unicode` strings to a file-like object you get a traceback everytime. ++:class:`unicode` strings to a file-like object you get a traceback every time. + But what happens when you use :func:`print`? The terminal is a file-like object + so it should raise an exception right? The answer to that is.... + *sometimes*: +--- a/kitchen/i18n/__init__.py ++++ b/kitchen/i18n/__init__.py +@@ -251,8 +251,8 @@ + def _reencode_if_necessary(self, message, output_encoding): + '''Return a byte string that's valid in a specific charset. + +- .. warning:: This method may mangle the message if the inpput encoding +- is not known or the message isn't represntable in the chosen ++ .. warning:: This method may mangle the message if the input encoding ++ is not known or the message isn't representable in the chosen + output encoding. + ''' + valid = False +@@ -668,7 +668,7 @@ + objects by default. These are superior to the + :class:`gettext.GNUTranslations` and :class:`gettext.NullTranslations` + objects because they are consistent in the string type they return and +- they fix several issues that can causethe |stdlib|_ objects to throw ++ they fix several issues that can cause the |stdlib|_ objects to throw + :exc:`UnicodeError`. + 2. This function takes multiple directories to search for + :term:`message catalogs`. +--- a/kitchen/pycompat25/collections/_defaultdict.py ++++ b/kitchen/pycompat25/collections/_defaultdict.py +@@ -73,7 +73,7 @@ + + # Pylint disabled messages + # +-# :C0103: We're defnining a compatible class name therefore we need to match ++# :C0103: We're defining a compatible class name therefore we need to match + # the format of that name. 
+ + import types +--- a/kitchen/text/display.py ++++ b/kitchen/text/display.py +@@ -3,7 +3,7 @@ + # Copyright (c) 2010 Red Hat, Inc. + # Copyright (c) 2010 Ville Skyttä + # Copyright (c) 2009 Tim Lauridsen +-# Copyright (c) 2007 Marcus Kuhn ++# Copyright (c) 2007 Markus Kuhn + # + # kitchen is free software; you can redistribute it and/or modify it under the + # terms of the GNU Lesser General Public License as published by the Free +@@ -20,7 +20,7 @@ + # + # Authors: + # James Antill +-# Marcus Kuhn ++# Markus Kuhn + # Toshio Kuratomi + # Tim Lauridsen + # Ville Skyttä +@@ -210,7 +210,7 @@ + This is used to generate the :data:`~kitchen.text.display._COMBINING` + table. + ''' +- # Marcus Kuhn's sorted list of non-overlapping intervals of non-spacing ++ # Markus Kuhn's sorted list of non-overlapping intervals of non-spacing + # characters generated ifrom Unicode 5.0 data by: + # "uniset +cat=Me +cat=Mn +cat=Cf -00AD +1160-11FF +200B c" + markus_kuhn_combining_5_0 = ( +--- a/releaseutils.py ++++ b/releaseutils.py +@@ -35,7 +35,7 @@ + shutil.rmtree('locale') + except OSError, e: + # If the error is that locale does not exist, we're okay. We're +- # deleting it here, afterall ++ # deleting it here, after all + if e.errno != 2: + raise + diff --git a/debian/patches/normalize_test_unicode_name b/debian/patches/normalize_test_unicode_name new file mode 100644 index 0000000..3eacde0 --- /dev/null +++ b/debian/patches/normalize_test_unicode_name @@ -0,0 +1,90 @@ +Description: Normalize the locale names when testing i18n features +Author: Simon Chopin +Forwarded: https://lists.fedorahosted.org/pipermail/kitchen-devel/2013-April/000020.html +Last-Update: 2013-04-23 + +--- +This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ +--- a/tests/test_i18n.py ++++ b/tests/test_i18n.py +@@ -13,7 +13,7 @@ + class TestI18N_UTF8(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.UTF8' ++ os.environ['LC_ALL'] = 'pt_BR.UTF-8' + + def tearDown(self): + if self.old_LC_ALL: +@@ -331,7 +331,7 @@ + class TestI18N_Latin1(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.ISO8859-1' ++ os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1' + + def tearDown(self): + if self.old_LC_ALL: +@@ -357,7 +357,7 @@ + class TestNewGNUTranslationsNoMatch(TestDummyTranslations): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.utf8' ++ os.environ['LC_ALL'] = 'pt_BR.UTF-8' + self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)]) + + def tearDown(self): +@@ -370,7 +370,7 @@ + class TestNewGNURealTranslations_UTF8(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.UTF8' ++ os.environ['LC_ALL'] = 'pt_BR.UTF-8' + self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)]) + + def tearDown(self): +@@ -455,7 +455,7 @@ + class TestNewGNURealTranslations_Latin1(TestNewGNURealTranslations_UTF8): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.ISO8859-1' ++ os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1' + self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)]) + + def tearDown(self): +@@ -511,7 +511,7 @@ + class TestFallbackNewGNURealTranslations_UTF8(unittest.TestCase): + def setUp(self): 
+ self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.UTF8' ++ os.environ['LC_ALL'] = 'pt_BR.UTF-8' + self.translations = i18n.get_translation_object('test', + ['%s/data/locale/' % os.path.dirname(__file__), + '%s/data/locale-old' % os.path.dirname(__file__)]) +@@ -598,7 +598,7 @@ + class TestFallbackNewGNURealTranslations_Latin1(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.ISO8859-1' ++ os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1' + self.translations = i18n.get_translation_object('test', + ['%s/data/locale/' % os.path.dirname(__file__), + '%s/data/locale-old' % os.path.dirname(__file__)]) +@@ -685,7 +685,7 @@ + class TestFallback(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.ISO8859-1' ++ os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1' + self.gtranslations = i18n.get_translation_object('test', + ['%s/data/locale/' % os.path.dirname(__file__), + '%s/data/locale-old' % os.path.dirname(__file__)]) +@@ -721,7 +721,7 @@ + class TestDefaultLocaleDir(unittest.TestCase): + def setUp(self): + self.old_LC_ALL = os.environ.get('LC_ALL', None) +- os.environ['LC_ALL'] = 'pt_BR.UTF8' ++ os.environ['LC_ALL'] = 'pt_BR.UTF-8' + self.old_DEFAULT_LOCALEDIRS = i18n._DEFAULT_LOCALEDIR + i18n._DEFAULT_LOCALEDIR = '%s/data/locale/' % os.path.dirname(__file__) + self.translations = i18n.get_translation_object('test') diff --git a/debian/patches/remove_compat_layers b/debian/patches/remove_compat_layers new file mode 100644 index 0000000..caf0f3a --- /dev/null +++ b/debian/patches/remove_compat_layers @@ -0,0 +1,386 @@ +Description: Remove the pycompat* submodules + Those are not needed in Debian as we already ship the latest runtime version. +Author: Simon Chopin +Forwarded: not-needed +Last-Update: 2013-04-30 +--- +This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ +--- a/setup.py ++++ b/setup.py +@@ -52,6 +52,6 @@ + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: Text Processing :: General', + ], +- packages=find_packages(), ++ packages=find_packages(exclude=['*pycompat*']), + data_files=[], + ) +--- a/kitchen/text/converters.py ++++ b/kitchen/text/converters.py +@@ -53,8 +53,6 @@ + # We need to access b_() for localizing our strings but we'll end up with + # a circular import if we import it directly. + import kitchen as k +-from kitchen.pycompat24 import sets +-sets.add_builtin_set() + + from kitchen.text.exceptions import ControlCharError, XmlEncodeError + from kitchen.text.misc import guess_encoding, html_entities_unescape, \ +--- a/kitchen/text/misc.py ++++ b/kitchen/text/misc.py +@@ -40,11 +40,8 @@ + # We need to access b_() for localizing our strings but we'll end up with + # a circular import if we import it directly. + import kitchen as k +-from kitchen.pycompat24 import sets + from kitchen.text.exceptions import ControlCharError + +-sets.add_builtin_set() +- + # Define a threshold for chardet confidence. 
If we fall below this we decode + # byte strings we're guessing about as latin1 + _CHARDET_THRESHHOLD = 0.6 +--- a/tests/test_pycompat.py ++++ /dev/null +@@ -1,25 +0,0 @@ +-# -*- coding: utf-8 -*- +-# +-import unittest +-from nose import tools +- +-class TestUsableModules(unittest.TestCase): +- def test_subprocess(self): +- '''Test that importing subprocess as a module works +- ''' +- try: +- from kitchen.pycompat24.subprocess import Popen +- except ImportError: +- tools.ok_(False, 'Unable to import pycompat24.subprocess as a module') +- try: +- from kitchen.pycompat27.subprocess import Popen +- except ImportError: +- tools.ok_(False, 'Unable to import pycompat27.subprocess as a module') +- +- def test_base64(self): +- '''Test that importing base64 as a module works +- ''' +- try: +- from kitchen.pycompat24.base64 import b64encode +- except ImportError: +- tools.ok_(False, 'Unable to import pycompat24.base64 as a module') +--- a/tests/test_pycompat24.py ++++ /dev/null +@@ -1,109 +0,0 @@ +-# -*- coding: utf-8 -*- +-# +-import unittest +-from nose import tools +-from nose.plugins.skip import SkipTest +- +-import __builtin__ +-import base64 as py_b64 +-import warnings +- +-from kitchen.pycompat24 import sets +-from kitchen.pycompat24.base64 import _base64 as base64 +- +-class TestSetsNoOverwrite(unittest.TestCase): +- def setUp(self): +- self.set_val = None +- self.frozenset_val = None +- if not hasattr(__builtin__, 'set'): +- __builtin__.set = self.set_val +- else: +- self.set_val = __builtin__.set +- if not hasattr(__builtin__, 'frozenset'): +- __builtin__.frozenset = self.frozenset_val +- else: +- self.frozenset_val = __builtin__.frozenset +- +- def tearDown(self): +- if self.frozenset_val == None: +- del(__builtin__.frozenset) +- if self.set_val == None: +- del(__builtin__.set) +- +- def test_sets_dont_overwrite(self): +- '''Test that importing sets when there's already a set and frozenset defined does not overwrite +- ''' +- sets.add_builtin_set() +- tools.ok_(__builtin__.set == self.set_val) +- tools.ok_(__builtin__.frozenset == self.frozenset_val) +- +-class TestDefineSets(unittest.TestCase): +- def setUp(self): +- warnings.simplefilter('ignore', DeprecationWarning) +- self.set_val = None +- self.frozenset_val = None +- if hasattr(__builtin__, 'set'): +- self.set_val = __builtin__.set +- del(__builtin__.set) +- if hasattr(__builtin__, 'frozenset'): +- self.frozenset_val = __builtin__.frozenset +- del(__builtin__.frozenset) +- +- def tearDown(self): +- warnings.simplefilter('default', DeprecationWarning) +- if self.set_val: +- __builtin__.set = self.set_val +- else: +- del(__builtin__.set) +- if self.frozenset_val: +- __builtin__.frozenset = self.frozenset_val +- else: +- del(__builtin__.frozenset) +- +- def test_pycompat_defines_set(self): +- '''Test that calling pycompat24.add_builtin_set() adds set and frozenset to __builtin__ +- ''' +- import sets as py_sets +- sets.add_builtin_set() +- if self.set_val: +- tools.ok_(__builtin__.set == self.set_val) +- tools.ok_(__builtin__.frozenset == self.frozenset_val) +- else: +- tools.ok_(__builtin__.set == py_sets.Set) +- tools.ok_(__builtin__.frozenset == py_sets.ImmutableSet) +- +-class TestSubprocess(unittest.TestCase): +- pass +- +-class TestBase64(unittest.TestCase): +- b_byte_chars = ' '.join(map(chr, range(0, 256))) +- b_byte_encoded = 
'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB/IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC+IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg+SD6IPsg/CD9IP4g/w==' +- b_byte_encoded_urlsafe = 'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB_IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC-IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg-SD6IPsg_CD9IP4g_w==' +- +- def test_base64_encode(self): +- tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded) +- tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe) +- tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded) +- tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe) +- +- tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded) +- tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe) +- tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded) +- tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe) +- +- def test_base64_decode(self): +- tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars) +- tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars) +- tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars) +- tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars) +- +- tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars) +- tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars) +- tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars) +- tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars) +- +- def test_base64_stdlib_compat(self): +- if not hasattr(py_b64, 'b64encode'): +- raise SkipTest('Python-2.3 doesn\'t have b64encode to compare against') +- tools.ok_(base64.b64encode(self.b_byte_chars) == py_b64.b64encode(self.b_byte_chars)) +- tools.ok_(base64.b64decode(self.b_byte_chars) == py_b64.b64decode(self.b_byte_chars)) +--- a/tests/test__all__.py ++++ b/tests/test__all__.py +@@ -4,8 +4,6 @@ + import os + import types + import warnings +-from kitchen.pycompat24.sets import add_builtin_set +-add_builtin_set() + + def logit(msg): + log = open('/var/tmp/test.log', 'a') +--- a/tests/test_base64.py ++++ /dev/null +@@ -1,190 +0,0 @@ +-import unittest +-from test import test_support +-from 
kitchen.pycompat24.base64 import _base64 as base64 +- +- +- +-class LegacyBase64TestCase(unittest.TestCase): +- def test_encodestring(self): +- eq = self.assertEqual +- eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n") +- eq(base64.encodestring("a"), "YQ==\n") +- eq(base64.encodestring("ab"), "YWI=\n") +- eq(base64.encodestring("abc"), "YWJj\n") +- eq(base64.encodestring(""), "") +- eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. []{}"), +- "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") +- +- def test_decodestring(self): +- eq = self.assertEqual +- eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org") +- eq(base64.decodestring("YQ==\n"), "a") +- eq(base64.decodestring("YWI=\n"), "ab") +- eq(base64.decodestring("YWJj\n"), "abc") +- eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), +- "abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. []{}") +- eq(base64.decodestring(''), '') +- +- def test_encode(self): +- eq = self.assertEqual +- from cStringIO import StringIO +- infp = StringIO('abcdefghijklmnopqrstuvwxyz' +- 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' +- '0123456789!@#0^&*();:<>,. []{}') +- outfp = StringIO() +- base64.encode(infp, outfp) +- eq(outfp.getvalue(), +- 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' +- 'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' +- 'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') +- +- def test_decode(self): +- from cStringIO import StringIO +- infp = StringIO('d3d3LnB5dGhvbi5vcmc=') +- outfp = StringIO() +- base64.decode(infp, outfp) +- self.assertEqual(outfp.getvalue(), 'www.python.org') +- +- +- +-class BaseXYTestCase(unittest.TestCase): +- def test_b64encode(self): +- eq = self.assertEqual +- # Test default alphabet +- eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") +- eq(base64.b64encode('\x00'), 'AA==') +- eq(base64.b64encode("a"), "YQ==") +- eq(base64.b64encode("ab"), "YWI=") +- eq(base64.b64encode("abc"), "YWJj") +- eq(base64.b64encode(""), "") +- eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. []{}"), +- "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") +- # Test with arbitrary alternative characters +- eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') +- # Test standard alphabet +- eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") +- eq(base64.standard_b64encode("a"), "YQ==") +- eq(base64.standard_b64encode("ab"), "YWI=") +- eq(base64.standard_b64encode("abc"), "YWJj") +- eq(base64.standard_b64encode(""), "") +- eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. 
[]{}"), +- "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") +- # Test with 'URL safe' alternative characters +- eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') +- +- def test_b64decode(self): +- eq = self.assertEqual +- eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") +- eq(base64.b64decode('AA=='), '\x00') +- eq(base64.b64decode("YQ=="), "a") +- eq(base64.b64decode("YWI="), "ab") +- eq(base64.b64decode("YWJj"), "abc") +- eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), +- "abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. []{}") +- eq(base64.b64decode(''), '') +- # Test with arbitrary alternative characters +- eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') +- # Test standard alphabet +- eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") +- eq(base64.standard_b64decode("YQ=="), "a") +- eq(base64.standard_b64decode("YWI="), "ab") +- eq(base64.standard_b64decode("YWJj"), "abc") +- eq(base64.standard_b64decode(""), "") +- eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" +- "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" +- "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), +- "abcdefghijklmnopqrstuvwxyz" +- "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +- "0123456789!@#0^&*();:<>,. []{}") +- # Test with 'URL safe' alternative characters +- eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') +- +- def test_b64decode_error(self): +- self.assertRaises(TypeError, base64.b64decode, 'abc') +- +- def test_b32encode(self): +- eq = self.assertEqual +- eq(base64.b32encode(''), '') +- eq(base64.b32encode('\x00'), 'AA======') +- eq(base64.b32encode('a'), 'ME======') +- eq(base64.b32encode('ab'), 'MFRA====') +- eq(base64.b32encode('abc'), 'MFRGG===') +- eq(base64.b32encode('abcd'), 'MFRGGZA=') +- eq(base64.b32encode('abcde'), 'MFRGGZDF') +- +- def test_b32decode(self): +- eq = self.assertEqual +- eq(base64.b32decode(''), '') +- eq(base64.b32decode('AA======'), '\x00') +- eq(base64.b32decode('ME======'), 'a') +- eq(base64.b32decode('MFRA===='), 'ab') +- eq(base64.b32decode('MFRGG==='), 'abc') +- eq(base64.b32decode('MFRGGZA='), 'abcd') +- eq(base64.b32decode('MFRGGZDF'), 'abcde') +- +- def test_b32decode_casefold(self): +- eq = self.assertEqual +- eq(base64.b32decode('', True), '') +- eq(base64.b32decode('ME======', True), 'a') +- eq(base64.b32decode('MFRA====', True), 'ab') +- eq(base64.b32decode('MFRGG===', True), 'abc') +- eq(base64.b32decode('MFRGGZA=', True), 'abcd') +- eq(base64.b32decode('MFRGGZDF', True), 'abcde') +- # Lower cases +- eq(base64.b32decode('me======', True), 'a') +- eq(base64.b32decode('mfra====', True), 'ab') +- eq(base64.b32decode('mfrgg===', True), 'abc') +- eq(base64.b32decode('mfrggza=', True), 'abcd') +- eq(base64.b32decode('mfrggzdf', True), 'abcde') +- # Expected exceptions +- self.assertRaises(TypeError, base64.b32decode, 'me======') +- # Mapping zero and one +- eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe') +- eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe') +- eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe') +- +- def test_b32decode_error(self): +- self.assertRaises(TypeError, base64.b32decode, 'abc') +- self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==') +- +- def test_b16encode(self): +- eq = self.assertEqual +- 
eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF')
+-        eq(base64.b16encode('\x00'), '00')
+-
+-    def test_b16decode(self):
+-        eq = self.assertEqual
+-        eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef')
+-        eq(base64.b16decode('00'), '\x00')
+-        # Lower case is not allowed without a flag
+-        self.assertRaises(TypeError, base64.b16decode, '0102abcdef')
+-        # Case fold
+-        eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef')
+-
+-
+-
+-#def test_main():
+-#    test_support.run_unittest(__name__)
+-#
+-#if __name__ == '__main__':
+-#    test_main()
diff --git a/debian/patches/series b/debian/patches/series
new file mode 100644
index 0000000..cb76e73
--- /dev/null
+++ b/debian/patches/series
@@ -0,0 +1,4 @@
+normalize_test_unicode_name
+fix_typos
+remove_compat_layers
+explicit_Exception_catching
diff --git a/debian/rules b/debian/rules
new file mode 100755
index 0000000..059e573
--- /dev/null
+++ b/debian/rules
@@ -0,0 +1,23 @@
+#!/usr/bin/make -f
+UPSTREAM= $(shell dpkg-parsechangelog | egrep '^Version: ' | sed -r 's/Version: (.*)-[0-9a-zA-Z~+.]+/\1/')
+
+%:
+	dh $@ --with python2
+override_dh_install:
+	dh_install
+	for py in $(shell pyversions -r); do \
+	  rm -f debian/python-kitchen/usr/lib/$$py/dist-packages/kitchen-$(UPSTREAM).egg-info/SOURCES.txt; \
+	done
+
+override_dh_auto_test:
+ifeq ($(filter nocheck,$(DEB_BUILD_OPTIONS)),)
+	mkdir -p debian/tmp/locales
+	localedef -f UTF-8 -i en_US ./debian/tmp/locales/en_US.UTF-8/
+	localedef -f UTF-8 -i pt_BR ./debian/tmp/locales/pt_BR.UTF-8/
+	localedef -f ISO-8859-1 -i pt_BR ./debian/tmp/locales/pt_BR.ISO-8859-1/
+	set -e; \
+	for py in $(shell pyversions -r); do \
+	  LOCPATH=$(CURDIR)/debian/tmp/locales/ $$py /usr/bin/nosetests tests; \
+	done
+endif
+
diff --git a/debian/source/format b/debian/source/format
new file mode 100644
index 0000000..163aaf8
--- /dev/null
+++ b/debian/source/format
@@ -0,0 +1 @@
+3.0 (quilt)
diff --git a/debian/watch b/debian/watch
new file mode 100644
index 0000000..3a43a16
--- /dev/null
+++ b/debian/watch
@@ -0,0 +1,3 @@
+version=3
+
+https://pypi.python.org/packages/source/k/kitchen/kitchen-(.+).tar.gz
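As a quick illustration of the text helpers advertised in the package description above, the short Python 2 sketch below round-trips a byte string through kitchen.text.converters. It is illustrative only: it assumes python-kitchen is installed, and the variable names are ours, not part of the package.

# -*- coding: utf-8 -*-
# Illustrative sketch: round-trip a UTF-8 byte str through the kitchen text
# helpers mentioned in the package description (assumes python-kitchen is
# installed).
from kitchen.text.converters import to_unicode, to_bytes

if __name__ == '__main__':
    byte_string = 'caf\xc3\xa9'         # UTF-8 encoded byte str
    text = to_unicode(byte_string)      # decoded to a unicode object
    print repr(text)                    # u'caf\xe9'
    print repr(to_bytes(text))          # encoded back to 'caf\xc3\xa9'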
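The debian/patches/explicit_Exception_catching patch above narrows a bare "except:" to "except Exception:" in the converter loops of exception_to_unicode() and exception_to_bytes(). The minimal Python 2 sketch below shows the behaviour that change preserves and the one it fixes; it is not kitchen's actual implementation, and the function and converter names are invented for illustration.

# -*- coding: utf-8 -*-
# Sketch of a converter loop in the spirit of the patched code: ordinary
# converter failures are still skipped, but a KeyboardInterrupt or SystemExit
# raised inside a converter now propagates instead of being swallowed by a
# bare "except:".
def first_successful_conversion(exc, converters):
    '''Return the result of the first converter that does not raise, else None.'''
    for func in converters:
        try:
            return func(exc)
        except Exception:   # deliberately narrower than a bare "except:"
            continue
    return None

def broken_converter(exc):
    raise RuntimeError('this converter always fails')   # skipped by the loop

def str_converter(exc):
    return str(exc)

if __name__ == '__main__':
    err = ValueError('something went wrong')
    # Prints 'something went wrong': the failing converter is skipped,
    # but a Ctrl-C inside a converter would no longer be hidden.
    print first_successful_conversion(err, [broken_converter, str_converter])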