Initialize git-dpm

commit 66e3cc8957

21 changed files with 287 additions and 553 deletions
debian/.git-dpm (new file, 8 lines)

@@ -0,0 +1,8 @@
+# see git-dpm(1) from git-dpm package
+ed8b5f7485e7b707bd1aee95665254852ecf1848
+ed8b5f7485e7b707bd1aee95665254852ecf1848
+dfb12f36e61fa1e7eee95b322935d1ac6c0043e3
+dfb12f36e61fa1e7eee95b322935d1ac6c0043e3
+kitchen_1.1.1.orig.tar.gz
+89de2b9cc2d61710de8da66c55b50ffa0edbb034
+158105
debian/patches/explicit_Exception_catching (15 lines changed)

@@ -1,19 +1,21 @@
-From f2639ec4f393da7c790000b29525e331c81f2789 Mon Sep 17 00:00:00 2001
+From ed8b5f7485e7b707bd1aee95665254852ecf1848 Mon Sep 17 00:00:00 2001
 From: Simon Chopin <chopin.simon@gmail.com>
 Date: Tue, 30 Apr 2013 18:27:15 +0200
-Subject: [PATCH] Make kitchen.text.converters.exception_to_* not swallow
+Subject: Make kitchen.text.converters.exception_to_* not swallow
  general errors such as KeyboardInterrupt
 
 Bug: https://fedorahosted.org/kitchen/ticket/9
+
+Patch-Name: explicit_Exception_catching
 ---
  kitchen/text/converters.py | 4 ++--
  1 file changed, 2 insertions(+), 2 deletions(-)
 
 diff --git a/kitchen/text/converters.py b/kitchen/text/converters.py
-index 8b5aac6..0fb882f 100644
+index 0eb57b4..a89d092 100644
 --- a/kitchen/text/converters.py
 +++ b/kitchen/text/converters.py
-@@ -502,7 +502,7 @@ def exception_to_unicode(exc, converters=EXCEPTION_CONVERTERS):
+@@ -500,7 +500,7 @@ def exception_to_unicode(exc, converters=EXCEPTION_CONVERTERS):
      for func in converters:
          try:
              msg = func(exc)
@@ -22,7 +24,7 @@ index 8b5aac6..0fb882f 100644
              pass
          else:
              break
-@@ -534,7 +534,7 @@ def exception_to_bytes(exc, converters=EXCEPTION_CONVERTERS):
+@@ -532,7 +532,7 @@ def exception_to_bytes(exc, converters=EXCEPTION_CONVERTERS):
      for func in converters:
          try:
              msg = func(exc)
@@ -31,6 +33,3 @@ index 8b5aac6..0fb882f 100644
              pass
          else:
              break
---
-1.7.10.4
-
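Background on the behaviour this patch carries (a sketch, not part of the diff above): a bare "except:" clause catches everything derived from BaseException, including KeyboardInterrupt and SystemExit, whereas "except Exception:" lets those propagate. With a hypothetical helper mirroring the patched converter loop:

    def first_conversion(converters, exc):
        # A converter that fails with an ordinary error is skipped, but a
        # KeyboardInterrupt raised while converting is no longer swallowed,
        # because it does not derive from Exception.
        for func in converters:
            try:
                return func(exc)
            except Exception:
                pass
        return None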
debian/patches/fix_typos (42 lines changed)

@@ -1,9 +1,11 @@
-From 9a6c2b3ee1be305e6260b3f1e1fc15abc1249656 Mon Sep 17 00:00:00 2001
+From 5691cd2707367cb46b7bda425d093b3f39089bbf Mon Sep 17 00:00:00 2001
 From: Simon Chopin <chopin.simon@gmail.com>
 Date: Tue, 30 Apr 2013 17:28:27 +0200
-Subject: [PATCH] Fix several typos
+Subject: Fix several typos
 
 Bug: https://fedorahosted.org/kitchen/ticket/8
+
+Patch-Name: fix_typos
 ---
  docs/api-pycompat27.rst | 2 +-
  docs/designing-unicode-apis.rst | 2 +-
@@ -15,18 +17,22 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
  releaseutils.py | 2 +-
  8 files changed, 13 insertions(+), 13 deletions(-)
 
+diff --git a/docs/api-pycompat27.rst b/docs/api-pycompat27.rst
+index 6ef6db1..9654b31 100644
 --- a/docs/api-pycompat27.rst
 +++ b/docs/api-pycompat27.rst
-@@ -31,5 +31,5 @@
+@@ -31,5 +31,5 @@ New API Feature Ver
 
  .. seealso::
 
 -    The stdlib :mod:`subprocess` documenation
 +    The stdlib :mod:`subprocess` documentation
      For complete documentation on how to use subprocess
+diff --git a/docs/designing-unicode-apis.rst b/docs/designing-unicode-apis.rst
+index 24265fb..5c909a9 100644
 --- a/docs/designing-unicode-apis.rst
 +++ b/docs/designing-unicode-apis.rst
-@@ -581,7 +581,7 @@
+@@ -581,7 +581,7 @@ you should consider when designing a :class:`unicode`-only API:
     that you may not have thought of. Corner cases in these other places may
     mean that processing bytes is desirable.
  2. In python2, byte :class:`str` and :class:`unicode` are often used
@@ -35,9 +41,11 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
     your API may have received :class:`str` from some other API and it would be
     most convenient for their code if your API accepted it.
 
+diff --git a/docs/hacking.rst b/docs/hacking.rst
+index 9fbad1a..60f0ca6 100644
 --- a/docs/hacking.rst
 +++ b/docs/hacking.rst
-@@ -272,7 +272,7 @@
+@@ -272,7 +272,7 @@ version.
  Criteria for subpackages in kitchen
  ===================================
 
@@ -46,9 +54,11 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
 
  * Generally useful or needed for other pieces of kitchen.
 
+diff --git a/docs/unicode-frustrations.rst b/docs/unicode-frustrations.rst
+index c46e797..1588107 100644
 --- a/docs/unicode-frustrations.rst
 +++ b/docs/unicode-frustrations.rst
-@@ -33,7 +33,7 @@
+@@ -33,7 +33,7 @@ In python-2.x, there's two types that deal with text.
     with byte :class:`str` as those devices are going to need to deal with
     concrete implementations of what bytes represent your abstract characters.
 
@@ -57,7 +67,7 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
  are several important APIs where only one or the other will do the right
  thing. When you give the wrong type of string to an API that wants the other
  type, you may end up with an exception being raised (:exc:`UnicodeDecodeError`
-@@ -122,7 +122,7 @@
+@@ -122,7 +122,7 @@ we're all set::
 
  So that was simple, right? Well... there's one gotcha that makes things a bit
  harder to debug sometimes. When you attempt to write non-:term:`ASCII`
@@ -66,9 +76,11 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
  But what happens when you use :func:`print`? The terminal is a file-like object
  so it should raise an exception right? The answer to that is....
  *sometimes*:
+diff --git a/kitchen/i18n/__init__.py b/kitchen/i18n/__init__.py
+index 29561a2..a7eacf8 100644
 --- a/kitchen/i18n/__init__.py
 +++ b/kitchen/i18n/__init__.py
-@@ -251,8 +251,8 @@
+@@ -251,8 +251,8 @@ class DummyTranslations(object, gettext.NullTranslations):
      def _reencode_if_necessary(self, message, output_encoding):
          '''Return a byte string that's valid in a specific charset.
 
@@ -79,7 +91,7 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
          output encoding.
          '''
          valid = False
-@@ -668,7 +668,7 @@
+@@ -668,7 +668,7 @@ def get_translation_object(domain, localedirs=tuple(), languages=None,
         objects by default. These are superior to the
         :class:`gettext.GNUTranslations` and :class:`gettext.NullTranslations`
         objects because they are consistent in the string type they return and
@@ -88,9 +100,11 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
         :exc:`UnicodeError`.
      2. This function takes multiple directories to search for
         :term:`message catalogs`.
+diff --git a/kitchen/pycompat25/collections/_defaultdict.py b/kitchen/pycompat25/collections/_defaultdict.py
+index 0560a3b..639af29 100644
 --- a/kitchen/pycompat25/collections/_defaultdict.py
 +++ b/kitchen/pycompat25/collections/_defaultdict.py
-@@ -73,7 +73,7 @@
+@@ -73,7 +73,7 @@ the defaultdict class provided by python-2.5 and above.
 
  # Pylint disabled messages
  #
@@ -99,6 +113,8 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
  # the format of that name.
 
  import types
+diff --git a/kitchen/text/display.py b/kitchen/text/display.py
+index 8624109..431815b 100644
 --- a/kitchen/text/display.py
 +++ b/kitchen/text/display.py
 @@ -3,7 +3,7 @@
@@ -119,7 +135,7 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
  #   Toshio Kuratomi <toshio@fedoraproject.org>
  #   Tim Lauridsen
  #   Ville Skyttä
-@@ -210,7 +210,7 @@
+@@ -210,7 +210,7 @@ def _generate_combining_table():
      This is used to generate the :data:`~kitchen.text.display._COMBINING`
      table.
      '''
@@ -128,9 +144,11 @@ Bug: https://fedorahosted.org/kitchen/ticket/8
      # characters generated ifrom Unicode 5.0 data by:
      # "uniset +cat=Me +cat=Mn +cat=Cf -00AD +1160-11FF +200B c"
      markus_kuhn_combining_5_0 = (
+diff --git a/releaseutils.py b/releaseutils.py
+index d10d62e..ba5d8da 100755
 --- a/releaseutils.py
 +++ b/releaseutils.py
-@@ -35,7 +35,7 @@
+@@ -35,7 +35,7 @@ def main():
          shutil.rmtree('locale')
      except OSError, e:
          # If the error is that locale does not exist, we're okay. We're
debian/patches/normalize_test_unicode_name (32 lines changed)

@@ -1,13 +1,21 @@
-Description: Normalize the locale names when testing i18n features
-Author: Simon Chopin <chopin.simon@gmail.com>
+From dc04f079ab85676464ed231c3675646135280f6d Mon Sep 17 00:00:00 2001
+From: Simon Chopin <chopin.simon@gmail.com>
+Date: Thu, 8 Oct 2015 09:26:21 -0700
+Subject: Normalize the locale names when testing i18n features
+
 Forwarded: https://lists.fedorahosted.org/pipermail/kitchen-devel/2013-April/000020.html
 Last-Update: 2013-04-23
+
+Patch-Name: normalize_test_unicode_name
 ---
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
+ tests/test_i18n.py | 18 +++++++++---------
+ 1 file changed, 9 insertions(+), 9 deletions(-)
+
+diff --git a/tests/test_i18n.py b/tests/test_i18n.py
+index 62039ab..dc06a0c 100644
 --- a/tests/test_i18n.py
 +++ b/tests/test_i18n.py
-@@ -13,7 +13,7 @@
+@@ -13,7 +13,7 @@ import base_classes
 class TestI18N_UTF8(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -16,7 +24,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
 
      def tearDown(self):
          if self.old_LC_ALL:
-@@ -331,7 +331,7 @@
+@@ -331,7 +331,7 @@ class TestDummyTranslations(base_classes.UnicodeTestData):
 class TestI18N_Latin1(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -25,7 +33,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
 
      def tearDown(self):
          if self.old_LC_ALL:
-@@ -357,7 +357,7 @@
+@@ -357,7 +357,7 @@ class TestI18N_Latin1(unittest.TestCase):
 class TestNewGNUTranslationsNoMatch(TestDummyTranslations):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -34,7 +42,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
      def tearDown(self):
-@@ -370,7 +370,7 @@
+@@ -370,7 +370,7 @@ class TestNewGNUTranslationsNoMatch(TestDummyTranslations):
 class TestNewGNURealTranslations_UTF8(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -43,7 +51,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
      def tearDown(self):
-@@ -455,7 +455,7 @@
+@@ -455,7 +455,7 @@ class TestNewGNURealTranslations_UTF8(unittest.TestCase):
 class TestNewGNURealTranslations_Latin1(TestNewGNURealTranslations_UTF8):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -52,7 +60,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
      def tearDown(self):
-@@ -511,7 +511,7 @@
+@@ -511,7 +511,7 @@ class TestFallbackNewGNUTranslationsNoMatch(TestDummyTranslations):
 class TestFallbackNewGNURealTranslations_UTF8(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -61,7 +69,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.translations = i18n.get_translation_object('test',
              ['%s/data/locale/' % os.path.dirname(__file__),
              '%s/data/locale-old' % os.path.dirname(__file__)])
-@@ -598,7 +598,7 @@
+@@ -598,7 +598,7 @@ class TestFallbackNewGNURealTranslations_UTF8(unittest.TestCase):
 class TestFallbackNewGNURealTranslations_Latin1(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -70,7 +78,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.translations = i18n.get_translation_object('test',
              ['%s/data/locale/' % os.path.dirname(__file__),
              '%s/data/locale-old' % os.path.dirname(__file__)])
-@@ -685,7 +685,7 @@
+@@ -685,7 +685,7 @@ class TestFallbackNewGNURealTranslations_Latin1(unittest.TestCase):
 class TestFallback(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
@@ -79,7 +87,7 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
          self.gtranslations = i18n.get_translation_object('test',
              ['%s/data/locale/' % os.path.dirname(__file__),
              '%s/data/locale-old' % os.path.dirname(__file__)])
-@@ -721,7 +721,7 @@
+@@ -721,7 +721,7 @@ class TestFallback(unittest.TestCase):
 class TestDefaultLocaleDir(unittest.TestCase):
      def setUp(self):
          self.old_LC_ALL = os.environ.get('LC_ALL', None)
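The hunks above only show context lines, so the actual setUp changes made by normalize_test_unicode_name are not visible here; judging from the patch title, the tests switch to normalized locale names before exporting LC_ALL. A hypothetical sketch of that idea using only the stdlib (not the patch's actual code):

    import locale
    import os

    # Map a loosely spelled locale name to its canonical form, e.g.
    # 'en_US.utf8' -> 'en_US.UTF-8', before handing it to the test env.
    os.environ['LC_ALL'] = locale.normalize('en_US.utf8')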
debian/patches/remove_compat_layers (344 lines changed)

@@ -1,23 +1,30 @@
-Description: Remove the pycompat* submodules
+From 613271973b9fc62cbfce188b5d60157d695a572b Mon Sep 17 00:00:00 2001
+From: Simon Chopin <chopin.simon@gmail.com>
+Date: Thu, 8 Oct 2015 09:26:23 -0700
+Subject: Remove the pycompat* submodules
+
 Those are not needed in Debian as we already ship the latest runtime version.
-Author: Simon Chopin <chopin.simon@gmail.com>
 Forwarded: not-needed
 Last-Update: 2013-04-30
+Patch-Name: remove_compat_layers
 ---
-This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
---- a/setup.py
-+++ b/setup.py
-@@ -52,6 +52,6 @@
-     'Topic :: Software Development :: Libraries :: Python Modules',
-     'Topic :: Text Processing :: General',
-     ],
--    packages=find_packages(),
-+    packages=find_packages(exclude=['*pycompat*']),
-     data_files=[],
- )
+ kitchen/text/converters.py |   2 -
+ kitchen/text/misc.py       |   3 -
+ setup.py                   |   2 +-
+ tests/test__all__.py       |   2 -
+ tests/test_base64.py       | 190 ---------------------------------------
+ tests/test_pycompat.py     |  25 ------
+ tests/test_pycompat24.py   | 109 --------------------------
+ 7 files changed, 1 insertion(+), 332 deletions(-)
+ delete mode 100644 tests/test_base64.py
+ delete mode 100644 tests/test_pycompat.py
+ delete mode 100644 tests/test_pycompat24.py
 
+diff --git a/kitchen/text/converters.py b/kitchen/text/converters.py
+index 8b5aac6..0eb57b4 100644
 --- a/kitchen/text/converters.py
 +++ b/kitchen/text/converters.py
-@@ -53,8 +53,6 @@
+@@ -53,8 +53,6 @@ import xml.sax.saxutils
  # We need to access b_() for localizing our strings but we'll end up with
  # a circular import if we import it directly.
  import kitchen as k
@@ -26,9 +33,11 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
 
  from kitchen.text.exceptions import ControlCharError, XmlEncodeError
  from kitchen.text.misc import guess_encoding, html_entities_unescape, \
+diff --git a/kitchen/text/misc.py b/kitchen/text/misc.py
+index ca1be44..305ebfb 100644
 --- a/kitchen/text/misc.py
 +++ b/kitchen/text/misc.py
-@@ -40,11 +40,8 @@
+@@ -40,11 +40,8 @@ except ImportError:
  # We need to access b_() for localizing our strings but we'll end up with
  # a circular import if we import it directly.
  import kitchen as k
@@ -40,149 +49,23 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
  # Define a threshold for chardet confidence. If we fall below this we decode
  # byte strings we're guessing about as latin1
  _CHARDET_THRESHHOLD = 0.6
---- a/tests/test_pycompat.py
-+++ /dev/null
-@@ -1,25 +0,0 @@
--# -*- coding: utf-8 -*-
--#
--import unittest
--from nose import tools
--
--class TestUsableModules(unittest.TestCase):
--    def test_subprocess(self):
--        '''Test that importing subprocess as a module works
--        '''
--        try:
--            from kitchen.pycompat24.subprocess import Popen
--        except ImportError:
--            tools.ok_(False, 'Unable to import pycompat24.subprocess as a module')
--        try:
--            from kitchen.pycompat27.subprocess import Popen
--        except ImportError:
--            tools.ok_(False, 'Unable to import pycompat27.subprocess as a module')
--
--    def test_base64(self):
--        '''Test that importing base64 as a module works
--        '''
--        try:
--            from kitchen.pycompat24.base64 import b64encode
--        except ImportError:
--            tools.ok_(False, 'Unable to import pycompat24.base64 as a module')
---- a/tests/test_pycompat24.py
-+++ /dev/null
-@@ -1,109 +0,0 @@
--# -*- coding: utf-8 -*-
--#
--import unittest
--from nose import tools
--from nose.plugins.skip import SkipTest
--
--import __builtin__
--import base64 as py_b64
--import warnings
--
--from kitchen.pycompat24 import sets
--from kitchen.pycompat24.base64 import _base64 as base64
--
--class TestSetsNoOverwrite(unittest.TestCase):
--    def setUp(self):
--        self.set_val = None
--        self.frozenset_val = None
--        if not hasattr(__builtin__, 'set'):
--            __builtin__.set = self.set_val
--        else:
--            self.set_val = __builtin__.set
--        if not hasattr(__builtin__, 'frozenset'):
--            __builtin__.frozenset = self.frozenset_val
--        else:
--            self.frozenset_val = __builtin__.frozenset
--
--    def tearDown(self):
--        if self.frozenset_val == None:
--            del(__builtin__.frozenset)
--        if self.set_val == None:
--            del(__builtin__.set)
--
--    def test_sets_dont_overwrite(self):
--        '''Test that importing sets when there's already a set and frozenset defined does not overwrite
--        '''
--        sets.add_builtin_set()
--        tools.ok_(__builtin__.set == self.set_val)
--        tools.ok_(__builtin__.frozenset == self.frozenset_val)
--
--class TestDefineSets(unittest.TestCase):
--    def setUp(self):
--        warnings.simplefilter('ignore', DeprecationWarning)
--        self.set_val = None
--        self.frozenset_val = None
--        if hasattr(__builtin__, 'set'):
--            self.set_val = __builtin__.set
--            del(__builtin__.set)
--        if hasattr(__builtin__, 'frozenset'):
--            self.frozenset_val = __builtin__.frozenset
--            del(__builtin__.frozenset)
--
--    def tearDown(self):
--        warnings.simplefilter('default', DeprecationWarning)
--        if self.set_val:
--            __builtin__.set = self.set_val
--        else:
--            del(__builtin__.set)
--        if self.frozenset_val:
--            __builtin__.frozenset = self.frozenset_val
--        else:
--            del(__builtin__.frozenset)
--
--    def test_pycompat_defines_set(self):
--        '''Test that calling pycompat24.add_builtin_set() adds set and frozenset to __builtin__
--        '''
--        import sets as py_sets
--        sets.add_builtin_set()
--        if self.set_val:
--            tools.ok_(__builtin__.set == self.set_val)
--            tools.ok_(__builtin__.frozenset == self.frozenset_val)
--        else:
--            tools.ok_(__builtin__.set == py_sets.Set)
--            tools.ok_(__builtin__.frozenset == py_sets.ImmutableSet)
--
--class TestSubprocess(unittest.TestCase):
--    pass
--
--class TestBase64(unittest.TestCase):
--    b_byte_chars = ' '.join(map(chr, range(0, 256)))
--    b_byte_encoded = 'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB/IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC+IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg+SD6IPsg/CD9IP4g/w=='
--    b_byte_encoded_urlsafe = 'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB_IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC-IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg-SD6IPsg_CD9IP4g_w=='
--
--    def test_base64_encode(self):
--        tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded)
--        tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe)
--        tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded)
--        tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe)
--
--        tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded)
--        tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe)
--        tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded)
--        tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe)
--
--    def test_base64_decode(self):
--        tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars)
--        tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars)
--        tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars)
--        tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars)
--
--        tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars)
--        tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars)
--        tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars)
--        tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars)
--
--    def test_base64_stdlib_compat(self):
--        if not hasattr(py_b64, 'b64encode'):
--            raise SkipTest('Python-2.3 doesn\'t have b64encode to compare against')
--        tools.ok_(base64.b64encode(self.b_byte_chars) == py_b64.b64encode(self.b_byte_chars))
--        tools.ok_(base64.b64decode(self.b_byte_chars) == py_b64.b64decode(self.b_byte_chars))
+diff --git a/setup.py b/setup.py
+index 238af78..914f61c 100755
+--- a/setup.py
++++ b/setup.py
+@@ -52,6 +52,6 @@ setup(name='kitchen',
+     'Topic :: Software Development :: Libraries :: Python Modules',
+     'Topic :: Text Processing :: General',
+     ],
+-    packages=find_packages(),
++    packages=find_packages(exclude=['*pycompat*']),
+     data_files=[],
+ )
+diff --git a/tests/test__all__.py b/tests/test__all__.py
+index 9f58f7a..a0be25d 100644
 --- a/tests/test__all__.py
 +++ b/tests/test__all__.py
-@@ -4,8 +4,6 @@
+@@ -4,8 +4,6 @@ from nose import tools
  import os
  import types
  import warnings
@@ -191,6 +74,9 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
 
  def logit(msg):
      log = open('/var/tmp/test.log', 'a')
+diff --git a/tests/test_base64.py b/tests/test_base64.py
+deleted file mode 100644
+index bdb388d..0000000
 --- a/tests/test_base64.py
 +++ /dev/null
 @@ -1,190 +0,0 @@
@@ -384,3 +270,149 @@ This patch header follows DEP-3: http://dep.debian.net/deps/dep3/
 -#
 -#if __name__ == '__main__':
 -#    test_main()
+diff --git a/tests/test_pycompat.py b/tests/test_pycompat.py
+deleted file mode 100644
+index 50a059b..0000000
+--- a/tests/test_pycompat.py
++++ /dev/null
+@@ -1,25 +0,0 @@
[... the same 25 tests/test_pycompat.py removal lines shown earlier in this diff are re-added here, each prefixed with '+' ...]
+diff --git a/tests/test_pycompat24.py b/tests/test_pycompat24.py
+deleted file mode 100644
+index adea7fe..0000000
+--- a/tests/test_pycompat24.py
++++ /dev/null
+@@ -1,109 +0,0 @@
[... the same 109 tests/test_pycompat24.py removal lines shown earlier in this diff are re-added here, each prefixed with '+' ...]
docs/api-pycompat27.rst

@@ -31,5 +31,5 @@ New API Feature Ver
 
 .. seealso::
 
-    The stdlib :mod:`subprocess` documenation
+    The stdlib :mod:`subprocess` documentation
     For complete documentation on how to use subprocess

docs/designing-unicode-apis.rst

@@ -581,7 +581,7 @@ you should consider when designing a :class:`unicode`-only API:
    that you may not have thought of. Corner cases in these other places may
    mean that processing bytes is desirable.
 2. In python2, byte :class:`str` and :class:`unicode` are often used
-   interchangably with each other. That means that people programming against
+   interchangeably with each other. That means that people programming against
    your API may have received :class:`str` from some other API and it would be
    most convenient for their code if your API accepted it.

docs/hacking.rst

@@ -272,7 +272,7 @@ version.
 Criteria for subpackages in kitchen
 ===================================
 
-Supackages within kitchen should meet these criteria:
+Subpackages within kitchen should meet these criteria:
 
 * Generally useful or needed for other pieces of kitchen.

docs/unicode-frustrations.rst

@@ -33,7 +33,7 @@ In python-2.x, there's two types that deal with text.
    with byte :class:`str` as those devices are going to need to deal with
    concrete implementations of what bytes represent your abstract characters.
 
-In the python2 world many APIs use these two classes interchangably but there
+In the python2 world many APIs use these two classes interchangeably but there
 are several important APIs where only one or the other will do the right
 thing. When you give the wrong type of string to an API that wants the other
 type, you may end up with an exception being raised (:exc:`UnicodeDecodeError`
@@ -122,7 +122,7 @@ we're all set::
 
 So that was simple, right? Well... there's one gotcha that makes things a bit
 harder to debug sometimes. When you attempt to write non-:term:`ASCII`
-:class:`unicode` strings to a file-like object you get a traceback everytime.
+:class:`unicode` strings to a file-like object you get a traceback every time.
 But what happens when you use :func:`print`? The terminal is a file-like object
 so it should raise an exception right? The answer to that is....
 *sometimes*:

kitchen/i18n/__init__.py

@@ -251,8 +251,8 @@ class DummyTranslations(object, gettext.NullTranslations):
     def _reencode_if_necessary(self, message, output_encoding):
         '''Return a byte string that's valid in a specific charset.
 
-        .. warning:: This method may mangle the message if the inpput encoding
-            is not known or the message isn't represntable in the chosen
+        .. warning:: This method may mangle the message if the input encoding
+            is not known or the message isn't representable in the chosen
             output encoding.
         '''
         valid = False
@@ -668,7 +668,7 @@ def get_translation_object(domain, localedirs=tuple(), languages=None,
        objects by default.  These are superior to the
        :class:`gettext.GNUTranslations` and :class:`gettext.NullTranslations`
        objects because they are consistent in the string type they return and
-       they fix several issues that can causethe |stdlib|_ objects to throw
+       they fix several issues that can cause the |stdlib|_ objects to throw
        :exc:`UnicodeError`.
     2. This function takes multiple directories to search for
        :term:`message catalogs`.

kitchen/pycompat25/collections/_defaultdict.py

@@ -73,7 +73,7 @@ the defaultdict class provided by python-2.5 and above.
 
 # Pylint disabled messages
 #
-# :C0103: We're defnining a compatible class name therefore we need to match
+# :C0103: We're defining a compatible class name therefore we need to match
 # the format of that name.
 
 import types

kitchen/text/converters.py

@@ -53,8 +53,6 @@ import xml.sax.saxutils
 # We need to access b_() for localizing our strings but we'll end up with
 # a circular import if we import it directly.
 import kitchen as k
-from kitchen.pycompat24 import sets
-sets.add_builtin_set()
 
 from kitchen.text.exceptions import ControlCharError, XmlEncodeError
 from kitchen.text.misc import guess_encoding, html_entities_unescape, \
@@ -502,7 +500,7 @@ def exception_to_unicode(exc, converters=EXCEPTION_CONVERTERS):
     for func in converters:
         try:
             msg = func(exc)
-        except:
+        except Exception:
             pass
         else:
             break
@@ -534,7 +532,7 @@ def exception_to_bytes(exc, converters=EXCEPTION_CONVERTERS):
     for func in converters:
         try:
             msg = func(exc)
-        except:
+        except Exception:
             pass
         else:
             break

kitchen/text/display.py

@@ -3,7 +3,7 @@
 # Copyright (c) 2010 Red Hat, Inc.
 # Copyright (c) 2010 Ville Skyttä
 # Copyright (c) 2009 Tim Lauridsen
-# Copyright (c) 2007 Marcus Kuhn
+# Copyright (c) 2007 Markus Kuhn
 #
 # kitchen is free software; you can redistribute it and/or modify it under the
 # terms of the GNU Lesser General Public License as published by the Free
@@ -20,7 +20,7 @@
 #
 # Authors:
 #   James Antill <james@fedoraproject.org>
-#   Marcus Kuhn
+#   Markus Kuhn
 #   Toshio Kuratomi <toshio@fedoraproject.org>
 #   Tim Lauridsen
 #   Ville Skyttä
@@ -210,7 +210,7 @@ def _generate_combining_table():
     This is used to generate the :data:`~kitchen.text.display._COMBINING`
     table.
     '''
-    # Marcus Kuhn's sorted list of non-overlapping intervals of non-spacing
+    # Markus Kuhn's sorted list of non-overlapping intervals of non-spacing
     # characters generated ifrom Unicode 5.0 data by:
     # "uniset +cat=Me +cat=Mn +cat=Cf -00AD +1160-11FF +200B c"
     markus_kuhn_combining_5_0 = (

kitchen/text/misc.py

@@ -40,11 +40,8 @@ except ImportError:
 # We need to access b_() for localizing our strings but we'll end up with
 # a circular import if we import it directly.
 import kitchen as k
-from kitchen.pycompat24 import sets
 from kitchen.text.exceptions import ControlCharError
 
-sets.add_builtin_set()
-
 # Define a threshold for chardet confidence. If we fall below this we decode
 # byte strings we're guessing about as latin1
 _CHARDET_THRESHHOLD = 0.6

releaseutils.py

@@ -35,7 +35,7 @@ def main():
         shutil.rmtree('locale')
     except OSError, e:
         # If the error is that locale does not exist, we're okay. We're
-        # deleting it here, afterall
+        # deleting it here, after all
         if e.errno != 2:
             raise
setup.py (2 lines changed)

@@ -52,6 +52,6 @@ setup(name='kitchen',
     'Topic :: Software Development :: Libraries :: Python Modules',
     'Topic :: Text Processing :: General',
     ],
-    packages=find_packages(),
+    packages=find_packages(exclude=['*pycompat*']),
     data_files=[],
 )

tests/test__all__.py

@@ -4,8 +4,6 @@ from nose import tools
 import os
 import types
 import warnings
-from kitchen.pycompat24.sets import add_builtin_set
-add_builtin_set()
 
 def logit(msg):
     log = open('/var/tmp/test.log', 'a')
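A side note on the setup.py change above (a sketch, not from the repository): setuptools' find_packages() matches its exclude patterns against full dotted package names, so the single '*pycompat*' pattern drops kitchen.pycompat24, kitchen.pycompat25, kitchen.pycompat27 and their subpackages from the built package list. A quick, hypothetical way to check what remains, run from the unpacked kitchen source tree:

    # Hypothetical check script; prints the packages that would be installed.
    from setuptools import find_packages
    print(find_packages(exclude=['*pycompat*']))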
tests/test_base64.py (deleted)

@@ -1,190 +0,0 @@
-import unittest
-from test import test_support
-from kitchen.pycompat24.base64 import _base64 as base64
-
-
-class LegacyBase64TestCase(unittest.TestCase):
-    def test_encodestring(self):
-        eq = self.assertEqual
-        eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n")
-        eq(base64.encodestring("a"), "YQ==\n")
-        eq(base64.encodestring("ab"), "YWI=\n")
-        eq(base64.encodestring("abc"), "YWJj\n")
-        eq(base64.encodestring(""), "")
-        eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz"
-                               "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-                               "0123456789!@#0^&*();:<>,. []{}"),
-           "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-           "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
-           "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n")
-
-    def test_decodestring(self):
-        eq = self.assertEqual
-        eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org")
-        eq(base64.decodestring("YQ==\n"), "a")
-        eq(base64.decodestring("YWI=\n"), "ab")
-        eq(base64.decodestring("YWJj\n"), "abc")
-        eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-                               "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
-                               "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"),
-           "abcdefghijklmnopqrstuvwxyz"
-           "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-           "0123456789!@#0^&*();:<>,. []{}")
-        eq(base64.decodestring(''), '')
-
-    def test_encode(self):
-        eq = self.assertEqual
-        from cStringIO import StringIO
-        infp = StringIO('abcdefghijklmnopqrstuvwxyz'
-                        'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-                        '0123456789!@#0^&*();:<>,. []{}')
-        outfp = StringIO()
-        base64.encode(infp, outfp)
-        eq(outfp.getvalue(),
-           'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE'
-           'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT'
-           'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n')
-
-    def test_decode(self):
-        from cStringIO import StringIO
-        infp = StringIO('d3d3LnB5dGhvbi5vcmc=')
-        outfp = StringIO()
-        base64.decode(infp, outfp)
-        self.assertEqual(outfp.getvalue(), 'www.python.org')
-
-
-class BaseXYTestCase(unittest.TestCase):
-    def test_b64encode(self):
-        eq = self.assertEqual
-        # Test default alphabet
-        eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=")
-        eq(base64.b64encode('\x00'), 'AA==')
-        eq(base64.b64encode("a"), "YQ==")
-        eq(base64.b64encode("ab"), "YWI=")
-        eq(base64.b64encode("abc"), "YWJj")
-        eq(base64.b64encode(""), "")
-        eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz"
-                            "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-                            "0123456789!@#0^&*();:<>,. []{}"),
-           "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-           "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
-           "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
-        # Test with arbitrary alternative characters
-        eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd')
-        # Test standard alphabet
-        eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=")
-        eq(base64.standard_b64encode("a"), "YQ==")
-        eq(base64.standard_b64encode("ab"), "YWI=")
-        eq(base64.standard_b64encode("abc"), "YWJj")
-        eq(base64.standard_b64encode(""), "")
-        eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz"
-                                     "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-                                     "0123456789!@#0^&*();:<>,. []{}"),
-           "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-           "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
-           "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==")
-        # Test with 'URL safe' alternative characters
-        eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd')
-
-    def test_b64decode(self):
-        eq = self.assertEqual
-        eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org")
-        eq(base64.b64decode('AA=='), '\x00')
-        eq(base64.b64decode("YQ=="), "a")
-        eq(base64.b64decode("YWI="), "ab")
-        eq(base64.b64decode("YWJj"), "abc")
-        eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-                            "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
-                            "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
-           "abcdefghijklmnopqrstuvwxyz"
-           "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-           "0123456789!@#0^&*();:<>,. []{}")
-        eq(base64.b64decode(''), '')
-        # Test with arbitrary alternative characters
-        eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d')
-        # Test standard alphabet
-        eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org")
-        eq(base64.standard_b64decode("YQ=="), "a")
-        eq(base64.standard_b64decode("YWI="), "ab")
-        eq(base64.standard_b64decode("YWJj"), "abc")
-        eq(base64.standard_b64decode(""), "")
-        eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
-                                     "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT"
-                                     "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="),
-           "abcdefghijklmnopqrstuvwxyz"
-           "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-           "0123456789!@#0^&*();:<>,. []{}")
-        # Test with 'URL safe' alternative characters
-        eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d')
-
-    def test_b64decode_error(self):
-        self.assertRaises(TypeError, base64.b64decode, 'abc')
-
-    def test_b32encode(self):
-        eq = self.assertEqual
-        eq(base64.b32encode(''), '')
-        eq(base64.b32encode('\x00'), 'AA======')
-        eq(base64.b32encode('a'), 'ME======')
-        eq(base64.b32encode('ab'), 'MFRA====')
-        eq(base64.b32encode('abc'), 'MFRGG===')
-        eq(base64.b32encode('abcd'), 'MFRGGZA=')
-        eq(base64.b32encode('abcde'), 'MFRGGZDF')
-
-    def test_b32decode(self):
-        eq = self.assertEqual
-        eq(base64.b32decode(''), '')
-        eq(base64.b32decode('AA======'), '\x00')
-        eq(base64.b32decode('ME======'), 'a')
-        eq(base64.b32decode('MFRA===='), 'ab')
-        eq(base64.b32decode('MFRGG==='), 'abc')
-        eq(base64.b32decode('MFRGGZA='), 'abcd')
-        eq(base64.b32decode('MFRGGZDF'), 'abcde')
-
-    def test_b32decode_casefold(self):
-        eq = self.assertEqual
-        eq(base64.b32decode('', True), '')
-        eq(base64.b32decode('ME======', True), 'a')
-        eq(base64.b32decode('MFRA====', True), 'ab')
-        eq(base64.b32decode('MFRGG===', True), 'abc')
-        eq(base64.b32decode('MFRGGZA=', True), 'abcd')
-        eq(base64.b32decode('MFRGGZDF', True), 'abcde')
-        # Lower cases
-        eq(base64.b32decode('me======', True), 'a')
-        eq(base64.b32decode('mfra====', True), 'ab')
-        eq(base64.b32decode('mfrgg===', True), 'abc')
-        eq(base64.b32decode('mfrggza=', True), 'abcd')
-        eq(base64.b32decode('mfrggzdf', True), 'abcde')
-        # Expected exceptions
-        self.assertRaises(TypeError, base64.b32decode, 'me======')
|
|
||||||
# Mapping zero and one
|
|
||||||
eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe')
|
|
||||||
eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe')
|
|
||||||
eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe')
|
|
||||||
|
|
||||||
def test_b32decode_error(self):
|
|
||||||
self.assertRaises(TypeError, base64.b32decode, 'abc')
|
|
||||||
self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==')
|
|
||||||
|
|
||||||
def test_b16encode(self):
|
|
||||||
eq = self.assertEqual
|
|
||||||
eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF')
|
|
||||||
eq(base64.b16encode('\x00'), '00')
|
|
||||||
|
|
||||||
def test_b16decode(self):
|
|
||||||
eq = self.assertEqual
|
|
||||||
eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef')
|
|
||||||
eq(base64.b16decode('00'), '\x00')
|
|
||||||
# Lower case is not allowed without a flag
|
|
||||||
self.assertRaises(TypeError, base64.b16decode, '0102abcdef')
|
|
||||||
# Case fold
|
|
||||||
eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#def test_main():
|
|
||||||
# test_support.run_unittest(__name__)
|
|
||||||
#
|
|
||||||
#if __name__ == '__main__':
|
|
||||||
# test_main()
|
|
|
@@ -13,7 +13,7 @@ import base_classes
 class TestI18N_UTF8(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.UTF8'
+        os.environ['LC_ALL'] = 'pt_BR.UTF-8'
 
     def tearDown(self):
         if self.old_LC_ALL:
@@ -331,7 +331,7 @@ class TestDummyTranslations(base_classes.UnicodeTestData):
 class TestI18N_Latin1(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.ISO8859-1'
+        os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1'
 
     def tearDown(self):
         if self.old_LC_ALL:
@@ -357,7 +357,7 @@ class TestI18N_Latin1(unittest.TestCase):
 class TestNewGNUTranslationsNoMatch(TestDummyTranslations):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.utf8'
+        os.environ['LC_ALL'] = 'pt_BR.UTF-8'
         self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
     def tearDown(self):
@@ -370,7 +370,7 @@ class TestNewGNUTranslationsNoMatch(TestDummyTranslations):
 class TestNewGNURealTranslations_UTF8(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.UTF8'
+        os.environ['LC_ALL'] = 'pt_BR.UTF-8'
         self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
     def tearDown(self):
@@ -455,7 +455,7 @@ class TestNewGNURealTranslations_UTF8(unittest.TestCase):
 class TestNewGNURealTranslations_Latin1(TestNewGNURealTranslations_UTF8):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.ISO8859-1'
+        os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1'
         self.translations = i18n.get_translation_object('test', ['%s/data/locale/' % os.path.dirname(__file__)])
 
     def tearDown(self):
@@ -511,7 +511,7 @@ class TestFallbackNewGNUTranslationsNoMatch(TestDummyTranslations):
 class TestFallbackNewGNURealTranslations_UTF8(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.UTF8'
+        os.environ['LC_ALL'] = 'pt_BR.UTF-8'
         self.translations = i18n.get_translation_object('test',
                 ['%s/data/locale/' % os.path.dirname(__file__),
                  '%s/data/locale-old' % os.path.dirname(__file__)])
@@ -598,7 +598,7 @@ class TestFallbackNewGNURealTranslations_UTF8(unittest.TestCase):
 class TestFallbackNewGNURealTranslations_Latin1(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.ISO8859-1'
+        os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1'
         self.translations = i18n.get_translation_object('test',
                 ['%s/data/locale/' % os.path.dirname(__file__),
                  '%s/data/locale-old' % os.path.dirname(__file__)])
@@ -685,7 +685,7 @@ class TestFallbackNewGNURealTranslations_Latin1(unittest.TestCase):
 class TestFallback(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.ISO8859-1'
+        os.environ['LC_ALL'] = 'pt_BR.ISO-8859-1'
         self.gtranslations = i18n.get_translation_object('test',
                 ['%s/data/locale/' % os.path.dirname(__file__),
                  '%s/data/locale-old' % os.path.dirname(__file__)])
@@ -721,7 +721,7 @@ class TestFallback(unittest.TestCase):
 class TestDefaultLocaleDir(unittest.TestCase):
     def setUp(self):
         self.old_LC_ALL = os.environ.get('LC_ALL', None)
-        os.environ['LC_ALL'] = 'pt_BR.UTF8'
+        os.environ['LC_ALL'] = 'pt_BR.UTF-8'
         self.old_DEFAULT_LOCALEDIRS = i18n._DEFAULT_LOCALEDIR
         i18n._DEFAULT_LOCALEDIR = '%s/data/locale/' % os.path.dirname(__file__)
         self.translations = i18n.get_translation_object('test')
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-import unittest
-from nose import tools
-
-class TestUsableModules(unittest.TestCase):
-    def test_subprocess(self):
-        '''Test that importing subprocess as a module works
-        '''
-        try:
-            from kitchen.pycompat24.subprocess import Popen
-        except ImportError:
-            tools.ok_(False, 'Unable to import pycompat24.subprocess as a module')
-        try:
-            from kitchen.pycompat27.subprocess import Popen
-        except ImportError:
-            tools.ok_(False, 'Unable to import pycompat27.subprocess as a module')
-
-    def test_base64(self):
-        '''Test that importing base64 as a module works
-        '''
-        try:
-            from kitchen.pycompat24.base64 import b64encode
-        except ImportError:
-            tools.ok_(False, 'Unable to import pycompat24.base64 as a module')
@@ -1,109 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-import unittest
-from nose import tools
-from nose.plugins.skip import SkipTest
-
-import __builtin__
-import base64 as py_b64
-import warnings
-
-from kitchen.pycompat24 import sets
-from kitchen.pycompat24.base64 import _base64 as base64
-
-class TestSetsNoOverwrite(unittest.TestCase):
-    def setUp(self):
-        self.set_val = None
-        self.frozenset_val = None
-        if not hasattr(__builtin__, 'set'):
-            __builtin__.set = self.set_val
-        else:
-            self.set_val = __builtin__.set
-        if not hasattr(__builtin__, 'frozenset'):
-            __builtin__.frozenset = self.frozenset_val
-        else:
-            self.frozenset_val = __builtin__.frozenset
-
-    def tearDown(self):
-        if self.frozenset_val == None:
-            del(__builtin__.frozenset)
-        if self.set_val == None:
-            del(__builtin__.set)
-
-    def test_sets_dont_overwrite(self):
-        '''Test that importing sets when there's already a set and frozenset defined does not overwrite
-        '''
-        sets.add_builtin_set()
-        tools.ok_(__builtin__.set == self.set_val)
-        tools.ok_(__builtin__.frozenset == self.frozenset_val)
-
-class TestDefineSets(unittest.TestCase):
-    def setUp(self):
-        warnings.simplefilter('ignore', DeprecationWarning)
-        self.set_val = None
-        self.frozenset_val = None
-        if hasattr(__builtin__, 'set'):
-            self.set_val = __builtin__.set
-            del(__builtin__.set)
-        if hasattr(__builtin__, 'frozenset'):
-            self.frozenset_val = __builtin__.frozenset
-            del(__builtin__.frozenset)
-
-    def tearDown(self):
-        warnings.simplefilter('default', DeprecationWarning)
-        if self.set_val:
-            __builtin__.set = self.set_val
-        else:
-            del(__builtin__.set)
-        if self.frozenset_val:
-            __builtin__.frozenset = self.frozenset_val
-        else:
-            del(__builtin__.frozenset)
-
-    def test_pycompat_defines_set(self):
-        '''Test that calling pycompat24.add_builtin_set() adds set and frozenset to __builtin__
-        '''
-        import sets as py_sets
-        sets.add_builtin_set()
-        if self.set_val:
-            tools.ok_(__builtin__.set == self.set_val)
-            tools.ok_(__builtin__.frozenset == self.frozenset_val)
-        else:
-            tools.ok_(__builtin__.set == py_sets.Set)
-            tools.ok_(__builtin__.frozenset == py_sets.ImmutableSet)
-
-class TestSubprocess(unittest.TestCase):
-    pass
-
-class TestBase64(unittest.TestCase):
-    b_byte_chars = ' '.join(map(chr, range(0, 256)))
-    b_byte_encoded = 'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB/IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC+IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg+SD6IPsg/CD9IP4g/w=='
-    b_byte_encoded_urlsafe = 'ACABIAIgAyAEIAUgBiAHIAggCSAKIAsgDCANIA4gDyAQIBEgEiATIBQgFSAWIBcgGCAZIBogGyAcIB0gHiAfICAgISAiICMgJCAlICYgJyAoICkgKiArICwgLSAuIC8gMCAxIDIgMyA0IDUgNiA3IDggOSA6IDsgPCA9ID4gPyBAIEEgQiBDIEQgRSBGIEcgSCBJIEogSyBMIE0gTiBPIFAgUSBSIFMgVCBVIFYgVyBYIFkgWiBbIFwgXSBeIF8gYCBhIGIgYyBkIGUgZiBnIGggaSBqIGsgbCBtIG4gbyBwIHEgciBzIHQgdSB2IHcgeCB5IHogeyB8IH0gfiB_IIAggSCCIIMghCCFIIYghyCIIIkgiiCLIIwgjSCOII8gkCCRIJIgkyCUIJUgliCXIJggmSCaIJsgnCCdIJ4gnyCgIKEgoiCjIKQgpSCmIKcgqCCpIKogqyCsIK0griCvILAgsSCyILMgtCC1ILYgtyC4ILkguiC7ILwgvSC-IL8gwCDBIMIgwyDEIMUgxiDHIMggySDKIMsgzCDNIM4gzyDQINEg0iDTINQg1SDWINcg2CDZINog2yDcIN0g3iDfIOAg4SDiIOMg5CDlIOYg5yDoIOkg6iDrIOwg7SDuIO8g8CDxIPIg8yD0IPUg9iD3IPgg-SD6IPsg_CD9IP4g_w=='
-
-    def test_base64_encode(self):
-        tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded)
-        tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe)
-        tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded)
-        tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe)
-
-        tools.ok_(base64.b64encode(self.b_byte_chars) == self.b_byte_encoded)
-        tools.ok_(base64.b64encode(self.b_byte_chars, altchars='-_') == self.b_byte_encoded_urlsafe)
-        tools.ok_(base64.standard_b64encode(self.b_byte_chars) == self.b_byte_encoded)
-        tools.ok_(base64.urlsafe_b64encode(self.b_byte_chars) == self.b_byte_encoded_urlsafe)
-
-    def test_base64_decode(self):
-        tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars)
-        tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars)
-        tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars)
-        tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars)
-
-        tools.ok_(base64.b64decode(self.b_byte_encoded) == self.b_byte_chars)
-        tools.ok_(base64.b64decode(self.b_byte_encoded_urlsafe, altchars='-_') == self.b_byte_chars)
-        tools.ok_(base64.standard_b64decode(self.b_byte_encoded) == self.b_byte_chars)
-        tools.ok_(base64.urlsafe_b64decode(self.b_byte_encoded_urlsafe) == self.b_byte_chars)
-
-    def test_base64_stdlib_compat(self):
-        if not hasattr(py_b64, 'b64encode'):
-            raise SkipTest('Python-2.3 doesn\'t have b64encode to compare against')
-        tools.ok_(base64.b64encode(self.b_byte_chars) == py_b64.b64encode(self.b_byte_chars))
-        tools.ok_(base64.b64decode(self.b_byte_chars) == py_b64.b64decode(self.b_byte_chars))