From 6cff86ce6de27fbd4f9fc07716fb1205b14ffae4 Mon Sep 17 00:00:00 2001
From: Per Andersson
Date: Thu, 30 Jan 2014 01:25:11 +0100
Subject: Imported Upstream version 1.4

---
 bleach/tests/test_basics.py   |  65 ++++++++----
 bleach/tests/test_css.py      |  13 +--
 bleach/tests/test_links.py    | 241 +++++++++++++++++++++++++++---------------
 bleach/tests/test_security.py |  12 +--
 bleach/tests/test_unicode.py  |  47 ++++----
 bleach/tests/tools.py         |   7 ++
 6 files changed, 245 insertions(+), 140 deletions(-)
 create mode 100644 bleach/tests/tools.py

diff --git a/bleach/tests/test_basics.py b/bleach/tests/test_basics.py
index 9eca687..822407f 100644
--- a/bleach/tests/test_basics.py
+++ b/bleach/tests/test_basics.py
@@ -1,7 +1,9 @@
+import six
 import html5lib
 from nose.tools import eq_
 
 import bleach
+from bleach.tests.tools import in_
 
 
 def test_empty():
@@ -9,7 +11,12 @@ def test_empty():
 
 
 def test_nbsp():
-    eq_(u'\xa0test string\xa0', bleach.clean('&nbsp;test string&nbsp;'))
+    if six.PY3:
+        expected = '\xa0test string\xa0'
+    else:
+        expected = six.u('\\xa0test string\\xa0')
+
+    eq_(expected, bleach.clean('&nbsp;test string&nbsp;'))
 
 
 def test_comments_only():
@@ -18,8 +25,8 @@ def test_comments_only():
     eq_('', bleach.clean(comment))
     eq_('', bleach.clean(open_comment))
     eq_(comment, bleach.clean(comment, strip_comments=False))
-    eq_('%s-->' % open_comment, bleach.clean(open_comment,
-                                             strip_comments=False))
+    eq_('{0!s}-->'.format(open_comment), bleach.clean(open_comment,
+                                                      strip_comments=False))
 
 
 def test_with_comments():
@@ -55,9 +62,11 @@ def test_function_arguments():
 
 def test_named_arguments():
     ATTRS = {'a': ['rel', 'href']}
-    s = u'<a href="http://xx.com" rel="alternate">xx.com</a>'
-    eq_('<a href="http://xx.com">xx.com</a>', bleach.clean(s))
-    eq_(s, bleach.clean(s, attributes=ATTRS))
+    s = ('<a href="http://xx.com" rel="alternate">xx.com</a>',
+         '<a rel="alternate" href="http://xx.com">xx.com</a>')
+
+    eq_('<a href="http://xx.com">xx.com</a>', bleach.clean(s[0]))
+    in_(s, bleach.clean(s[0], attributes=ATTRS))
 
 
 def test_disallowed_html():
@@ -81,19 +90,19 @@ def test_bare_entities():
 
 
 def test_escaped_entities():
-    s = u'&lt;em&gt;strong&lt;/em&gt;'
+    s = '&lt;em&gt;strong&lt;/em&gt;'
     eq_(s, bleach.clean(s))
 
 
 def test_serializer():
-    s = u'<table></table>'
+    s = '<table></table>'
     eq_(s, bleach.clean(s, tags=['table']))
-    eq_(u'test<table></table>', bleach.linkify(u'<table>test</table>'))
-    eq_(u'<p>test</p>', bleach.clean(u'<p>test</p>', tags=['p']))
+    eq_('test<table></table>', bleach.linkify('<table>test</table>'))
+    eq_('<p>test</p>', bleach.clean('<p>test</p>', tags=['p']))
 
 
 def test_no_href_links():
-    s = u'<a name="anchor">x</a>'
+    s = '<a name="anchor">x</a>'
     eq_(s, bleach.linkify(s))
 
 
@@ -112,7 +121,7 @@ def test_stripping():
         bleach.clean('a test <em>with</em> <b>html</b> tags', strip=True))
     eq_('a test <em>with</em> <b>html</b> tags',
         bleach.clean('a test <em>with</em> <img src="http://example.com/"> '
-        '<b>html</b> tags', strip=True))
+                     '<b>html</b> tags', strip=True))
 
     s = '<p><a href="http://example.com/">link text</a></p>'
     eq_('<p>link text</p>', bleach.clean(s, tags=['p'], strip=True))
@@ -138,7 +147,7 @@ def test_allowed_styles():
 
 def test_idempotent():
     """Make sure that applying the filter twice doesn't change anything."""
-    dirty = u'<span>invalid & < extra http://link.com</span>'
+    dirty = '<span>invalid & < extra http://link.com</span>'
     clean = bleach.clean(dirty)
     eq_(clean, bleach.clean(clean))
 
@@ -147,10 +156,23 @@ def test_idempotent():
     eq_(linked, bleach.linkify(linked))
 
 
+def test_rel_already_there():
+    """Make sure rel attribute is updated not replaced"""
+    linked = ('Click <a href="http://example.com" rel="tooltip">'
+              'here</a>.')
+    link_good = (('Click <a href="http://example.com" rel="tooltip nofollow">'
+                  'here</a>.'),
+                 ('Click <a rel="tooltip nofollow" href="http://example.com">'
+                  'here</a>.'))
+
+    in_(link_good, bleach.linkify(linked))
+    in_(link_good, bleach.linkify(link_good[0]))
+
+
 def test_lowercase_html():
     """We should output lowercase HTML."""
-    dirty = u'<EM CLASS="FOO">BAR</EM>'
-    clean = u'<em class="FOO">BAR</em>'
+    dirty = '<EM CLASS="FOO">BAR</EM>'
+    clean = '<em class="FOO">BAR</em>'
     eq_(clean, bleach.clean(dirty, attributes=['class']))
 
 
@@ -160,14 +182,15 @@ def test_wildcard_attributes():
         'img': ['src'],
     }
     TAG = ['img', 'em']
-    dirty = (u'both <em id="foo" style="color: black">can</em> have '
-             u'<img id="bar" src="foo"/>')
-    clean = u'both <em id="foo">can</em> have <img id="bar" src="foo">'
-    eq_(clean, bleach.clean(dirty, tags=TAG, attributes=ATTR))
+    dirty = ('both <em id="foo" style="color: black">can</em> have '
+             '<img id="bar" src="foo"/>')
+    clean = ('both <em id="foo">can</em> have <img id="bar" src="foo">',
+             'both <em id="foo">can</em> have <img src="foo" id="bar">')
+    in_(clean, bleach.clean(dirty, tags=TAG, attributes=ATTR))
 
 
 def test_sarcasm():
     """Jokes should crash.<sarcasm/>"""
-    dirty = u'Yeah right <sarcasm/>'
-    clean = u'Yeah right &lt;sarcasm/&gt;'
+    dirty = 'Yeah right <sarcasm/>'
+    clean = 'Yeah right &lt;sarcasm/&gt;'
     eq_(clean, bleach.clean(dirty))
diff --git a/bleach/tests/test_css.py b/bleach/tests/test_css.py
index 588c8ce..b40596f 100644
--- a/bleach/tests/test_css.py
+++ b/bleach/tests/test_css.py
@@ -29,14 +29,14 @@ def test_allowed_css():
         ('font-family: "Arial";', 'font-family: "Arial";', ['font-family']),
     )
 
-    p_single = '<p style="%s">bar</p>'
-    p_double = "<p style='%s'>bar</p>"
+    p_single = '<p style="{0!s}">bar</p>'
+    p_double = "<p style='{0!s}'>bar</p>"
 
     def check(i, o, s):
         if '"' in i:
-            eq_(p_double % o, clean(p_double % i, styles=s))
+            eq_(p_double.format(o), clean(p_double.format(i), styles=s))
         else:
-            eq_(p_single % o, clean(p_single % i, styles=s))
+            eq_(p_single.format(o), clean(p_single.format(i), styles=s))
 
     for i, o, s in tests:
         yield check, i, o, s
@@ -70,12 +70,13 @@ def test_style_hang():
             """font: normal normal normal 100%/normal 'Courier New', """
             """'Andale Mono', monospace; background-position: initial """
            """initial; background-repeat: initial initial;""")
-    html = '<p style="%s">Hello world</p>' % style
+    html = '<p style="{0!s}">Hello world</p>'.format(style)
 
     styles = [
         'border', 'float', 'overflow', 'min-height', 'vertical-align',
         'white-space',
         'margin', 'margin-left', 'margin-top', 'margin-bottom', 'margin-right',
-        'padding', 'padding-left', 'padding-top', 'padding-bottom', 'padding-right',
+        'padding', 'padding-left', 'padding-top', 'padding-bottom',
+        'padding-right',
         'background', 'background-color',
         'font', 'font-size', 'font-weight', 'text-align', 'text-transform',
diff --git a/bleach/tests/test_links.py b/bleach/tests/test_links.py
index ac593c4..abf889d 100644
--- a/bleach/tests/test_links.py
+++ b/bleach/tests/test_links.py
@@ -1,18 +1,20 @@
-import urllib
+try:
+    from urllib.parse import quote_plus
+except ImportError:
+    from urllib import quote_plus
 
 from html5lib.tokenizer import HTMLTokenizer
 from nose.tools import eq_
 
 from bleach import linkify, url_re, DEFAULT_CALLBACKS as DC
-
-
+from bleach.tests.tools import in_
 
 
 def test_url_re():
     def no_match(s):
         match = url_re.search(s)
         if match:
-            assert not match, 'matched %s' % s[slice(*match.span())]
+            assert not match, 'matched {0!s}'.format(s[slice(*match.span())])
 
     yield no_match, 'just what i am looking for...it'
 
@@ -21,36 +23,48 @@ def test_empty():
 
 
 def test_simple_link():
-    eq_('a <a href="http://example.com" rel="nofollow">http://example.com</a>'
-        ' link',
+    in_(('a <a href="http://example.com" rel="nofollow">http://example.com</a>'
+         ' link',
+         'a <a rel="nofollow" href="http://example.com">http://example.com</a>'
+         ' link'),
         linkify('a http://example.com link'))
-    eq_('a <a href="https://example.com" rel="nofollow">https://example.com</a>'
-        ' link',
+    in_(('a <a href="https://example.com" rel="nofollow">https://example.com</a>'
+         ' link',
+         'a <a rel="nofollow" href="https://example.com">https://example.com</a>'
+         ' link'),
         linkify('a https://example.com link'))
-    eq_('an <a href="http://example.com" rel="nofollow">example.com</a> link',
-        linkify('an example.com link'))
+    in_(('a <a href="http://example.com" rel="nofollow">example.com</a> link',
+         'a <a rel="nofollow" href="http://example.com">example.com</a> link'),
+        linkify('a example.com link'))
 
 
 def test_trailing_slash():
-    eq_('<a href="http://example.com/" rel="nofollow">http://example.com/</a>',
-        linkify('http://example.com/'))
-    eq_('<a href="http://example.com/foo/" rel="nofollow">'
-        'http://example.com/foo/</a>',
-        linkify('http://example.com/foo/'))
-    eq_('<a href="http://example.com/foo/bar/" rel="nofollow">'
-        'http://example.com/foo/bar/</a>',
-        linkify('http://example.com/foo/bar/'))
+    in_(('<a href="http://examp.com/" rel="nofollow">http://examp.com/</a>',
+         '<a rel="nofollow" href="http://examp.com/">http://examp.com/</a>'),
+        linkify('http://examp.com/'))
+    in_(('<a href="http://example.com/foo/" rel="nofollow">'
+         'http://example.com/foo/</a>',
+         '<a rel="nofollow" href="http://example.com/foo/">'
+         'http://example.com/foo/</a>'),
+        linkify('http://example.com/foo/'))
+    in_(('<a href="http://example.com/foo/bar/" rel="nofollow">'
+         'http://example.com/foo/bar/</a>',
+         '<a rel="nofollow" href="http://example.com/foo/bar/">'
+         'http://example.com/foo/bar/</a>'),
+        linkify('http://example.com/foo/bar/'))
 
 
 def test_mangle_link():
     """We can muck with the href attribute of the link."""
     def filter_url(attrs, new=False):
-        attrs['href'] = (u'http://bouncer/?u=%s' %
-                         urllib.quote_plus(attrs['href']))
+        quoted = quote_plus(attrs['href'])
+        attrs['href'] = 'http://bouncer/?u={0!s}'.format(quoted)
         return attrs
 
-    eq_('<a href="http://bouncer/?u=http%3A%2F%2Fexample.com" rel="nofollow">'
-        'http://example.com</a>',
+    in_(('<a href="http://bouncer/?u=http%3A%2F%2Fexample.com" rel="nofollow">'
+         'http://example.com</a>',
+         '<a rel="nofollow" href="http://bouncer/?u=http%3A%2F%2Fexample.com">'
+         'http://example.com</a>'),
         linkify('http://example.com', DC + [filter_url]))
 
 
@@ -76,13 +90,19 @@ def test_email_link():
          'james@example.com.au</a> mailto', True,
          'aussie james@example.com.au mailto'),
         # This is kind of a pathological case. I guess we do our best here.
-        ('email to <a href="james@example.com" rel="nofollow">'
-         'james@example.com</a>', True,
-         'email to <a href="james@example.com">james@example.com</a>'),
+        (('email to <a href="james@example.com" rel="nofollow">'
+          'james@example.com</a>',
+          'email to <a rel="nofollow" href="james@example.com">'
+          'james@example.com</a>'),
+         True,
+         'email to <a href="james@example.com">james@example.com</a>'),
     )
 
     def _check(o, p, i):
-        eq_(o, linkify(i, parse_email=p))
+        if isinstance(o, (list, tuple)):
+            in_(o, linkify(i, parse_email=p))
+        else:
+            eq_(o, linkify(i, parse_email=p))
 
     for (o, p, i) in tests:
         yield _check, o, p, i
@@ -151,7 +171,8 @@ def test_set_attrs():
         attrs['rev'] = 'canonical'
         return attrs
 
-    eq_('<a href="http://ex.mp" rev="canonical">ex.mp</a>',
+    in_(('<a href="http://ex.mp" rev="canonical">ex.mp</a>',
+         '<a rev="canonical" href="http://ex.mp">ex.mp</a>'),
         linkify('ex.mp', [set_attr]))
 
 
@@ -179,15 +200,19 @@ def test_stop_email():
 
 
 def test_tlds():
-    eq_('<a href="http://example.com" rel="nofollow">example.com</a>',
+    in_(('<a href="http://example.com" rel="nofollow">example.com</a>',
+         '<a rel="nofollow" href="http://example.com">example.com</a>'),
        linkify('example.com'))
-    eq_('<a href="http://example.co.uk" rel="nofollow">example.co.uk</a>',
+    in_(('<a href="http://example.co.uk" rel="nofollow">example.co.uk</a>',
+         '<a rel="nofollow" href="http://example.co.uk">example.co.uk</a>'),
        linkify('example.co.uk'))
-    eq_('<a href="http://example.edu" rel="nofollow">example.edu</a>',
+    in_(('<a href="http://example.edu" rel="nofollow">example.edu</a>',
+         '<a rel="nofollow" href="http://example.edu">example.edu</a>'),
        linkify('example.edu'))
     eq_('example.xxx', linkify('example.xxx'))
     eq_(' brie', linkify(' brie'))
-    eq_('<a href="http://bit.ly/fun" rel="nofollow">bit.ly/fun</a>',
+    in_(('<a href="http://bit.ly/fun" rel="nofollow">bit.ly/fun</a>',
+         '<a rel="nofollow" href="http://bit.ly/fun">bit.ly/fun</a>'),
        linkify('bit.ly/fun'))
 
 
@@ -197,61 +222,81 @@ def test_escaping():
 
 def test_nofollow_off():
     eq_('<a href="http://example.com">example.com</a>',
-        linkify(u'example.com', []))
+        linkify('example.com', []))
 
 
 def test_link_in_html():
-    eq_('<i><a href="http://yy.com" rel="nofollow">http://yy.com</a></i>',
+    in_(('<i><a href="http://yy.com" rel="nofollow">http://yy.com</a></i>',
+         '<i><a rel="nofollow" href="http://yy.com">http://yy.com</a></i>'),
        linkify('<i>http://yy.com</i>'))
-    eq_('<em><strong><a href="http://xx.com" rel="nofollow">http://xx.com</a>'
-        '</strong></em>',
+
+    in_(('<em><strong><a href="http://xx.com" rel="nofollow">http://xx.com</a>'
+         '</strong></em>',
+         '<em><strong><a rel="nofollow" href="http://xx.com">http://xx.com</a>'
+         '</strong></em>'),
        linkify('<em><strong>http://xx.com</strong></em>'))
 
 
 def test_links_https():
-    eq_('<a href="https://yy.com" rel="nofollow">https://yy.com</a>',
+    in_(('<a href="https://yy.com" rel="nofollow">https://yy.com</a>',
+         '<a rel="nofollow" href="https://yy.com">https://yy.com</a>'),
        linkify('https://yy.com'))
 
 
 def test_add_rel_nofollow():
     """Verify that rel="nofollow" is added to an existing link"""
-    eq_('<a href="http://yy.com" rel="nofollow">http://yy.com</a>',
+    in_(('<a href="http://yy.com" rel="nofollow">http://yy.com</a>',
+         '<a rel="nofollow" href="http://yy.com">http://yy.com</a>'),
        linkify('<a href="http://yy.com">http://yy.com</a>'))
 
 
 def test_url_with_path():
-    eq_('<a href="http://example.com/path/to/file" rel="nofollow">'
-        'http://example.com/path/to/file</a>',
+    in_(('<a href="http://example.com/path/to/file" rel="nofollow">'
+         'http://example.com/path/to/file</a>',
+         '<a rel="nofollow" href="http://example.com/path/to/file">'
+         'http://example.com/path/to/file</a>'),
        linkify('http://example.com/path/to/file'))
 
 
 def test_link_ftp():
-    eq_('<a href="ftp://ftp.mozilla.org/some/file" rel="nofollow">'
-        'ftp://ftp.mozilla.org/some/file</a>',
+    in_(('<a href="ftp://ftp.mozilla.org/some/file" rel="nofollow">'
+         'ftp://ftp.mozilla.org/some/file</a>',
+         '<a rel="nofollow" href="ftp://ftp.mozilla.org/some/file">'
+         'ftp://ftp.mozilla.org/some/file</a>'),
        linkify('ftp://ftp.mozilla.org/some/file'))
 
 
 def test_link_query():
-    eq_('<a href="http://xx.com/?test=win" rel="nofollow">'
+    in_(('<a href="http://xx.com/?test=win" rel="nofollow">'
         'http://xx.com/?test=win</a>',
+         '<a rel="nofollow" href="http://xx.com/?test=win">'
+         'http://xx.com/?test=win</a>'),
        linkify('http://xx.com/?test=win'))
-    eq_('<a href="http://xx.com/?test=win" rel="nofollow">'
+    in_(('<a href="http://xx.com/?test=win" rel="nofollow">'
         'xx.com/?test=win</a>',
+         '<a rel="nofollow" href="http://xx.com/?test=win">'
+         'xx.com/?test=win</a>'),
        linkify('xx.com/?test=win'))
-    eq_('<a href="http://xx.com?test=win" rel="nofollow">'
+    in_(('<a href="http://xx.com?test=win" rel="nofollow">'
        'xx.com?test=win</a>',
+         '<a rel="nofollow" href="http://xx.com?test=win">'
+         'xx.com?test=win</a>'),
        linkify('xx.com?test=win'))
 
 
 def test_link_fragment():
-    eq_('<a href="http://xx.com/path#frag" rel="nofollow">'
-        'http://xx.com/path#frag</a>',
+    in_(('<a href="http://xx.com/path#frag" rel="nofollow">'
+         'http://xx.com/path#frag</a>',
+         '<a rel="nofollow" href="http://xx.com/path#frag">'
+         'http://xx.com/path#frag</a>'),
        linkify('http://xx.com/path#frag'))
 
 
 def test_link_entities():
-    eq_('<a href="http://xx.com/?a=1&amp;b=2" rel="nofollow">'
+    in_(('<a href="http://xx.com/?a=1&amp;b=2" rel="nofollow">'
        'http://xx.com/?a=1&amp;b=2</a>',
+         '<a rel="nofollow" href="http://xx.com/?a=1&amp;b=2">'
+         'http://xx.com/?a=1&amp;b=2</a>'),
        linkify('http://xx.com/?a=1&b=2'))
 
 
@@ -262,9 +307,12 @@ def test_escaped_html():
 
 
 def test_link_http_complete():
-    eq_('<a href="https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f"'
-        ' rel="nofollow">'
-        'https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f</a>',
+    in_(('<a href="https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f"'
+         ' rel="nofollow">'
+         'https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f</a>',
+         '<a rel="nofollow"'
+         ' href="https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f">'
+         'https://user:pass@ftp.mozilla.org/x/y.exe?a=b&amp;c=d&amp;e#f</a>'),
        linkify('https://user:pass@ftp.mozilla.org/x/y.exe?a=b&c=d&e#f'))
 
 
@@ -282,8 +330,10 @@ def test_javascript_url():
 
 def test_unsafe_url():
     """Any unsafe char ({}[]<>, etc.) in the path should end URL scanning."""
-    eq_('All your{"<a href="http://xx.yy.com/grover.png" rel="nofollow">'
-        'xx.yy.com/grover.png</a>"}base are',
+    in_(('All your{"<a href="http://xx.yy.com/grover.png" rel="nofollow">'
+         'xx.yy.com/grover.png</a>"}base are',
+         'All your{"<a rel="nofollow" href="http://xx.yy.com/grover.png">'
+         'xx.yy.com/grover.png</a>"}base are'),
        linkify('All your{"xx.yy.com/grover.png"}base are'))
 
 
@@ -291,17 +341,23 @@ def test_skip_pre():
     """Skip linkification in <pre> tags."""
     simple = 'http://xx.com <pre>http://xx.com</pre>'
     linked = ('<a href="http://xx.com" rel="nofollow">http://xx.com</a> '
+              '<pre>http://xx.com</pre>',
+              '<a rel="nofollow" href="http://xx.com">http://xx.com</a> '
               '<pre>http://xx.com</pre>')
     all_linked = ('<a href="http://xx.com" rel="nofollow">http://xx.com</a> '
                   '<pre><a href="http://xx.com" rel="nofollow">http://xx.com'
+                  '</a></pre>',
+                  '<a rel="nofollow" href="http://xx.com">http://xx.com</a> '
+                  '<pre><a rel="nofollow" href="http://xx.com">http://xx.com'
                   '</a></pre>')
-    eq_(linked, linkify(simple, skip_pre=True))
-    eq_(all_linked, linkify(simple))
+    in_(linked, linkify(simple, skip_pre=True))
+    in_(all_linked, linkify(simple))
 
     already_linked = '<pre><a href="http://xx.com">xx</a></pre>'
-    nofollowed = '<pre><a href="http://xx.com" rel="nofollow">xx</a></pre>'
-    eq_(nofollowed, linkify(already_linked))
-    eq_(nofollowed, linkify(already_linked, skip_pre=True))
+    nofollowed = ('<pre><a href="http://xx.com" rel="nofollow">xx</a></pre>',
+                  '<pre><a rel="nofollow" href="http://xx.com">xx</a></pre>')
+    in_(nofollowed, linkify(already_linked))
+    in_(nofollowed, linkify(already_linked, skip_pre=True))
 
 
 def test_libgl():
@@ -311,11 +367,13 @@ def test_libgl():
 
 def test_end_of_sentence():
     """example.com. should match."""
-    out = u'<a href="http://%s" rel="nofollow">%s</a>%s'
-    in_ = u'%s%s'
+    outs = ('<a href="http://{0!s}" rel="nofollow">{0!s}</a>{1!s}',
+            '<a rel="nofollow" href="http://{0!s}">{0!s}</a>{1!s}')
+    intxt = '{0!s}{1!s}'
 
     def check(u, p):
-        eq_(out % (u, u, p), linkify(in_ % (u, p)))
+        in_([out.format(u, p) for out in outs],
+            linkify(intxt.format(u, p)))
 
     tests = (
         ('example.com', '.'),
@@ -330,49 +388,50 @@ def test_end_of_sentence():
 
 def test_end_of_clause():
     """example.com/foo, shouldn't include the ,"""
-    eq_('<a href="http://ex.com/foo" rel="nofollow">ex.com/foo</a>, bar',
+    in_(('<a href="http://ex.com/foo" rel="nofollow">ex.com/foo</a>, bar',
+         '<a rel="nofollow" href="http://ex.com/foo">ex.com/foo</a>, bar'),
        linkify('ex.com/foo, bar'))
 
 
 def test_sarcasm():
     """Jokes should crash.<sarcasm/>"""
-    dirty = u'Yeah right <sarcasm/>'
-    clean = u'Yeah right &lt;sarcasm/&gt;'
+    dirty = 'Yeah right <sarcasm/>'
+    clean = 'Yeah right &lt;sarcasm/&gt;'
     eq_(clean, linkify(dirty))
 
 
 def test_wrapping_parentheses():
     """URLs wrapped in parantheses should not include them."""
-    out = u'%s<a href="http://%s" rel="nofollow">%s</a>%s'
+    outs = ('{0!s}<a href="http://{1!s}" rel="nofollow">{2!s}</a>{3!s}',
+            '{0!s}<a rel="nofollow" href="http://{1!s}">{2!s}</a>{3!s}')
 
     tests = (
-        ('(example.com)', out % ('(', 'example.com', 'example.com', ')')),
-        ('(example.com/)', out % ('(', 'example.com/', 'example.com/', ')')),
-        ('(example.com/foo)', out % ('(', 'example.com/foo',
-                                     'example.com/foo', ')')),
-        ('(((example.com/))))', out % ('(((', 'example.com/)',
-                                       'example.com/)', ')))')),
-        ('example.com/))', out % ('', 'example.com/))',
-                                  'example.com/))', '')),
+        ('(example.com)', ('(', 'example.com', 'example.com', ')')),
+        ('(example.com/)', ('(', 'example.com/', 'example.com/', ')')),
+        ('(example.com/foo)', ('(', 'example.com/foo',
+                               'example.com/foo', ')')),
+        ('(((example.com/))))', ('(((', 'example.com/)',
+                                 'example.com/)', ')))')),
+        ('example.com/))', ('', 'example.com/))', 'example.com/))', '')),
        ('http://en.wikipedia.org/wiki/Test_(assessment)',
-         out % ('', 'en.wikipedia.org/wiki/Test_(assessment)',
-                'http://en.wikipedia.org/wiki/Test_(assessment)', '')),
+         ('', 'en.wikipedia.org/wiki/Test_(assessment)',
+          'http://en.wikipedia.org/wiki/Test_(assessment)', '')),
        ('(http://en.wikipedia.org/wiki/Test_(assessment))',
-         out % ('(', 'en.wikipedia.org/wiki/Test_(assessment)',
-                'http://en.wikipedia.org/wiki/Test_(assessment)', ')')),
+         ('(', 'en.wikipedia.org/wiki/Test_(assessment)',
+          'http://en.wikipedia.org/wiki/Test_(assessment)', ')')),
        ('((http://en.wikipedia.org/wiki/Test_(assessment))',
-         out % ('((', 'en.wikipedia.org/wiki/Test_(assessment',
-                'http://en.wikipedia.org/wiki/Test_(assessment', '))')),
+         ('((', 'en.wikipedia.org/wiki/Test_(assessment',
+          'http://en.wikipedia.org/wiki/Test_(assessment', '))')),
        ('(http://en.wikipedia.org/wiki/Test_(assessment)))',
-         out % ('(', 'en.wikipedia.org/wiki/Test_(assessment))',
-                'http://en.wikipedia.org/wiki/Test_(assessment))', ')')),
+         ('(', 'en.wikipedia.org/wiki/Test_(assessment))',
+          'http://en.wikipedia.org/wiki/Test_(assessment))', ')')),
        ('(http://en.wikipedia.org/wiki/)Test_(assessment',
-         out % ('(', 'en.wikipedia.org/wiki/)Test_(assessment',
-                'http://en.wikipedia.org/wiki/)Test_(assessment', '')),
+         ('(', 'en.wikipedia.org/wiki/)Test_(assessment',
+          'http://en.wikipedia.org/wiki/)Test_(assessment', '')),
    )
 
     def check(test, expected_output):
-        eq_(expected_output, linkify(test))
+        in_([o.format(*expected_output) for o in outs], linkify(test))
 
     for test, expected_output in tests:
         yield check, test, expected_output
@@ -389,7 +448,9 @@ def test_ports():
    )
 
     def check(test, output):
-        eq_(u'<a href="{0}" rel="nofollow">{0}</a>{1}'.format(*output),
+        outs = ('<a href="{0}" rel="nofollow">{0}</a>{1}',
+                '<a rel="nofollow" href="{0}">{0}</a>{1}')
+        in_([out.format(*output) for out in outs],
             linkify(test))
 
     for test, output in tests:
@@ -406,8 +467,9 @@ def test_tokenizer():
 
 def test_ignore_bad_protocols():
     eq_('foohttp://bar',
         linkify('foohttp://bar'))
-    eq_('foohttp://<a href="http://exampl.com" rel="nofollow">exampl.com</a>',
-        linkify('foohttp://exampl.com'))
+    in_(('fohttp://<a href="http://exampl.com" rel="nofollow">exampl.com</a>',
+         'fohttp://<a rel="nofollow" href="http://exampl.com">exampl.com</a>'),
+        linkify('fohttp://exampl.com'))
 
 
 def test_max_recursion_depth():
@@ -419,22 +481,29 @@ def test_max_recursion_depth():
 
 def test_link_emails_and_urls():
     """parse_email=True shouldn't prevent URLs from getting linkified."""
     output = ('<a href="http://example.com" rel="nofollow">'
+              'http://example.com</a> <a href="mailto:person@example.com">'
+              'person@example.com</a>',
+              '<a rel="nofollow" href="http://example.com">'
               'http://example.com</a> <a href="mailto:person@example.com">'
               'person@example.com</a>')
-    eq_(output, linkify('http://example.com person@example.com',
+    in_(output, linkify('http://example.com person@example.com',
                         parse_email=True))
 
 
 def test_links_case_insensitive():
     """Protocols and domain names are case insensitive."""
     expect = ('<a href="HTTP://EXAMPLE.COM" rel="nofollow">'
+              'HTTP://EXAMPLE.COM</a>',
+              '<a rel="nofollow" href="HTTP://EXAMPLE.COM">'
               'HTTP://EXAMPLE.COM</a>')
-    eq_(expect, linkify('HTTP://EXAMPLE.COM'))
+    in_(expect, linkify('HTTP://EXAMPLE.COM'))
 
 
 def test_elements_inside_links():
-    eq_(u'<a href="#" rel="nofollow">hello<br></a>',
+    in_(('<a href="#" rel="nofollow">hello<br></a>',
+         '<a rel="nofollow" href="#">hello<br></a>'),
        linkify('<a href="#">hello<br></a>'))
 
-    eq_(u'<a href="#" rel="nofollow"><strong>bold</strong> hello<br></a>',
+    in_(('<a href="#" rel="nofollow"><strong>bold</strong> hello<br></a>',
+         '<a rel="nofollow" href="#"><strong>bold</strong> hello<br></a>'),
        linkify('<a href="#"><strong>bold</strong> hello<br></a>'))
diff --git a/bleach/tests/test_security.py b/bleach/tests/test_security.py
index 6c2b33f..6adab59 100644
--- a/bleach/tests/test_security.py
+++ b/bleach/tests/test_security.py
@@ -25,10 +25,10 @@ def test_invalid_attr():
         clean('<a onclick="evil" href="test">test</a>'))
     eq_('<img src="test">', clean('<img onclick="evil" src="test">',
-        tags=IMG, attributes=IMG_ATTR))
+                                  tags=IMG, attributes=IMG_ATTR))
     eq_('<img src="test">', clean('<img href="invalid" src="test">',
-        tags=IMG, attributes=IMG_ATTR))
+                                  tags=IMG, attributes=IMG_ATTR))
 
 
 def test_unquoted_attr():
@@ -57,7 +57,7 @@ def test_invalid_filter_attr():
 
     eq_('<img src="http://example.com/">',
         clean('<img onclick="evil" src="http://example.com/">',
-        tags=IMG, attributes=IMG_ATTR))
+              tags=IMG, attributes=IMG_ATTR))
     eq_('<img>', clean('<img onclick="evil" src="http://example.net/">',
                        tags=IMG, attributes=IMG_ATTR))
 
@@ -91,9 +91,9 @@ def test_nasty():
     """Nested, broken up, multiple tags, are still foiled!"""
     test = ('<scr<script></script>ipt type="text/javascript">alert("foo");'
             '</script>script<del></del>>')
-    expect = (u'&lt;scr&lt;script&gt;&lt;/script&gt;ipt type="text/javascript"'
-              u'&gt;alert("foo");&lt;/script&gt;script&lt;del&gt;&lt;/del&gt;'
-              u'&gt;')
+    expect = ('&lt;scr&lt;script&gt;&lt;/script&gt;ipt type="text/javascript"'
+              '&gt;alert("foo");&lt;/script&gt;script&lt;del&gt;&lt;/del&gt;'
+              '&gt;')
     eq_(expect, clean(test))
 
 
diff --git a/bleach/tests/test_unicode.py b/bleach/tests/test_unicode.py
index 67123cc..796924d 100644
--- a/bleach/tests/test_unicode.py
+++ b/bleach/tests/test_unicode.py
@@ -1,54 +1,59 @@
 # -*- coding: utf-8 -*-
-
+from __future__ import unicode_literals
 from nose.tools import eq_
 
 from bleach import clean, linkify
+from bleach.tests.tools import in_
 
 
 def test_japanese_safe_simple():
-    eq_(u'ヘルプとチュートリアル', clean(u'ヘルプとチュートリアル'))
-    eq_(u'ヘルプとチュートリアル', linkify(u'ヘルプとチュートリアル'))
+    eq_('ヘルプとチュートリアル', clean('ヘルプとチュートリアル'))
+    eq_('ヘルプとチュートリアル', linkify('ヘルプとチュートリアル'))
 
 
 def test_japanese_strip():
-    eq_(u'<em>ヘルプとチュートリアル</em>',
-        clean(u'<em>ヘルプとチュートリアル</em>'))
-    eq_(u'&lt;span&gt;ヘルプとチュートリアル&lt;/span&gt;',
-        clean(u'<span>ヘルプとチュートリアル</span>'))
+    eq_('<em>ヘルプとチュートリアル</em>',
+        clean('<em>ヘルプとチュートリアル</em>'))
+    eq_('&lt;span&gt;ヘルプとチュートリアル&lt;/span&gt;',
+        clean('<span>ヘルプとチュートリアル</span>'))
 
 
 def test_russian_simple():
-    eq_(u'Домашняя', clean(u'Домашняя'))
-    eq_(u'Домашняя', linkify(u'Домашняя'))
+    eq_('Домашняя', clean('Домашняя'))
+    eq_('Домашняя', linkify('Домашняя'))
 
 
 def test_mixed():
-    eq_(u'Домашняяヘルプとチュートリアル',
-        clean(u'Домашняяヘルプとチュートリアル'))
+    eq_('Домашняяヘルプとチュートリアル',
+        clean('Домашняяヘルプとチュートリアル'))
 
 
 def test_mixed_linkify():
-    eq_(u'Домашняя <a href="http://example.com" rel="nofollow">'
-        u'http://example.com</a> ヘルプとチュートリアル',
-        linkify(u'Домашняя http://example.com ヘルプとチュートリアル'))
+    in_(('Домашняя <a href="http://example.com" rel="nofollow">'
+         'http://example.com</a> ヘルプとチュートリアル',
+         'Домашняя <a rel="nofollow" href="http://example.com">'
+         'http://example.com</a> ヘルプとチュートリアル'),
+        linkify('Домашняя http://example.com ヘルプとチュートリアル'))
 
 
 def test_url_utf8():
     """Allow UTF8 characters in URLs themselves."""
-    out = u'<a href="%(url)s" rel="nofollow">%(url)s</a>'
+    outs = ('<a href="{0!s}" rel="nofollow">{0!s}</a>',
+            '<a rel="nofollow" href="{0!s}">{0!s}</a>')
+
+    out = lambda url: [x.format(url) for x in outs]
 
     tests = (
-        ('http://éxámplé.com/', out % {'url': u'http://éxámplé.com/'}),
-        ('http://éxámplé.com/íàñá/',
-         out % {'url': u'http://éxámplé.com/íàñá/'}),
+        ('http://éxámplé.com/', out('http://éxámplé.com/')),
+        ('http://éxámplé.com/íàñá/', out('http://éxámplé.com/íàñá/')),
        ('http://éxámplé.com/íàñá/?foo=bar',
-         out % {'url': u'http://éxámplé.com/íàñá/?foo=bar'}),
+         out('http://éxámplé.com/íàñá/?foo=bar')),
        ('http://éxámplé.com/íàñá/?fóo=bár',
-         out % {'url': u'http://éxámplé.com/íàñá/?fóo=bár'}),
+         out('http://éxámplé.com/íàñá/?fóo=bár')),
    )
 
    def check(test, expected_output):
-        eq_(expected_output, linkify(test))
+        in_(expected_output, linkify(test))
 
    for test, expected_output in tests:
        yield check, test, expected_output
diff --git a/bleach/tests/tools.py b/bleach/tests/tools.py
new file mode 100644
index 0000000..87f926c
--- /dev/null
+++ b/bleach/tests/tools.py
@@ -0,0 +1,7 @@
+
+
+def in_(l, a, msg=None):
"""Shorthand for 'assert a in l, "%r not in %r" % (a, l) + """ + if not a in l: + raise AssertionError(msg or "%r not in %r" % (a, l)) -- cgit v1.2.3