[cpif] r244 - trunk/frontend-web
svn at argo.es
Wed Jul 4 15:16:10 CEST 2007
Author: alvaro
Date: Wed Jul 4 15:16:10 2007
New Revision: 244
Log:
Smileys support.
Added:
trunk/frontend-web/parser_smileys.py
- copied, changed from r240, /trunk/frontend-web/parser_urls.py
Modified:
trunk/frontend-web/parser_bbcode.py
trunk/frontend-web/parsers.py
Modified: trunk/frontend-web/parser_bbcode.py
==============================================================================
--- trunk/frontend-web/parser_bbcode.py (original)
+++ trunk/frontend-web/parser_bbcode.py Wed Jul 4 15:16:10 2007
@@ -14,7 +14,7 @@
"i": ('<em>', '</em>', False, 'em'),
"url": ('<a href="%(arg)s">', '</a>', True, 'a'),
"quote*": ('<blockquote>', '</blockquote>', False, None),
- "quote": ('<blockquote><h4>%(arg)s escribió:</h4>', '</blockquote>', True, None),
+ "quote": ('<blockquote title="%(arg)s"><h4>%(arg)s escribió:</h4>', '</blockquote>', True, None),
"img": ('<img src="%(arg)s" />', None, True, None)
}
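
[Editor's note: a minimal sketch, assuming the tag table maps a BBCode tag to (open template, close template, takes_argument, html_context). It shows how the updated "quote" entry expands once its argument is substituted; render_quote() is a hypothetical helper, not part of parser_bbcode.py.]

# Hypothetical illustration of the new "quote" opening template.
QUOTE_OPEN = '<blockquote title="%(arg)s"><h4>%(arg)s escribió:</h4>'
QUOTE_CLOSE = '</blockquote>'

def render_quote(author, body):
    # Substitute the quoted author into the opening template; the new
    # title attribute now carries the same value as the heading.
    return (QUOTE_OPEN % {"arg": author}) + body + QUOTE_CLOSE

# render_quote("alvaro", "hola") ==
# '<blockquote title="alvaro"><h4>alvaro escribió:</h4>hola</blockquote>'
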
Copied: trunk/frontend-web/parser_smileys.py (from r240, /trunk/frontend-web/parser_urls.py)
==============================================================================
--- /trunk/frontend-web/parser_urls.py (original)
+++ trunk/frontend-web/parser_smileys.py Wed Jul 4 15:16:10 2007
@@ -1,78 +1,91 @@
 # $Id$
-allowed_urls = ['http://', 'ftp://']
+from globales import monitor
-def url_o_matic(url):
-    return """<a href="%(url)s" title="%(url)s">%(url)s</a>""" % locals()
+@monitor
+def get_smiley_list(conn):
+    import database
+    return database.get_smiley_list(conn)
+
+smiley_list = get_smiley_list()
 def parse(text, context = None):
-    if context == "a":
-        return None, [(True,text,context)]
     import re
     # Everybody stand back!!!
-    regexp = re.compile("|".join(["(\s|\A)(%s(?:[a-zA-Z0-9]+\.)+[a-zA-Z]{2,4}(?:/\S+)*/{0,1})(\s|\Z)" % i for i in allowed_urls]))
-    list = regexp.split(text)
-    tokens = []
-    for i in list:
-        if i and regexp.match(i):
-            tokens.append((False, url_o_matic(i), "a"))
-        elif i:
-            tokens.append((True, i, context))
-    return None, tokens
-
-import unittest
-class Test(unittest.TestCase):
-    urls = (
-        'http://example.com',
-        'http://example.com/',
-        'http://example.com/foo/',
-        'http://example.com/foo/bar/baz/gazonk',
-        'http://example.com/foo?bar=baz',
-        'http://example.com/foo?bar=baz&gazonk&gadaf',
-        'http://www.example.com',
-        'http://www.www.example.com',
-        'http://buh.bih.bah.beh.example.com',
-    )
-    badurls = (
-        'htp://example.com',
-        'http:/example.com',
-        'http//example.com',
-        'http/example.com',
-        'http://example.c/',
-        'http://example.c/',
-        'http://.com',
-        'hattp://example.comcomcom',
-    )
-
-    def testConversion(self):
-        retval, tokens = parse("http://example.com")
-        if retval:
-            self.fail()
-        self.assertEqual(tokens[0][1], """<a href="http://example.com" title="http://example.com">http://example.com</a>""")
-
-    def testURLbad(self):
-        import re
-        regexp = re.compile("|".join(["(?:\s|\A)(%s(?:[a-zA-Z0-9]+\.)+[a-zA-Z]{2,4}(?:/\S+)*)/{0,1}(?:\s|\Z)" % i for i in allowed_urls]))
-        for i in self.badurls:
-            self.failIf(regexp.findall(i))
-            import random
-            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$foo """,10)
-            self.failIf(regexp.findall("%s%s%s" % (rnd, i, rnd)))
-        for i in self.urls:
-            import random
-            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$foo """,10)
-            self.failIf(regexp.findall("%s%s%s" % (rnd, i, rnd)))
-
-    def testURLok(self):
-        import re
-        regexp = re.compile("|".join(["(\s|\A)(%s(?:[a-zA-Z0-9]+\.)+[a-zA-Z]{2,4}(?:/\S+)*/{0,1})(\s|\Z)" % i for i in allowed_urls]))
-        for i in self.urls:
-            self.assert_(regexp.findall(i))
-            import random
-            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$%foo """,10)
-            self.assert_(regexp.findall("%s %s %s" % (rnd, i, rnd)))
-
-
-if __name__ == "__main__":
-    import unittest
-    unittest.main()
+    tokens = [(True, text, context)]
+    for smiley in smiley_list:
+        aux = []
+        for j in tokens:
+            if not j[0]:
+                aux.append(j)
+            else:
+                list = j[1].split(smiley[0])
+                while list:
+                    el = list.pop(0)
+                    if el:
+                        aux.append((True,el,context))
+                    if list:
+                        img = '<img alt="%s" src="/static/smileys/%s" />' % smiley
+                        aux.append((False,img,context))
+        tokens = aux
+
+    return False, tokens
+    return False, [(True, text, context)]
+
+#import unittest
+#class Test(unittest.TestCase):
+#    urls = (
+#        'http://example.com',
+#        'http://example.com/',
+#        'http://example.com/foo/',
+#        'http://example.com/foo/bar/baz/gazonk',
+#        'http://example.com/foo?bar=baz',
+#        'http://example.com/foo?bar=baz&gazonk&gadaf',
+#        'http://www.example.com',
+#        'http://www.www.example.com',
+#        'http://buh.bih.bah.beh.example.com',
+#    )
+#    badurls = (
+#        'htp://example.com',
+#        'http:/example.com',
+#        'http//example.com',
+#        'http/example.com',
+#        'http://example.c/',
+#        'http://example.c/',
+#        'http://.com',
+#        'hattp://example.comcomcom',
+#    )
+#
+#    def testConversion(self):
+#        retval, tokens = parse("http://example.com")
+#        if retval:
+#            self.fail()
+#        self.assertEqual(tokens[0][1], """<a href="http://example.com" title="http://example.com">http://example.com</a>""")
+#
+#    def testURLbad(self):
+#        import re
+#        regexp = re.compile("|".join(["(?:\s|\A)(%s(?:[a-zA-Z0-9]+\.)+[a-zA-Z]{2,4}(?:/\S+)*)/{0,1}(?:\s|\Z)" % i for i in allowed_urls]))
+#        for i in self.badurls:
+#            self.failIf(regexp.findall(i))
+#            import random
+#            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$foo """,10)
+#            self.failIf(regexp.findall("%s%s%s" % (rnd, i, rnd)))
+#        for i in self.urls:
+#            import random
+#            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$foo """,10)
+#            self.failIf(regexp.findall("%s%s%s" % (rnd, i, rnd)))
+#
+#    def testURLok(self):
+#        import re
+#        regexp = re.compile("|".join(["(\s|\A)(%s(?:[a-zA-Z0-9]+\.)+[a-zA-Z]{2,4}(?:/\S+)*/{0,1})(\s|\Z)" % i for i in allowed_urls]))
+#        for i in self.urls:
+#            self.assert_(regexp.findall(i))
+#            import random
+#            rnd = random.sample("""?=)(/&%$"!'0987654321+-.,..,;:_"**$%foo """,10)
+#            self.assert_(regexp.findall("%s %s %s" % (rnd, i, rnd)))
+#
+#
+#if __name__ == "__main__":
+#    lista = get_smiley_list()
+#    import unittest
+#    unittest.main()
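
[Editor's note: a self-contained sketch of the tokenizing loop in the new parse() above, with a hard-coded smiley list standing in for the database-backed get_smiley_list(); the (code, filename) pairs are made-up examples.]

# Stand-alone sketch of parser_smileys.parse(); SMILEYS is a hypothetical
# stand-in for the (code, image) rows fetched by get_smiley_list().
SMILEYS = [(":)", "smile.png"), (":(", "frown.png")]

def parse_smileys(text, context=None):
    # Token format used by the frontend parsers: (still_parseable, payload, context)
    tokens = [(True, text, context)]
    for code, image in SMILEYS:
        aux = []
        for parseable, payload, ctx in tokens:
            if not parseable:
                # Already rendered HTML from an earlier pass; keep untouched.
                aux.append((parseable, payload, ctx))
                continue
            parts = payload.split(code)
            while parts:
                chunk = parts.pop(0)
                if chunk:
                    aux.append((True, chunk, ctx))
                if parts:
                    # A smiley separated this chunk from the next one.
                    img = '<img alt="%s" src="/static/smileys/%s" />' % (code, image)
                    aux.append((False, img, ctx))
        tokens = aux
    return False, tokens

# parse_smileys("hola :) adios") returns
# (False, [(True, 'hola ', None),
#          (False, '<img alt=":)" src="/static/smileys/smile.png" />', None),
#          (True, ' adios', None)])
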
Modified: trunk/frontend-web/parsers.py
==============================================================================
--- trunk/frontend-web/parsers.py (original)
+++ trunk/frontend-web/parsers.py Wed Jul 4 15:16:10 2007
@@ -3,6 +3,7 @@
 import parser_bbcode
 import parser_html
 import parser_urls
+import parser_smileys
 import parser_eol
 import parser_entities
@@ -16,6 +17,7 @@
     (allow_html, parser_html),
     (allow_bbcode, parser_bbcode),
     (True, parser_urls),
+    (True, parser_smileys),
     (True, parser_eol),
     # Unless you know very well what you are doing,
     # this parser must be the last one of all.
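
[Editor's note: parsers.py only shows the (enabled, module) table in this hunk; the driver that chains the modules is not part of the diff. A hedged guess at such a loop, assuming every module exposes parse(text, context) -> (retval, tokens) with the token format used above.]

# Hypothetical driver; the real one in parsers.py is not shown in this commit.
def run_parsers(text, parser_table, context=None):
    tokens = [(True, text, context)]
    for enabled, module in parser_table:
        if not enabled:
            continue
        aux = []
        for parseable, payload, ctx in tokens:
            if parseable:
                # Let the module re-tokenize text that is still plain.
                _retval, produced = module.parse(payload, ctx)
                aux.extend(produced)
            else:
                # HTML emitted by an earlier parser is left alone.
                aux.append((parseable, payload, ctx))
        tokens = aux
    # Final output is the concatenation of every payload, in order.
    return "".join(payload for _parseable, payload, _ctx in tokens)
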