Update to Pywikibot 7.5.0 running on Python 3
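
Replace the old compat "wikipedia" module with the Pywikibot core API
and port the script to Python 3: sites come from pywikibot.Site(),
pages are built through pywikibot.Link(), put() takes summary=/minor=
instead of comment=/minorEdit=, except clauses use the "as" syntax,
downloads go through urllib.request, and the cached XML copy is
written in binary mode. A new CannotFetchFeedError is raised when the
feed cannot be fetched at all.

A minimal sketch of the core API this migration targets (it assumes a
configured user-config.py; the page title is only an example):

    import pywikibot

    site = pywikibot.Site("meta", "meta")  # code, family
    page = pywikibot.Page(pywikibot.Link("Sandbox", source=site))
    page.put("new text", summary="bot edit", minor=False)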
1 file changed, 50 insertions(+), 34 deletions(-)

M meta/kursy/tabelac.py +50 -34
@@ -5,17 +5,19 @@ import urllib
 import sys
 import currencytemplate, tabelakursow
 from decimal import *
-import wikipedia
+import urllib.request
+import pywikibot
+import pywikibot.site
 import logging
 import logging.config
 from xml.parsers.expat import ExpatError
 
 logging.config.fileConfig("/home/saper/wikipedia/log/bots.conf", disable_existing_loggers=True)
-wikipedia.logger = logging.getLogger('plwiki')
+pywikibot.logger = logging.getLogger('plwiki')
 mylogger = logging.getLogger('kursywalut')
 
-meta = wikipedia.getSite("meta", "meta", "KursyWalut")
-wikinews = wikipedia.getSite("pl", "wikinews", "KursyWalut")
+meta = pywikibot.Site("meta", "meta", user="KursyWalut")
+wikinews = pywikibot.Site("pl", "wikinews", user="KursyWalut")
 
 TABLEACTIONMSG = {
 	"meta": u"""Robot updates currency table using %s""",

@@ -37,6 +39,17 @@ def get_uid(doc):
 	root = doc.documentElement
 	return root.getAttribute("uid")
 
+class CannotFetchFeedError(Exception):
+	def __init__(self, e, url):
+		self.e = e
+		self.url = url
+
+	def __str__(self):
+		return "Cannot fetch feed from '%s':\n%s" % (
+			self.url,
+			self.e)
+
+
 class CannotParseItemError(Exception):
 	def __init__(self, e, url, content):
 		self.e = e

@@ -51,34 +64,37 @@ class CannotParseItemError(Exception):
 def fetch_table(feedurl, localfile):
 	""" Parse feed, compare with cached copy and return (url, pubdate, parseddomtree) tuple """
 	p = feedparser.parse(feedurl)
-	for i in p["items"]:
-		for e in i["enclosures"]:
-			url = e["href"]
-			if e["type"] == "text/xml":
-				if localfile:
+	if p:
+		for i in p["items"]:
+			for e in i["enclosures"]:
+				url = e["href"]
+				if e["type"] == "text/xml":
+					if localfile:
+						try:
+							old_uid = get_uid(dom.parseString(open(localfile, "rb").read()))
+						except IOError:
+							old_uid = None
+					else:
+						old_uid = None
+
+					content = urllib.request.urlopen(url).read()
 					try:
-						old_uid = get_uid(dom.parseString(open(localfile, "r").read()))
-					except IOError:
-						old_uid = None
-				else:	
-						old_uid = None
-					
-				content = urllib.urlopen(url).read()
-				try:
-					parsed = dom.parseString(content)
+						parsed = dom.parseString(content)
 
-					if old_uid != get_uid(parsed):
-						mylogger.info("Kursy walut: uid: %s->%s" % (old_uid, get_uid(parsed)))
-						data_publikacji = parsed.getElementsByTagName("data_publikacji")[0].firstChild.nodeValue
-						if localfile:
-							wr = open(localfile, "w")
-							wr.write(content)
-							wr.close()
-						return (url, data_publikacji, parsed)
-					else:
-						return (None, None, None)
-				except ExpatError, e:
-					raise CannotParseItemError(e, url, content)
+						if old_uid != get_uid(parsed):
+							mylogger.info("Kursy walut: uid: %s->%s" % (old_uid, get_uid(parsed)))
+							data_publikacji = parsed.getElementsByTagName("data_publikacji")[0].firstChild.nodeValue
+							if localfile:
+								wr = open(localfile, "wb")
+								wr.write(content)
+								wr.close()
+							return (url, data_publikacji, parsed)
+						else:
+							return (None, None, None)
+					except ExpatError as e:
+						raise CannotParseItemError(e, url, content)
+	else:
+		raise CannotFetchFeedError(p.get("bozo_exception"), feedurl)
 
 TABELA_C = ( ("kod_waluty", lambda a: a), 
 		("przelicznik", int),

@@ -129,13 +145,13 @@ def main():
 
 	for site, pagename, lastmod, comment, table, pagetext in strony_tabeli_c + strony_tabeli_a:
 		if lastmod:
-			text = pagetext(table[2]) + LASTMODIFIEDMSG[site.language()] % (table[0], table[1])
+			text = pagetext(table[2]) + LASTMODIFIEDMSG[site.code] % (table[0], table[1])
 		else:
 			text = pagetext(table[2])
 
-		text = text + CATEGORY[site.language()]
-		wikipedia.Page(site, pagename).put(text,
-			comment=comment[site.language()] % (table[0],), minorEdit=False)
+		text = text + CATEGORY[site.code]
+		pywikibot.Page(pywikibot.Link(pagename, source=site)).put(text,
+			summary=comment[site.code] % (table[0],), minor=False)
 
 
 if __name__ == '__main__':