@@ -2,6 +2,7 @@
import feedparser
import xml.dom.minidom as dom
import urllib
+import os
import os.path
import sys
import currencytemplate, tabelakursow
@@ -15,6 +16,8 @@ from xml.parsers.expat import ExpatError
pywikibot.logging._init_routines = [] # Workaround https://phabricator.wikimedia.org/T326650
+TAB_DIR = os.path.expanduser("~") + "/kursy"
+
logging.config.fileConfig(os.path.expanduser("~") + "/.config/bots.conf", disable_existing_loggers=True)
mylogger = logging.getLogger('kursywalut')
@@ -116,9 +119,13 @@ def main():
strony_tabeli_a = []
strony_tabeli_c = []
+ try:
+ os.mkdir(TAB_DIR)
+ except FileExistsError:
+ pass
feedurl, localfile = ("http://rss.nbp.pl/kursy/TabelaC.xml",
- "/home/saper/wikipedia/src/meta/kursy/tabelac.xml")
+ TAB_DIR + "/tabelac.xml")
(url1, pubdate, parseddomtree) = fetch_table(feedurl, localfile)
if url1:
tabelac = (url1, pubdate, extract_items(parseddomtree, TABELA_C))
@@ 136,7 143,7 @@ def main():
]
feedurl, localfile = ("http://rss.nbp.pl/kursy/TabelaA.xml",
- "/home/saper/wikipedia/src/meta/kursy/tabelaa.xml")
+ TAB_DIR + "/tabelaa.xml")
(url2, pubdate, parseddomtree) = fetch_table(feedurl, localfile)
if url2:
tabelaa = (url2, pubdate, extract_items(parseddomtree, TABELA_A))