start a python3 conversion: print function, io module, explicit relative imports, materialized dict views and filter/map results, except-as syntax, and bytes literals for Mercurial's command table
M fniki/SimpleAsyncServer.py +2 -2
@@ 15,7 15,7 @@ An example of protocol class is provided
 the message length, the line feed character and the message body
 """
 
-import cStringIO
+import io
 import socket
 import select
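
Python 3 folds StringIO/cStringIO into the io module and splits it by
payload type: io.StringIO holds str, io.BytesIO holds bytes, and mixing
the two raises TypeError. A minimal sketch of the distinction:

    import io

    text_buf = io.StringIO()
    text_buf.write("decoded text")        # str only
    byte_buf = io.BytesIO()
    byte_buf.write(b"raw socket data")    # bytes only
    # text_buf.write(b"...") would raise TypeError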
 

          
@@ 151,7 151,7 @@ class LengthSepBody(ClientHandler):
 # ============================================================================
 def loop(server,handler,timeout=30):
     while True:
-        k = client_handlers.keys()
+        k = list(client_handlers.keys())
         # w = sockets to which there is something to send
         # we must test if we can send data
         w = [ cl for cl in client_handlers if client_handlers[cl].writable ]
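
The list() wrapper above matters because Python 3 dict methods return live
views, and adding or removing entries while iterating a view raises
RuntimeError; client handlers do come and go during the select loop.
A sketch:

    client_handlers = {1: "handler_a", 2: "handler_b"}
    for k in list(client_handlers):    # snapshot the keys first
        if k == 1:
            del client_handlers[k]     # safe; iterating a live view is not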

          
M fniki/piki.py +111 -112
@@ 29,7 29,7 @@ from os import path, environ
 from socket import gethostbyaddr
 from time import localtime, strftime
 # NOTE: cStringIO doesn't work for unicode.
-from StringIO import StringIO
+from io import StringIO
 import fileoverlay
 filefuncs = None
 

          
@@ 80,8 80,8 @@ def scrub(link_text, ss_class=None, forc
     return link_text
 
 def emit_header():
-    print "Content-type: text/html; charset=utf-8"
-    print
+    print("Content-type: text/html; charset=utf-8")
+    print()
 
 # Regular expression defining a WikiWord (but this definition
 # is also assumed in other places.

          
@@ 98,24 98,24 @@ def get_scriptname():
     return environ.get('SCRIPT_NAME', '')
 
 def send_title(text, link=None, msg=None, is_forked=False):
-    print "<head><title>%s</title>" % text
+    print("<head><title>%s</title>" % text)
     if css_url:
-        print '<link rel="stylesheet" type="text/css" href="%s">' % \
-              scrub(css_url)
-    print "</head>"
-    print '<body><h1>'
+        print('<link rel="stylesheet" type="text/css" href="%s">' % \
+              scrub(css_url))
+    print("</head>")
+    print('<body><h1>')
     if get_logo_string():
-        print link_tag('RemoteChanges', get_logo_string())
+        print(link_tag('RemoteChanges', get_logo_string()))
     if link:
         classattr = ''
         if is_forked:
             classattr = ' class="forkedtitle" '
-        print '<a%s href="%s">%s</a>' % (classattr, scrub(link), text)
+        print('<a%s href="%s">%s</a>' % (classattr, scrub(link), text))
     else:
-        print text
-    print '</h1>'
-    if msg: print msg
-    print '<hr>'
+        print(text)
+    print('</h1>')
+    if msg: print(msg)
+    print('<hr>')
 
 def link_tag(params, text=None, ss_class=None):
     if text is None:

          
@@ 149,12 149,12 @@ def do_fullsearch(needle):
     hits.sort()
     hits.reverse()
 
-    print "<UL>"
+    print("<UL>")
     for (count, page_name) in hits:
-        print '<LI>' + Page(page_name).link_to()
-        print ' . . . . ' + `count`
-        print ['match', 'matches'][count <> 1]
-    print "</UL>"
+        print('<LI>' + Page(page_name).link_to())
+        print(' . . . . ' + repr(count))
+        print(['match', 'matches'][count != 1])
+    print("</UL>")
 
     print_search_stats(len(hits), len(all_pages))
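
Python 3 drops both the <> operator and the backtick repr shorthand; != and
repr() are the only spellings, as in the rewritten lines above:

    count = 3
    print(' . . . . ' + repr(count))           # replaces `count`
    print(['match', 'matches'][count != 1])    # bool indexes as 0 or 1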
 

          
@@ 167,19 167,19 @@ def do_titlesearch(needle):
 
     needle_re = re.compile(needle, re.IGNORECASE)
     all_pages = page_list()
-    hits = filter(needle_re.search, all_pages)
+    hits = list(filter(needle_re.search, all_pages))
 
-    print "<UL>"
+    print("<UL>")
     for filename in hits:
-        print '<LI>' + Page(filename).link_to()
-    print "</UL>"
+        print('<LI>' + Page(filename).link_to())
+    print("</UL>")
 
     print_search_stats(len(hits), len(all_pages))
 
 
 def print_search_stats(hits, searched):
-    print "<p>%d hits " % hits
-    print " out of %d pages searched." % searched
+    print("<p>%d hits " % hits)
+    print(" out of %d pages searched." % searched)
 
 
 def do_edit(pagename):
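
filter() and map() are lazy iterators in Python 3, so any result handed to
len() or traversed more than once has to be materialized, hence the
list(filter(...)) above. A sketch:

    import re
    needle_re = re.compile("index", re.IGNORECASE)
    pages = ["FrontPage", "TitleIndex", "WordIndex"]
    hits = list(filter(needle_re.search, pages))
    print(len(hits))    # len() on a bare filter object raises TypeError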

          
@@ 246,14 246,13 @@ def do_deletelocal(pagename):
     send_title("Removed Local Edits", None,
                "Removed local edits to %s page." %
                pagename)
-    print "Local changes to %s have been deleted. <p>" % Page(
-        pagename).link_to()
-    print "Here's a link to the %s." % Page('FrontPage').link_to()
+    print("Local changes to %s have been deleted. <p>" % Page(
+        pagename).link_to())
+    print("Here's a link to the %s." % Page('FrontPage').link_to())
 
 def make_index_key():
     s = '<p><center>'
-    links = map(lambda ch: '<a href="#%s">%s</a>' % (ch, ch),
-                string.lowercase)
+    links = ['<a href="#%s">%s</a>' % (ch, ch) for ch in string.ascii_lowercase]
     s = s + string.join(links, ' | ')
     s = s + '</center><p>'
     return s
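
string.lowercase and string.join() are gone in Python 3;
string.ascii_lowercase and the str.join method replace them (the
string.join call left in this hunk's context still needs the same
treatment):

    import string

    links = ['<a href="#%s">%s</a>' % (ch, ch)
             for ch in string.ascii_lowercase]
    s = '<p><center>' + ' | '.join(links) + '</center><p>'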

          
@@ 261,10 260,10 @@ def make_index_key():
 
 def page_list(include_versioned=False):
     if include_versioned:
-        return filter(versioned_page_re.match,
-                      filefuncs.list_pages(text_dir))
-    return filter(word_anchored_re.match,
-                  filefuncs.list_pages(text_dir))
+        return list(filter(versioned_page_re.match,
+                      filefuncs.list_pages(text_dir)))
+    return list(filter(word_anchored_re.match,
+                  filefuncs.list_pages(text_dir)))
 
 # ----------------------------------------------------------
 # Macros

          
@@ 275,7 274,7 @@ def _macro_FullSearch():
     return _macro_search("fullsearch")
 
 def _macro_search(type):
-    if form.has_key('value'):
+    if 'value' in form:
         default = form["value"].value.encode('utf8')
     else:
         default = ''
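
dict.has_key() was removed in Python 3; the in operator replaces it:

    form = {"value": "freenet"}    # stand-in for the cgi form object
    default = form["value"] if "value" in form else ''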

          
@@ 302,12 301,12 @@ def _macro_WordIndex():
             except KeyError:
                 map[word] = [name]
 
-    all_words = map.keys()
+    all_words = list(map.keys())
     all_words.sort()
     last_letter = None
     for word in all_words:
         letter = string.lower(word[0])
-        if letter <> last_letter:
+        if letter != last_letter:
             s = s + '<a name="%s"><h3>%s</h3></a>' % (letter, letter)
             last_letter = letter
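
keys() views have no .sort() method, hence the list() before sorting above;
sorted() does the same in one step and reads better:

    word_map = {"zebra": ["PageA"], "apple": ["PageB"]}
    for word in sorted(word_map):    # equivalent to list-then-sort
        print(word)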
 

          
@@ 332,7 331,7 @@ def _macro_TitleIndex():
     current_letter = None
     for name in pages:
         letter = string.lower(name[0])
-        if letter <> current_letter:
+        if letter != current_letter:
             s = s + '<a name="%s"><h3>%s</h3></a>' % (letter, letter)
             current_letter = letter
         else:

          
@@ 363,7 362,7 @@ def get_unmerged_versions(overlay, wikit
         # hmmmm... validate?
         ret[fields[0].strip()].add(fields[1].strip())
 
-    for name in ret.keys()[:]: # hmmm copy required?
+    for name in list(ret.keys()): # list() already makes a copy
         ret[name] = list(ret[name])
         ret[name].sort()
 

          
@@ 440,7 439,7 @@ def _macro_RemoteChanges():
                 continue
             if index == len(words) - 1:
                 # Special case forked files.
-                wiki_names = change.keys()
+                wiki_names = list(change.keys())
                 wiki_names.sort()
 
                 tmps.append("%s:%s" % (words[index],

          
@@ 482,7 481,7 @@ def _macro_RemoteChanges():
         # year, month, day, DoW
         time_tuple = time.gmtime(float(entry[1]))
         day = tuple(time_tuple[0:3])
-        if day <> ratchet_day:
+        if day != ratchet_day:
             #buf.write('<h3>%s</h3>' % strftime(date_fmt, time_tuple))
             buf.write('<h3>%s</h3>' % strftime(date_fmt, time_tuple))
             ratchet_day = day

          
@@ 495,7 494,7 @@ def _macro_RemoteChanges():
 def _macro_BookMark():
     try:
         usk, desc, link_name = read_info()
-    except ValueError, err:
+    except ValueError as err:
         return "[BookMark macro failed: %s]" % str(err.args[0])
 
     if not scrub_links:

          
@@ 513,7 512,7 @@ def _macro_FreesiteUri():
             fields[-2] = '-' + fields[-2]
             usk = '/'.join(fields)
 
-    except ValueError, err:
+    except ValueError as err:
         return "[FreesiteUri macro failed: %s]" % str(err.args[0])
 
     if not scrub_links:

          
@@ 657,7 656,7 @@ class PageFormatter:
     def _macro_repl(self, word):
         macro_name = word[2:-2]
         # TODO: Somehow get the default value into the search field
-        return apply(globals()['_macro_' + macro_name], ())
+        return globals()['_macro_' + macro_name]()
 
     def _tablerow_repl(self, word):
         if word[0:2] == '||':
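
apply(func, args) no longer exists in Python 3; *-unpacking replaces it,
and a plain call is cleanest when the argument tuple is empty:

    def _macro_TitleIndex():
        return "<h3>...</h3>"    # hypothetical macro body

    macro_name = "TitleIndex"
    html = globals()['_macro_' + macro_name]()    # replaces apply(f, ())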

          
@@ 714,7 713,7 @@ class PageFormatter:
                     + r")")
 
                     for match in span_re.finditer(line):
-                        for type, hit in match.groupdict().items():
+                        for type, hit in list(match.groupdict().items()):
                             if hit:
                                 if type == 'colspan':
                                     colspan = colspan + int(match.group('csval')) - 1

          
@@ 774,7 773,7 @@ class PageFormatter:
 
     def table_replace(self, match):
         replaced = ''
-        for type, hit in match.groupdict().items():
+        for type, hit in list(match.groupdict().items()):
             if hit:
                 if type == 'tableborder' or type == 'tdborder':
                     replaced = 'border-style:solid;border-width:'+match.group('borderval')+';'

          
@@ 821,14 820,14 @@ class PageFormatter:
         return res
 
     def replace(self, match):
-        for type, hit in match.groupdict().items():
+        for type, hit in list(match.groupdict().items()):
             if hit:
                 replaced = ''
                 if self.in_table == 1 and type != 'tablerow':
                     replaced = self._tablerow_repl(hit)
-                return replaced + apply(getattr(self, '_' + type + '_repl'), (hit,))
+                return replaced + getattr(self, '_' + type + '_repl')(hit)
         else:
-            raise "Can't handle match " + `match`
+            raise "Can't handle match " + repr(match)
 
     def return_html(self):
         returnval = ''

          
@@ 874,7 873,7 @@ class PageFormatter:
         return returnval
 
     def print_html(self):
-        print self.return_html()
+        print(self.return_html())
 
 # ----------------------------------------------------------
 class Page:

          
@@ 922,7 921,7 @@ class Page:
     def get_raw_body(self, unmodified=False):
         try:
             return filefuncs.read(self._text_filename(), 'rb', unmodified)
-        except IOError, er:
+        except IOError as er:
             if er.errno == errno.ENOENT:
                 # just doesn't exist, use default
                 return 'Describe %s here.' % self.page_name

          
@@ 944,9 943,9 @@ class Page:
             PageFormatter(self.get_raw_body(unmodified), allow_images).print_html()
         else:
             if removed:
-                print "<b>Already resolved.</b>"
+                print("<b>Already resolved.</b>")
             elif resolved:
-                print "<b>Locally marked resolved.</b>"
+                print("<b>Locally marked resolved.</b>")
             else:
                 PageFormatter(self.get_raw_body(unmodified), allow_images).print_html()
 

          
@@ 960,18 959,18 @@ class Page:
                     unmodified=False):
 
         base = get_scriptname()
-        print '<hr>'
+        print('<hr>')
         if is_read_only(data_dir, self.page_name):
-            print "<em>The bot owner has marked this page read only.</em>"
-            print (('<br><a href="%s?viewunmodifiedsource=%s">'  %
-                    (base, self.page_name)) + '[View page source]</a><br>')
+            print("<em>The bot owner has marked this page read only.</em>")
+            print((('<br><a href="%s?viewunmodifiedsource=%s">'  %
+                    (base, self.page_name)) + '[View page source]</a><br>'))
             return
 
         if unmodified:
-            print ("<em>Read only original version " +
-                   "of a locally modified page.</em>")
-            print (('<br><a href="%s?viewunmodifiedsource=%s">'  %
-                    (base, self.page_name)) + '[View page source]</a><br>')
+            print(("<em>Read only original version " +
+                   "of a locally modified page.</em>"))
+            print((('<br><a href="%s?viewunmodifiedsource=%s">'  %
+                    (base, self.page_name)) + '[View page source]</a><br>'))
             return
 
         if versioned:

          
@@ 980,59 979,59 @@ class Page:
                 return
 
             if filefuncs.has_overlay(page_path):
-                print (('<br><a href="%s?unmodified=%s">' % (base,
+                print((('<br><a href="%s?unmodified=%s">' % (base,
                                                              self.page_name)) +
-                       '[Show original version]</a><br>')
-                print (('<a href="%s?deletelocal=%s">' % (base,
+                       '[Show original version]</a><br>'))
+                print((('<a href="%s?deletelocal=%s">' % (base,
                                                           self.page_name)) +
-                       '[Mark unresolved, without confirmation!]</a><br>')
+                       '[Mark unresolved, without confirmation!]</a><br>'))
 
             else:
                 if filefuncs.exists(page_path, True):
-                    print "<em>This is an unmerged fork of another page!</em>"
-                    print (('<br><a href="%s?viewsource=%s">' %
+                    print("<em>This is an unmerged fork of another page!</em>")
+                    print((('<br><a href="%s?viewsource=%s">' %
                             (base, self.page_name)) +
-                           '[View page source]</a><br>')
-                    print (('<br><a href="%s?removepage=%s">' %
+                           '[View page source]</a><br>'))
+                    print((('<br><a href="%s?removepage=%s">' %
                             (base, self.page_name)) +
                            '[Locally mark resolved, ' +
-                           'without confirmation!]</a><br>')
+                           'without confirmation!]</a><br>'))
 
-            print "<p><em>Wiki dir: %s </em>" % data_dir
+            print("<p><em>Wiki dir: %s </em>" % data_dir)
             return
 
         if not page_path is None and filefuncs.has_overlay(page_path):
-            print "<strong>This page has local edits!</strong><br>"
+            print("<strong>This page has local edits!</strong><br>")
 
         if not page_path is None:
             name = os.path.split(page_path)[1]
             fork_table = get_unmerged_versions(filefuncs, text_dir,
                                                (name,))
             if len(fork_table[name]) > 0:
-                print ("<strong>This page has forks: %s!</strong><br>"  %
-                       get_fork_html(filefuncs, text_dir, name, fork_table))
+                print(("<strong>This page has forks: %s!</strong><br>"  %
+                       get_fork_html(filefuncs, text_dir, name, fork_table)))
 
-        print link_tag('?edit=%s' %  name, 'EditText')
-        print "of this page"
+        print(link_tag('?edit=%s' %  name, 'EditText'))
+        print("of this page")
         if mod_string:
-            print "(last modified %s)" % mod_string
-        print '<br>'
-        print link_tag('FindPage?value=%s' %  name, 'FindPage')
-        print " by browsing, searching, or an index"
+            print("(last modified %s)" % mod_string)
+        print('<br>')
+        print(link_tag('FindPage?value=%s' %  name, 'FindPage'))
+        print(" by browsing, searching, or an index")
 
         if page_path is None:
-            print "<p><em>Wiki dir: %s </em>" % data_dir
+            print("<p><em>Wiki dir: %s </em>" % data_dir)
             return
 
         if filefuncs.has_overlay(page_path):
-            print (('<br><a href="%s?unmodified=%s">' % (base, name)) +
-                   '[Show original version]</a><br>')
-            print (('<a href="%s?removepage=%s">' % (base, name)) +
-                   '[Locally delete this page without confirmation!]</a><br>')
-            print (('<a href="%s?deletelocal=%s">' % (base, name)) +
-                   '[Undo local edits without confirmation!]</a><br>')
+            print((('<br><a href="%s?unmodified=%s">' % (base, name)) +
+                   '[Show original version]</a><br>'))
+            print((('<a href="%s?removepage=%s">' % (base, name)) +
+                   '[Locally delete this page without confirmation!]</a><br>'))
+            print((('<a href="%s?deletelocal=%s">' % (base, name)) +
+                   '[Undo local edits without confirmation!]</a><br>'))
 
-        print "<p><em>Wiki dir: %s </em>" % data_dir
+        print("<p><em>Wiki dir: %s </em>" % data_dir)
 
 
     def send_page(self, msg=None, unmodified=False):

          
@@ 1062,23 1061,23 @@ class Page:
 
         send_title(title + self.split_title())
         # IMPORTANT: Ask browser to send us utf8
-        print '<form method="post" action="%s" accept-charset="UTF-8">' % (get_scriptname())
-        print '<input type=hidden name="savepage" value="%s">' % \
-              (self.page_name)
+        print('<form method="post" action="%s" accept-charset="UTF-8">' % (get_scriptname()))
+        print('<input type=hidden name="savepage" value="%s">' % \
+              (self.page_name))
         # Encode outgoing raw wikitext into utf8
         raw_body = string.replace(self.get_raw_body(unmodified),
                                   '\r\n', '\n')
-        print """<textarea wrap="virtual" name="savetext" rows="17"
+        print("""<textarea wrap="virtual" name="savetext" rows="17"
                  cols="120" %s >%s</textarea>""" % (
-                 read_only_value, raw_body)
+                 read_only_value, raw_body))
         if not read_only:
-            print """<br><input type=submit value="Save">
+            print("""<br><input type=submit value="Save">
                      <input type=reset value="Reset">
-                  """
-        print "<br>"
-        print "</form>"
+                  """)
+        print("<br>")
+        print("</form>")
         if not read_only:
-            print "<p>" + Page('EditingTips').link_to()
+            print("<p>" + Page('EditingTips').link_to())
 
     def _write_file(self, text):
         filefuncs.write(self._text_filename(), text, 'wb')

          
@@ 1123,14 1122,14 @@ def serve_one_page():
                      'deletelocal': do_deletelocal,
                      'removepage':  do_removepage}
 
-        for cmd in handlers.keys():
-            if form.has_key(cmd):
-                apply(handlers[cmd], (form[cmd].value.decode('utf8'),))
+        for cmd in list(handlers.keys()):
+            if cmd in form:
+                handlers[cmd](form[cmd].value.decode('utf8'))
                 break
         else:
             path_info = environ.get('PATH_INFO', '')
 
-            if form.has_key('goto'):
+            if 'goto' in form:
                 query = form['goto'].value.decode('utf8')
             elif len(path_info) and path_info[0] == '/':
                 query = path_info[1:] or 'FrontPage'

          
@@ 1147,7 1146,7 @@ def serve_one_page():
                     word = "%s_%s" % (word, word_match.group('version'))
                 Page(word).send_page()
             else:
-                print "<p>Can't work out query \"<pre>" + query + "</pre>\""
+                print("<p>Can't work out query \"<pre>" + query + "</pre>\"")
 
     except:
         cgi.print_exception()

          
@@ 1183,7 1182,7 @@ def make_fork_list(versioned_names):
         entry.append(version)
         table[wiki_name] = entry
 
-    for value in table.values():
+    for value in list(table.values()):
         value.sort()
 
     return table

          
@@ 1289,19 1288,19 @@ class FreenetPage(Page):
     def send_footer(self, versioned, dummy_mod_string=None,
                     page_path=None,
                     dummy_unmodified=False):
-        print "<hr>"
-        print "%s %s %s" % (link_tag('FrontPage', 'FrontPage'),
+        print("<hr>")
+        print("%s %s %s" % (link_tag('FrontPage', 'FrontPage'),
                             link_tag('TitleIndex', 'TitleIndex'),
-                            link_tag('WordIndex', 'WordIndex'))
+                            link_tag('WordIndex', 'WordIndex')))
         if not page_path is None and not versioned:
             name = os.path.split(page_path)[1]
             fork_table = get_unmerged_versions(filefuncs, text_dir,
                                                (name,))
             if len(fork_table[name]) > 0:
-                print (("<hr><strong>This page has forks: %s! " %
+                print((("<hr><strong>This page has forks: %s! " %
                         get_fork_html(filefuncs, text_dir, name, fork_table))
                        +
-                       "Please consider merging them.</strong><br>")
+                       "Please consider merging them.</strong><br>"))
 
 def reset_root_dir(root_dir, overlayed=False):
     global data_dir, text_dir, filefuncs

          
@@ 1324,7 1323,7 @@ def reset_root_dir(root_dir, overlayed=F
 CFG_FILE = 'fnwiki.cfg'
 WIKIROOT = 'wiki_root'
 # REDFLAG: Hacks to make this work in windows binary mercurial distro?
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 def set_data_dir_from_cfg(base_path=None):
     if base_path is None:
         # REDFLAG: test on windoze.

          
@@ 1371,7 1370,7 @@ def dump(output_dir, wiki_root, overlaye
             try:
                 page = FreenetPage(name)
                 sys.stdout = out
-                print '<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">'
+                print('<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">')
                 page.send_page()
                 sys.stdout.flush()
                 out.close()

          
@@ 1417,5 1416,5 @@ if __name__ == "__main__" or __name__ ==
         serve_one_page()
     finally:
         sys.stdout = real_out
-        print buf.getvalue().encode('utf8')
+        print(buf.getvalue())  # py3: printing bytes emits its repr
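
print() applied to bytes emits the b'...' repr rather than the raw octets,
so the buffered page should stay text (stdout encodes it) or be written to
the binary layer explicitly:

    import sys
    page = "caf\u00e9"
    print(page)                                    # text path
    sys.stdout.buffer.write(page.encode('utf8'))   # explicit byte path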
 

          
M fniki/servepiki.py +9 -9
@@ 24,9 24,9 @@ import os
 import traceback
 import datetime
 import mimetypes
-import urlparse
-import urllib
-import cStringIO
+import urllib.parse
+import urllib.request, urllib.error
+import io
 import re
 
 import piki

          
@@ 84,7 84,7 @@ class HTTP(SimpleAsyncServer.ClientHandl
             and close_conn.lower() == "keep-alive"):
             self.close_when_done = False
         # parse the url
-        scheme,netloc,path,params,query,fragment = urlparse.urlparse(self.url)
+        scheme,netloc,path,params,query,fragment = urllib.parse.urlparse(self.url)
         self.path,self.rest = path,(params,query,fragment)
 
         if self.method == 'POST':

          
@@ 95,7 95,7 @@ class HTTP(SimpleAsyncServer.ClientHandl
             # request is incomplete if not all message body received
             if len(body)<content_length:
                 return False
-            f_body = cStringIO.StringIO(body)
+            f_body = io.StringIO(body)
             f_body.seek(0)
             sys.stdin = f_body # compatibility with CGI
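
One caveat in the hunk above: the POST body read off the socket is bytes in
Python 3, and io.StringIO only accepts str, so the CGI-compatibility shim
needs a decode (or io.BytesIO for a binary stdin). Assuming a utf-8 body:

    import io
    body = b"savetext=hello"    # hypothetical POST payload
    f_body = io.StringIO(body.decode('utf8'))
    f_body.seek(0)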
 

          
@@ 166,7 166,7 @@ class HTTP(SimpleAsyncServer.ClientHandl
         # redirect print statements to a cStringIO
 
         save_stdout = sys.stdout
-        sys.stdout = cStringIO.StringIO()
+        sys.stdout = io.StringIO()
         # run the script
         try:
             # djk20091109 There was a bug here. You need the {} in order to run

          
@@ 175,9 175,9 @@ class HTTP(SimpleAsyncServer.ClientHandl
             #execfile(self.file_name)
 
             # djk20091109 HACKED to run only piki script.
-            execfile(HTTP.script_path, {})
+            with open(HTTP.script_path, "rb") as script_file:
+                exec(compile(script_file.read(), HTTP.script_path, 'exec'), {})
         except:
-            sys.stdout = cStringIO.StringIO()
+            sys.stdout = io.StringIO()
             sys.stdout.write("Content-type:text/plain\r\n\r\n")
             traceback.print_exc(file=sys.stdout)
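
execfile() is gone in Python 3; reading the source, compile()-ing it with
the real filename (so tracebacks point at the script), and exec()-ing is
the standard replacement, sketched here with a hypothetical path:

    script_path = "piki.py"    # hypothetical
    with open(script_path, "rb") as f:
        code = compile(f.read(), script_path, 'exec')
    exec(code, {})             # fresh globals, as the original execfile call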
 

          
@@ 251,7 251,7 @@ class HTTP(SimpleAsyncServer.ClientHandl
                 date_str,self.requestline,code))
 
 def default_out_func(text):
-    print text
+    print(text)
 
 def serve_wiki(port=8081, bind_to='localhost', out_func=default_out_func):
     #out_func("server_wiki running under: %s" % str(sys.version))

          
M infocalypse/__init__.py +139 -132
@@ 338,14 338,14 @@ d kar bott at com cast dot net
 
 import os
 
-from commands import *
+from .commands import *
 
 from mercurial import commands, extensions, util, hg, dispatch, discovery
 from mercurial.i18n import _
 
-import freenetrepo
+from . import freenetrepo
 
-from keys import strip_protocol
+from .keys import strip_protocol
 
 _freenetschemes = ('freenet', ) # TODO: add fn
 for _scheme in _freenetschemes:
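
Implicit relative imports are invalid in Python 3: inside a package, a bare
"import commands" looks only at top-level modules, so every intra-package
import becomes explicitly relative, the pattern used throughout this
changeset:

    from . import freenetrepo           # sibling module
    from .keys import strip_protocol    # name from a sibling module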

          
@@ 353,202 353,202 @@ for _scheme in _freenetschemes:
 
 #----------------------------------------------------------"
 
-DEFAULT_FCP_HOST = "127.0.0.1"
+DEFAULT_FCP_HOST = b"127.0.0.1"
 DEFAULT_FCP_PORT = 9481
 # synchronize with wot.py (copied here to a void importing wot)
 FREEMAIL_SMTP_PORT = 4025
 FREEMAIL_IMAP_PORT = 4143
 
 # Can't use None as a default? Means "takes no argument'?
-FCP_OPTS = [('', 'fcphost', '', 'fcp host, defaults to setup or ' + DEFAULT_FCP_HOST),
-            ('', 'fcpport', 0, 'fcp port, defaults to setup or ' + str(DEFAULT_FCP_PORT)),
+FCP_OPTS = [(b'', b'fcphost', b'', b'fcp host, defaults to setup or ' + DEFAULT_FCP_HOST),
+            (b'', b'fcpport', 0, b'fcp port, defaults to setup or ' + str(DEFAULT_FCP_PORT).encode('utf8')),
 ]
 
-FREEMAIL_OPTS = [('', 'mailhost', '', 'freemail host, defaults to setup or ' + DEFAULT_FCP_HOST),
-                 ('', 'smtpport', 0, 'freemail smtp port, defaults to setup or ' + str(FREEMAIL_SMTP_PORT)),
-                 ('', 'imapport', 0, 'freemail imap port, defaults to setup or ' + str(FREEMAIL_IMAP_PORT)),
+FREEMAIL_OPTS = [(b'', b'mailhost', b'', b'freemail host, defaults to setup or ' + DEFAULT_FCP_HOST),
+                 (b'', b'smtpport', 0, b'freemail smtp port, defaults to setup or ' + str(FREEMAIL_SMTP_PORT).encode('utf8')),
+                 (b'', b'imapport', 0, b'freemail imap port, defaults to setup or ' + str(FREEMAIL_IMAP_PORT).encode('utf8')),
 ]
 
-FMS_OPTS = [('', 'fmshost', '', 'fms host'),
-            ('', 'fmsport', 0, 'fms port'),
+FMS_OPTS = [(b'', b'fmshost', b'', b'fms host'),
+            (b'', b'fmsport', 0, b'fms port'),
 ]
 
-WOT_OPTS = [('', 'truster', '', 'WoT nick@key to use when looking up others'),
+WOT_OPTS = [(b'', b'truster', b'', b'WoT nick@key to use when looking up others'),
 ]
-WOT_CREATE_OPTS = [('', 'wot', '', 'WoT nickname to create on'),
+WOT_CREATE_OPTS = [(b'', b'wot', b'', b'WoT nickname to create on'),
 ]
-WOT_PULL_OPTS = [('', 'wot', '', 'WoT nick@key/repo to pull from'),
+WOT_PULL_OPTS = [(b'', b'wot', b'', b'WoT nick@key/repo to pull from'),
 ]
 
 
-AGGRESSIVE_OPT = [('', 'aggressive', None, 'aggressively search for the '
-                   + 'latest USK index'),]
-NOSEARCH_OPT = [('', 'nosearch', None, 'use USK version in URI'), ]
+AGGRESSIVE_OPT = [(b'', b'aggressive', None, b'aggressively search for the '
+                   + b'latest USK index'),]
+NOSEARCH_OPT = [(b'', b'nosearch', None, b'use USK version in URI'), ]
 # Allow mercurial naming convention for command table.
 # pylint: disable-msg=C0103
 
-PULL_OPTS = [('', 'hash', [], 'repo hash of repository to pull from'),
-             ('', 'onlytrusted', None, 'only use repo announcements from '
-              + 'known users')]
+PULL_OPTS = [(b'', b'hash', [], b'repo hash of repository to pull from'),
+             (b'', b'onlytrusted', None, b'only use repo announcements from '
+              + b'known users')]
 
 cmdtable = {
-    "fn-connect": (infocalypse_connect, FCP_OPTS),
+    b"fn-connect": (infocalypse_connect, FCP_OPTS),
 
-    "fn-pull": (infocalypse_pull,
-                [('', 'uri', '', 'request URI to pull from')]
+    b"fn-pull": (infocalypse_pull,
+                [(b'', b'uri', b'', b'request URI to pull from')]
                 + PULL_OPTS
                 + WOT_PULL_OPTS
                 + WOT_OPTS
                 + FCP_OPTS
                 + NOSEARCH_OPT
                 + AGGRESSIVE_OPT,
-                "[options]"),
+                b"[options]"),
 
-    "fn-updaterepolist": (infocalypse_update_repo_list,
+    b"fn-updaterepolist": (infocalypse_update_repo_list,
                           WOT_CREATE_OPTS),
 
-    "fn-pull-request": (infocalypse_pull_request,
-                        [('', 'wot', '', 'WoT nick@key/repo to send request '
-                                         'to')]
+    b"fn-pull-request": (infocalypse_pull_request,
+                        [(b'', b'wot', b'', b'WoT nick@key/repo to send request '
+                                         b'to')]
                         + WOT_OPTS
                         + FCP_OPTS
                         + FREEMAIL_OPTS,
-                        "[--truster nick@key] --wot nick@key/repo"),
+                        b"[--truster nick@key] --wot nick@key/repo"),
 
-    "fn-check-notifications": (infocalypse_check_notifications,
-                               [('', 'wot', '', 'WoT nick@key to check '
-                                                'notifications for')]
+    b"fn-check-notifications": (infocalypse_check_notifications,
+                               [(b'', b'wot', b'', b'WoT nick@key to check '
+                                                b'notifications for')]
                                + WOT_OPTS
                                + FCP_OPTS
                                + FREEMAIL_OPTS,
-                               "--wot nick@key"),
+                               b"--wot nick@key"),
 
-    "fn-push": (infocalypse_push,
-                [('', 'uri', '', 'insert URI to push to'),
+    b"fn-push": (infocalypse_push,
+                [(b'', b'uri', b'', b'insert URI to push to'),
                  # Buggy. Not well thought out.
-                 #('', 'requesturi', '', 'optional request URI to copy'),
-                 ('r', 'rev', [],'maximum rev to push'),]
+                 #(b'', b'requesturi', b'', b'optional request URI to copy'),
+                 (b'r', b'rev', [],b'maximum rev to push'),]
                 + FCP_OPTS
                 + AGGRESSIVE_OPT,
-                "[options]"),
+                b"[options]"),
 
-    "fn-create": (infocalypse_create,
-                  [('', 'uri', '', 'insert URI to create on'),
-                   ('r', 'rev', [],'maximum rev to push')]
+    b"fn-create": (infocalypse_create,
+                  [(b'', b'uri', b'', b'insert URI to create on'),
+                   (b'r', b'rev', [],b'maximum rev to push')]
                   + FCP_OPTS
                   + WOT_CREATE_OPTS,
-                "[options]"),
-    "fn-copy": (infocalypse_copy,
-                [('', 'requesturi', '', 'request URI to copy from'),
-                 ('', 'inserturi', '', 'insert URI to copy to'), ]
+                b"[options]"),
+    b"fn-copy": (infocalypse_copy,
+                [(b'', b'requesturi', b'', b'request URI to copy from'),
+                 (b'', b'inserturi', b'', b'insert URI to copy to'), ]
                 + FCP_OPTS
                 + NOSEARCH_OPT,
-                "[options]"),
+                b"[options]"),
 
-    "fn-reinsert": (infocalypse_reinsert,
-                    [('', 'uri', '', 'request URI'),
-                     ('', 'level', 3, 'how much to re-insert')]
+    b"fn-reinsert": (infocalypse_reinsert,
+                    [(b'', b'uri', b'', b'request URI'),
+                     (b'', b'level', 3, b'how much to re-insert')]
                     + FCP_OPTS
                     + NOSEARCH_OPT,
-                    "[options]"),
+                    b"[options]"),
 
-    "fn-info": (infocalypse_info,
-                 [('', 'uri', '', 'request URI'),],
-                "[options]"),
+    b"fn-info": (infocalypse_info,
+                 [(b'', b'uri', b'', b'request URI'),],
+                b"[options]"),
 
 
-    "fn-fmsread": (infocalypse_fmsread,
-                   [('', 'uri', '', 'request URI'),
-                    ('', 'hash', [], 'repo hash to modify trust for'),
-                    ('', 'fmsid', [], 'FMS id to modify trust for'),
-                    ('', 'list', None, 'show repo USKs from trusted '
-                     + 'fms identities'),
-                    ('', 'listall', None, 'show all repo USKs'),
-                    ('', 'showtrust', None, 'show the trust map'),
-                    ('', 'trust', None, 'add an entry to the trust map'),
-                    ('', 'untrust', None, 'remove an entry from the trust map'),
-                    ('', 'dryrun', None, "don't update the index cache"),],
-                   "[options]"),
+    b"fn-fmsread": (infocalypse_fmsread,
+                   [(b'', b'uri', b'', b'request URI'),
+                    (b'', b'hash', [], b'repo hash to modify trust for'),
+                    (b'', b'fmsid', [], b'FMS id to modify trust for'),
+                    (b'', b'list', None, b'show repo USKs from trusted '
+                     + b'fms identities'),
+                    (b'', b'listall', None, b'show all repo USKs'),
+                    (b'', b'showtrust', None, b'show the trust map'),
+                    (b'', b'trust', None, b'add an entry to the trust map'),
+                    (b'', b'untrust', None, b'remove an entry from the trust map'),
+                    (b'', b'dryrun', None, b"don't update the index cache"),],
+                   b"[options]"),
 
-    "fn-fmsnotify": (infocalypse_fmsnotify,
-                     [('', 'dryrun', None, "don't send fms message"),
-                     ('', 'announce', None, "include full URI update"),
-                     ('', 'submitbundle', None, "insert patch bundle and " +
-                      "send an fms notification"),
-                      ('', 'submitwiki', None, "insert overlayed wiki " +
-                       "changes and send an fms notification"),]
+    b"fn-fmsnotify": (infocalypse_fmsnotify,
+                     [(b'', b'dryrun', None, b"don't send fms message"),
+                     (b'', b'announce', None, b"include full URI update"),
+                     (b'', b'submitbundle', None, b"insert patch bundle and " +
+                      b"send an fms notification"),
+                      (b'', b'submitwiki', None, b"insert overlayed wiki " +
+                       b"changes and send an fms notification"),]
                      + FCP_OPTS, # Needs to invert the insert uri
-                     "[options]"),
+                     b"[options]"),
 
-    "fn-putsite": (infocalypse_putsite,
-                     [('', 'dryrun', None, "don't insert site"),
-                     ('', 'index', -1, "edition to insert"),
-                     ('', 'createconfig', None, "create default freesite.cfg"),
-                      ('', 'wiki', None, "insert a wiki, requires fnwiki.cfg"),
-                     ('', 'key', '', "private SSK to insert under"),]
+    b"fn-putsite": (infocalypse_putsite,
+                     [(b'', b'dryrun', None, b"don't insert site"),
+                     (b'', b'index', -1, b"edition to insert"),
+                     (b'', b'createconfig', None, b"create default freesite.cfg"),
+                      (b'', b'wiki', None, b"insert a wiki, requires fnwiki.cfg"),
+                     (b'', b'key', b'', b"private SSK to insert under"),]
                      + FCP_OPTS,
-                     "[options]"),
+                     b"[options]"),
 
-    "fn-wiki": (infocalypse_wiki,
-                [('', 'run', None, "start a local http server " +
-                  "displaying a wiki"),
-                 ('', 'createconfig', None, "create default fnwiki.cfg " +
-                  "and skeleton wiki_root dir"),
-                 ('', 'http_port', 8081, "port for http server"),
-                 ('', 'http_bind', 'localhost', "interface x1http " +
-                  "listens on, '' to listen on all"),
-                 ('', 'apply', '', "apply changes to the wiki from the " +
-                  "supplied Request URI ")] +
+    b"fn-wiki": (infocalypse_wiki,
+                [(b'', b'run', None, b"start a local http server " +
+                  b"displaying a wiki"),
+                 (b'', b'createconfig', None, b"create default fnwiki.cfg " +
+                  b"and skeleton wiki_root dir"),
+                 (b'', b'http_port', 8081, b"port for http server"),
+                 (b'', b'http_bind', b'localhost', b"interface x1http " +
+                  b"listens on, '' to listen on all"),
+                 (b'', b'apply', b'', b"apply changes to the wiki from the " +
+                  b"supplied Request URI ")] +
                 FCP_OPTS,
-                "[options]"),
+                b"[options]"),
 
-    "fn-genkey": (infocalypse_genkey,
+    b"fn-genkey": (infocalypse_genkey,
                   FCP_OPTS,
-                  "[options]"),
+                  b"[options]"),
 
-    "fn-setup": (infocalypse_setup,
-                 [('', 'tmpdir', '~/infocalypse_tmp', 'temp directory'),
-                  ('', 'nofms', None, 'skip FMS configuration'),
-                  ('', 'nowot', None, 'skip WoT configuration'),
-                  ('', 'fmsid', '', "fmsid (only part before '@'!)"),
-                  ('', 'timeout', 30, "fms socket timeout in seconds")]
+    b"fn-setup": (infocalypse_setup,
+                 [(b'', b'tmpdir', b'~/infocalypse_tmp', b'temp directory'),
+                  (b'', b'nofms', None, b'skip FMS configuration'),
+                  (b'', b'nowot', None, b'skip WoT configuration'),
+                  (b'', b'fmsid', b'', b"fmsid (only part before '@'!)"),
+                  (b'', b'timeout', 30, b"fms socket timeout in seconds")]
                  + WOT_OPTS
                  + FCP_OPTS
                  + FMS_OPTS,
-                "[options]"),
+                b"[options]"),
 
-    "fn-setupfms": (infocalypse_setupfms,
-                    [('', 'fmsid', '', "fmsid (only part before '@'!)"),
-                     ('', 'timeout', 30, "fms socket timeout in seconds"),]
+    b"fn-setupfms": (infocalypse_setupfms,
+                    [(b'', b'fmsid', b'', b"fmsid (only part before '@'!)"),
+                     (b'', b'timeout', 30, b"fms socket timeout in seconds"),]
                     + FMS_OPTS,
-                    "[options]"),
+                    b"[options]"),
 
-    "fn-setupwot": (infocalypse_setupwot,
+    b"fn-setupwot": (infocalypse_setupwot,
                     FCP_OPTS +
                     WOT_OPTS,
-                    "[options]"),
+                    b"[options]"),
 
-    "fn-setupfreemail": (infocalypse_setupfreemail,
+    b"fn-setupfreemail": (infocalypse_setupfreemail,
                          WOT_OPTS
                          + FCP_OPTS
                          + FREEMAIL_OPTS,
-                         "[--truster nick@key]"),
+                         b"[--truster nick@key]"),
 
-    "fn-archive": (infocalypse_archive,
-                   [('', 'uri', '', 'Request URI for --pull, Insert URI ' +
-                     'for --create, --push'),
-                    ('', 'create', None, 'Create a new archive using the ' +
-                     'Insert URI --uri'),
-                    ('', 'push', None, 'Push incremental updates into the ' +
-                     'archive in Freenet'),
-                    ('', 'pull', None, 'Pull incremental updates from the ' +
-                     'archive in Freenet'),
-                    ('', 'reinsert', None, 'Re-insert the entire archive. '),
+    b"fn-archive": (infocalypse_archive,
+                   [(b'', b'uri', b'', b'Request URI for --pull, Insert URI ' +
+                     b'for --create, --push'),
+                    (b'', b'create', None, b'Create a new archive using the ' +
+                     b'Insert URI --uri'),
+                    (b'', b'push', None, b'Push incremental updates into the ' +
+                     b'archive in Freenet'),
+                    (b'', b'pull', None, b'Pull incremental updates from the ' +
+                     b'archive in Freenet'),
+                    (b'', b'reinsert', None, b'Re-insert the entire archive. '),
                 ]
                    + FCP_OPTS
                    + NOSEARCH_OPT
                    + AGGRESSIVE_OPT,
-                   "[options]"),
+                   b"[options]"),
 }
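
Mercurial's Python 3 port keeps its internal strings as bytes, so command
names, option flags, defaults, and help text in cmdtable all become bytes
literals. A minimal sketch of the registration shape, with a hypothetical
command:

    def fn_example(ui, repo, **opts):
        ui.status(b"example\n")    # Mercurial's ui API takes bytes

    cmdtable = {
        b"fn-example": (fn_example,
                        [(b'', b'uri', b'', b'request URI')],
                        b"[options]"),
    }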
 
 

          
@@ 561,6 561,11 @@ try:
     commands.norepo += ' fn-setupfreemail'
     commands.norepo += ' fn-updaterepolist'
 except AttributeError as e: # Mercurial 3.8 API change
+    for i in cmdtable:
+        cmdtable[i][0].norepo = False
+        cmdtable[i][0].optionalrepo = False
+        cmdtable[i][0].inferrepo = False
+        cmdtable[i][0].intents = set()
     infocalypse_setup.norepo = True
     infocalypse_setupfms.norepo = True
     infocalypse_setupwot.norepo = True
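
Since Mercurial 3.8 the "works without a repository" flags are attributes
on the command function rather than names appended to commands.norepo,
which is what the AttributeError fallback above sets:

    def fn_setup(ui, **opts):
        pass

    fn_setup.norepo = True    # command runs outside any repository
    fn_setup.optionalrepo = False
    fn_setup.inferrepo = False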

          
@@ 577,7 582,7 @@ except AttributeError as e: # Mercurial 
 def findcommonoutgoing(orig, *args, **opts):
     repo = args[0]
     remoterepo = args[1]
-    capable = getattr(remoterepo, 'capable', lambda x: False)
+    capable = getattr(remoterepo, 'capable', lambda x: False)
     if capable('infocalypse'):
         class fakeoutgoing(object):
             def __init__(self):

          
@@ 589,7 594,7 @@ def findcommonoutgoing(orig, *args, **op
     else:
         return orig(*args, **opts)
 # really wrap the functions
-extensions.wrapfunction(discovery, 'findcommonoutgoing', findcommonoutgoing)
+extensions.wrapfunction(discovery, 'findcommonoutgoing', findcommonoutgoing)
 
 # wrap the commands
 

          
@@ 623,7 628,7 @@ def freenetpathtouri(ui, path, operation
     # Guess whether it's WoT. This won't work if someone has chosen their WoT
     # nick to be "USK", but this is a corner case. Using --wot will still work.
     if not path.startswith("USK"):
-        import wot
+        from . import wot
         if operation == "pull":
             truster = get_truster(ui, repo, truster_identifier)
             return wot.resolve_pull_uri(ui, path, truster, repo, fcphost=fcphost, fcpport=fcpport)

          
@@ 649,7 654,7 @@ def freenetpull(orig, *args, **opts):
         return False
     ui, repo, path = parsepushargs(*args)
     if not path:
-        path = ui.expandpath('default', 'default-push')
+        path = ui.expandpath(b'default', b'default-push')
     else:
         path = ui.expandpath(path)
     # only act differently, if the target is an infocalypse repo.

          
@@ 681,12 686,12 @@ def freenetpush(orig, *args, **opts):
     def parsepushargs(ui, repo, path=None):
         return ui, repo, path
     def isfreenetpath(path):
-        if path and path.startswith("freenet:") or path.startswith("USK@"):
+        if path and (path.startswith(b"freenet:") or path.startswith(b"USK@")):
             return True
         return False
     ui, repo, path = parsepushargs(*args)
     if not path:
-        path = ui.expandpath('default-push', 'default')
+        path = ui.expandpath(b'default-push', b'default')
     else:
         path = ui.expandpath(path)
     # only act differently, if the target is an infocalypse repo.

          
@@ 698,7 703,7 @@ def freenetpush(orig, *args, **opts):
     # if the uri is the short form (USK@/name/#), generate the key and preprocess the uri.
     if uri.startswith("USK@/"):
         ui.status("creating a new key for the repo. For a new repo with an existing key, use clone.\n")
-        from sitecmds import genkeypair
+        from .sitecmds import genkeypair
         fcphost, fcpport = opts["fcphost"], opts["fcpport"]
         if not fcphost:
             fcphost = DEFAULT_FCP_HOST

          
@@ 778,7 783,7 @@ def freenetclone(orig, *args, **opts):
         # if the pushuri is the short form (USK@/name/#), generate the key.
         if pushuri.startswith("USK@/"):
             ui.status("creating a new key for the repo. To use your default key, call fn-create.\n")
-            from sitecmds import genkeypair
+            from .sitecmds import genkeypair
             fcphost, fcpport = opts["fcphost"], opts["fcpport"]
             if not fcphost:
                 fcphost = DEFAULT_FCP_HOST

          
@@ 812,8 817,8 @@ def freenetclone(orig, *args, **opts):
         # Expecting dest to be something like freenet://name@key/reponame
         local_identifier = strip_protocol(dest).split('/')[0]
 
-        from wot_id import Local_WoT_ID
-        from wot import get_fcpopts
+        from .wot_id import Local_WoT_ID
+        from .wot import get_fcpopts
         local_identity = Local_WoT_ID(local_identifier,
                                       get_fcpopts(fcphost=opts["fcphost"],
                                                   fcpport=opts["fcpport"]))

          
@@ 871,14 876,14 @@ commit = !$HG clt --date "$(date -u "+%Y
 
 
 # really wrap the command
-entry = extensions.wrapcommand(commands.table, "push", freenetpush)
+entry = extensions.wrapcommand(commands.table, b"push", freenetpush)
 entry[1].extend(FCP_OPTS)
-entry = extensions.wrapcommand(commands.table, "pull", freenetpull)
+entry = extensions.wrapcommand(commands.table, b"pull", freenetpull)
 entry[1].extend(PULL_OPTS)
 entry[1].extend(FCP_OPTS)
 entry[1].extend(WOT_OPTS)
 entry[1].extend(WOT_PULL_OPTS)
-entry = extensions.wrapcommand(commands.table, "clone", freenetclone)
+entry = extensions.wrapcommand(commands.table, b"clone", freenetclone)
 entry[1].extend(FCP_OPTS)
 entry[1].extend(WOT_OPTS)
 entry[1].extend(WOT_CREATE_OPTS)

          
@@ 888,6 893,8 @@ entry[1].extend(WOT_CREATE_OPTS)
 
 from mercurial import util
 try:
+    from mercurial.interfaces.repository import peer as peerrepository
+except ImportError:
+    try:
-    from mercurial.peer import peerrepository
+        from mercurial.peer import peerrepository
-except ImportError:
+    except ImportError:
-    from mercurial.repo import repository as peerrepository
+        from mercurial.repo import repository as peerrepository
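
A second "except ImportError" on the same try is legal but dead: except
clauses only guard the try body, never an exception raised inside an
earlier handler, so an import fallback chain has to nest (hypothetical
module names):

    try:
        import location_new as mod
    except ImportError:
        try:
            import location_old as mod    # only a nested try catches this
        except ImportError:
            import location_oldest as mod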

          
M infocalypse/arccmds.py +6 -6
@@ 21,16 21,16 @@ 
 
 from mercurial import util
 
-from fcpclient import get_version, get_usk_for_usk_version, is_usk_file, is_usk
+from .fcpclient import get_version, get_usk_for_usk_version, is_usk_file, is_usk
 
-from config import Config
-from infcmds import setup, do_key_setup, is_redundant, run_until_quiescent
-from updatesm import QUIESCENT, FINISHING
-from archivesm import create_dirs, ArchiveUpdateContext, \
+from .config import Config
+from .infcmds import setup, do_key_setup, is_redundant, run_until_quiescent
+from .updatesm import QUIESCENT, FINISHING
+from .archivesm import create_dirs, ArchiveUpdateContext, \
      start_inserting_blocks, start_requesting_blocks, cleanup_dirs, \
      ARC_INSERTING_URI, ARC_REQUESTING_URI, ARC_CACHING_TOPKEY
 
-from arclocal import local_create, local_synch, local_update, local_reinsert
+from .arclocal import local_create, local_synch, local_update, local_reinsert
 
 
 def arc_cleanup(update_sm, top_key_state=None):

          
M infocalypse/archivesm.py +15 -15
@@ 23,22 23,22 @@ import os
 import random
 import shutil
 
-import archivetop
+from . import archivetop
 
-from fcpconnection import make_id, SUCCESS_MSGS, sha1_hexdigest
-from fcpclient import get_version, get_usk_hash, get_usk_for_usk_version, \
+from .fcpconnection import make_id, SUCCESS_MSGS, sha1_hexdigest
+from .fcpclient import get_version, get_usk_hash, get_usk_for_usk_version, \
      is_usk
-from fcpmessage import GET_DEF, PUT_FILE_DEF
+from .fcpmessage import GET_DEF, PUT_FILE_DEF
 
-from statemachine import StateMachine, State, DecisionState, \
+from .statemachine import StateMachine, State, DecisionState, \
      RetryingRequestList, CandidateRequest
-from updatesm import UpdateStateMachine, QUIESCENT, FAILING, FINISHING, \
+from .updatesm import UpdateStateMachine, QUIESCENT, FAILING, FINISHING, \
      RequestingUri, InsertingUri, UpdateContextBase, PAD_BYTE
 
-from archivetop import top_key_tuple_to_bytes, default_out
+from .archivetop import top_key_tuple_to_bytes, default_out
 
-from chk import clear_control_bytes
-from graph import FREENET_BLOCK_LEN, MAX_METADATA_HACK_LEN
+from .chk import clear_control_bytes
+from .graph import FREENET_BLOCK_LEN, MAX_METADATA_HACK_LEN
 
 TMP_DIR = "__TMP__"
 BLOCK_DIR = "__TMP_BLOCKS__"

          
@@ 96,7 96,7 @@ class ArchiveUpdateContext(UpdateContext
             return
 
         if not os.path.exists(file_name):
-            print "DOESN'T EXIST: ", file_name
+            print("DOESN'T EXIST: ", file_name)
             return
 
         if not length is None:

          
@@ 470,7 470,7 @@ class FixingUpTopKey(State):
                 if chk == 'CHK@':
                     # Use the CHK's inserted by the previous state
                     # to fixup the CHK values in the provisional top key tuple.
-                    new_block[1][index] = chks.next()
+                    new_block[1][index] = next(chks)
             new_block[1] = tuple(new_block[1])
             new_block = tuple(new_block)
             updated_blocks.append(new_block)
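
Iterator objects lose their .next() method in Python 3; the next() builtin
replaces it:

    chks = iter(["CHK@aaa", "CHK@bbb"])    # hypothetical CHK queue
    first = next(chks)                     # replaces chks.next()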

          
@@ 500,7 500,7 @@ class RequestHistory:
 
     def dump(self, out_func=default_out):
         """ Debugging dump function. """
-        keys = self.history.keys()
+        keys = list(self.history.keys())
         keys.sort()
         out_func("--- dumping request history ---\n")
         for key in keys:

          
@@ 623,7 623,7 @@ class RequestingRedundantBlocks(Retrying
             if len(block[1]) == 0:
                 continue
 
-            chk_ordinals = range(0, len(block[1]))
+            chk_ordinals = list(range(0, len(block[1])))
             # DESIGN INTENT: Don't favor primary over redundant.
             random.shuffle(chk_ordinals)
             ordinal = chk_ordinals.pop()

          
@@ 809,7 809,7 @@ class ArchiveStateMachine(UpdateStateMac
         """
         StateMachine.reset(self)
         if len(self.ctx.orphaned) > 0:
-            print "BUG?: Abandoning orphaned requests."
+            print("BUG?: Abandoning orphaned requests.")
             self.ctx.orphaned.clear()
         self.ctx = ArchiveUpdateContext(self, self.ctx.ui_)
 

          
@@ 926,7 926,7 @@ def check_keys(ctx, required_keys):
     # Grrr... hacking to avoid pylint W0104
     for key in required_keys:
         if not key in ctx and ctx[key]: # Let it raise KeyError
-            print "You just executed unreachable code???"
+            print("You just executed unreachable code???")
 
 def start(update_sm, ctx):
     """ Start running a context on a state machine. """

          
M infocalypse/archivetop.py +3 -3
@@ 12,9 12,9 @@ This allows us to do top key redundancy 
 
 import struct
 
-from fcpconnection import sha1_hexdigest
-from chk import CHK_SIZE, bytes_to_chk, chk_to_bytes
-from topkey import default_out
+from .fcpconnection import sha1_hexdigest
+from .chk import CHK_SIZE, bytes_to_chk, chk_to_bytes
+from .topkey import default_out
 
 MAJOR_VERSION = '01'
 MINOR_VERSION = '02'

          
M infocalypse/arclocal.py +5 -5
@@ 24,14 24,14 @@ import re
 import shutil
 import random
 
-import archivetop
-from fcpclient import get_version, get_usk_hash
-from graph import MAX_METADATA_HACK_LEN
-from archivesm import choose_word, chk_file_name, BLOCK_DIR, TMP_DIR, \
+from . import archivetop
+from .fcpclient import get_version, get_usk_hash
+from .graph import MAX_METADATA_HACK_LEN
+from .archivesm import choose_word, chk_file_name, BLOCK_DIR, TMP_DIR, \
      TOP_KEY_NAME_FMT
 
 # Archive stuff
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('wormarc')
 from blocks import BlockStorage, ITempFileManager
 from archive import WORMBlockArchive, UpToDateException

          
M infocalypse/bundlecache.py +7 -7
@@ 26,23 26,23 @@ import random
 
 from mercurial import commands
 
-from fcpconnection import sha1_hexdigest
+from .fcpconnection import sha1_hexdigest
 
-from graph import FIRST_INDEX, FREENET_BLOCK_LEN, MAX_REDUNDANT_LENGTH
-from graphutil import get_rollup_bounds
+from .graph import FIRST_INDEX, FREENET_BLOCK_LEN, MAX_REDUNDANT_LENGTH
+from .graphutil import get_rollup_bounds
 
 def make_temp_file(temp_dir):
     """ Make a temporary file name. """
-    return os.path.join(temp_dir, '_tmp_' + ('%0.16f' % random.random())[2:14])
+    return os.path.join(temp_dir, b'_tmp_' + (b'%0.16f' % random.random())[2:14])
 
 def is_writable(dir_name):
     """ Check whether the directory exists and is writable.  """
-    tmp_file = os.path.join(dir_name, '_tmp_test_write')
+    tmp_file = os.path.join(dir_name, b'_tmp_test_write')
     out_file = None
     try:
         try:
             out_file = open(tmp_file, 'wb')
-            out_file.write('Can I write here?\n')
+            out_file.write(b'Can I write here?\n')
             return True
         except IOError:
             return False
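
The bytes formatting above leans on PEP 461: %-interpolation for bytes
(including numeric codes like %f) came back in Python 3.5, so this module
needs 3.5 or later:

    import random
    name = b'_tmp_' + (b'%0.16f' % random.random())[2:14]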

          
@@ 230,6 230,6 @@ class BundleCache:
         for name in os.listdir(self.base_dir):
             # Only remove files that we created in case cache_dir
             # is set to something like ~/.
-            if name.startswith("_tmp_"):
+            if name.startswith(b"_tmp_"):
                 os.remove(os.path.join(self.base_dir, name))
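
os.listdir and os.path.join accept bytes paths in Python 3, but str and
bytes cannot be mixed in a single call, so base_dir and the b"_tmp_"
prefixes above have to agree:

    import os
    entries = os.listdir(b".")                  # bytes in, bytes out
    path = os.path.join(b".", b"_tmp_example")
    # os.path.join(b".", "_tmp_example") raises TypeError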
 

          
M infocalypse/choose.py +5 -5
@@ 21,10 21,10 @@ 
 
 import random
 
-from graph import MAX_PATH_LEN, block_cost, print_list, canonical_path_itr, \
+from .graph import MAX_PATH_LEN, block_cost, print_list, canonical_path_itr, \
      build_version_table
 
-from graphutil import get_rollup_bounds
+from .graphutil import get_rollup_bounds
 # This is the maximum allowed ratio of allowed path block cost
 # to minimum full update block cost.
 # It is used in low_block_cost_edges() to determine when a

          
@@ 281,11 281,11 @@ def get_update_edges(graph, from_index, 
 
 def dump_update_edges(first, second, all_edges):
     """ Debugging function to print update edges. """
-    print "--- update edges --- "
+    print("--- update edges --- ")
     print_list("known edges  :", all_edges)
     print_list("first choice :", first)
     print_list("second choice:", second)
-    print "---"
+    print("---")
 
 def get_top_key_updates(graph, repo, version_table=None):
     """ Returns the update tuples needed to build the top key."""

          
@@ 332,7 332,7 @@ def get_top_key_updates(graph, repo, ver
 
     for head in ret[0][2]:
         if not head in result[1]:
-            print "Expected head not in all_heads!", head[:12]
+            print("Expected head not in all_heads!", head[:12])
             assert False
 
     #top_update = list(ret[0])

          
M infocalypse/commands.py +30 -30
@@ 1,25 1,25 @@ 
 from binascii import hexlify
 from mercurial import util
 
-from infcmds import get_config_info, execute_create, execute_pull, \
+from .infcmds import get_config_info, execute_create, execute_pull, \
     execute_push, execute_setup, execute_copy, execute_reinsert, \
     execute_info
 
-from fmscmds import execute_fmsread, execute_fmsnotify, get_uri_from_hash, \
+from .fmscmds import execute_fmsread, execute_fmsnotify, get_uri_from_hash, \
     execute_setupfms
 
-from sitecmds import execute_putsite, execute_genkey
-from wikicmds import execute_wiki, execute_wiki_apply
-from arccmds import execute_arc_create, execute_arc_pull, execute_arc_push, \
+from .sitecmds import execute_putsite, execute_genkey
+from .wikicmds import execute_wiki, execute_wiki_apply
+from .arccmds import execute_arc_create, execute_arc_pull, execute_arc_push, \
     execute_arc_reinsert
 
-from config import read_freesite_cfg, Config, normalize
-from validate import is_hex_string, is_fms_id
+from .config import read_freesite_cfg, Config, normalize
+from .validate import is_hex_string, is_fms_id
 
 import os
 import atexit
 
-from keys import parse_repo_path, USK
+from .keys import parse_repo_path, USK
 
 
 def set_target_version(ui_, repo, opts, params, msg_fmt):

          
@@ 43,8 43,8 @@ def infocalypse_update_repo_list(ui, **o
     if not opts['wot']:
         raise util.Abort("Update which repository list? Use --wot")
 
-    import wot
-    from wot_id import Local_WoT_ID
+    from . import wot
+    from .wot_id import Local_WoT_ID
     wot.update_repo_listing(ui, Local_WoT_ID(opts['wot'], fcpopts=wot.get_fcpopts(
         fcphost=opts["fcphost"],
         fcpport=opts["fcpport"])), 

          
@@ 73,7 73,7 @@ def infocalypse_create(ui_, repo, local_
             ui_.warn("Warning: Creating repository without redundancy. (R0 or"
                      " R1)\n")
 
-        from wot_id import Local_WoT_ID
+        from .wot_id import Local_WoT_ID
 
         local_identity = Local_WoT_ID(nick_prefix)
 

          
@@ 90,10 90,10 @@ def infocalypse_create(ui_, repo, local_
     # This is a WoT repository.
     if local_identity:
         # Prompt whether to replace in the case of conflicting names.
-        from wot import build_repo_list
+        from .wot import build_repo_list
 
         request_usks = build_repo_list(ui_, local_identity)
-        names = map(lambda x: USK(x).get_repo_name(), request_usks)
+        names = [USK(x).get_repo_name() for x in request_usks]
         new_name = USK(insert_uri).get_repo_name()
 
         if new_name in names:
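
The map() change here is behavioral, not cosmetic: Python 3's map() returns
a one-shot iterator, and this function later needs names.index(new_name)
plus repeated membership tests. A minimal sketch (split() stands in for
USK(...).get_repo_name()):

    usks = ['USK@a/repo1/1', 'USK@b/repo2/4']
    names = map(lambda x: x.split('/')[1], usks)   # py3: lazy, single pass
    assert 'repo1' in names                        # consumes the iterator
    # names.index('repo2')                         # AttributeError: no .index

    names = [x.split('/')[1] for x in usks]        # list: reusable, indexable
    assert names.index('repo2') == 1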

          
@@ 109,7 109,7 @@ def infocalypse_create(ui_, repo, local_
             existing_usk = request_usks[names.index(new_name)]
 
             existing_dir = None
-            for directory, request_usk in stored_cfg.request_usks.iteritems():
+            for directory, request_usk in stored_cfg.request_usks.items():
                 if request_usk == existing_usk:
                     if existing_dir:
                         raise util.Abort("Configuration lists the same "

          
@@ 133,7 133,7 @@ def infocalypse_create(ui_, repo, local_
                       'Context': 'vcs'}
 
         import fcp
-        import wot
+        from . import wot
         node = fcp.FCPNode(**wot.get_fcpopts(fcphost=opts["fcphost"],
                                              fcpport=opts["fcpport"]))
         atexit.register(node.shutdown)

          
@@ 157,7 157,7 @@ def infocalypse_create(ui_, repo, local_
         stored_cfg.set_wot_identity(inserted_to[0], local_identity)
         Config.to_file(stored_cfg)
 
-        import wot
+        from . import wot
         wot.update_repo_listing(ui_, local_identity, 
                                 fcphost=opts["fcphost"],
                                 fcpport=opts["fcpport"])

          
@@ 239,7 239,7 @@ def infocalypse_pull(ui_, repo, **opts):
         params['FMSREAD_ONLYTRUSTED'] = bool(opts['onlytrusted'])
         request_uri = get_uri_from_hash(ui_, repo, params, stored_cfg)
     elif opts['wot']:
-        import wot
+        from . import wot
         truster = get_truster(ui_, repo, opts['truster'],
                               fcpport=opts["fcpport"], fcphost=opts["fcphost"])
         request_uri = wot.resolve_pull_uri(ui_, opts['wot'], truster, repo,

          
@@ 260,8 260,8 @@ def infocalypse_pull(ui_, repo, **opts):
 
 
 def infocalypse_pull_request(ui, repo, **opts):
-    import wot
-    from wot_id import WoT_ID
+    from . import wot
+    from .wot_id import WoT_ID
     if not opts['wot']:
         raise util.Abort("Who do you want to send the pull request to? Set "
                          "--wot.\n")

          
@@ 275,8 275,8 @@ def infocalypse_pull_request(ui, repo, *
 
 
 def infocalypse_check_notifications(ui, repo, **opts):
-    import wot
-    from wot_id import Local_WoT_ID
+    from . import wot
+    from .wot_id import Local_WoT_ID
     if not opts['wot']:
         raise util.Abort("What ID do you want to check for notifications? Set"
                          " --wot.\n")

          
@@ 286,7 286,7 @@ def infocalypse_check_notifications(ui, 
 
 
 def infocalypse_connect(ui, repo, **opts):
-    import plugin_connect
+    from . import plugin_connect
     plugin_connect.connect(ui, repo)
 
 

          
@@ 319,8 319,8 @@ def infocalypse_push(ui_, repo, **opts):
     request_uri = stored_cfg.get_request_uri(repo.root)
     associated_wot_id = stored_cfg.get_wot_identity(request_uri)
     if inserted_to and associated_wot_id:
-        import wot
-        from wot_id import Local_WoT_ID
+        from . import wot
+        from .wot_id import Local_WoT_ID
         local_id = Local_WoT_ID('@' + associated_wot_id)
         wot.update_repo_listing(ui_, local_id, 
                                 fcphost=opts["fcphost"],

          
@@ 333,7 333,7 @@ def infocalypse_info(ui_, repo, **opts):
     # FCP not required. Hmmm... Hack
     opts['fcphost'] = ''
     opts['fcpport'] = 0
-    print get_config_info(ui_, opts)
+    print(get_config_info(ui_, opts))
     params, stored_cfg = get_config_info(ui_, opts)
     request_uri = opts['uri']
     if not request_uri:

          
@@ 549,8 549,8 @@ def infocalypse_setupwot(ui_, **opts):
     if not opts['truster']:
         raise util.Abort("Specify default truster with --truster")
 
-    import wot
-    from wot_id import Local_WoT_ID
+    from . import wot
+    from .wot_id import Local_WoT_ID
     fcpopts = wot.get_fcpopts(fcphost=opts["fcphost"], fcpport=opts["fcpport"])
     wot.execute_setup_wot(ui_, Local_WoT_ID(opts['truster'], fcpopts=fcpopts))
 

          
@@ 560,7 560,7 @@ def infocalypse_setupfreemail(ui, repo, 
     Set a Freemail password. If --truster is not given uses the default
     truster.
     """
-    import wot
+    from . import wot
     # TODO: Here --truster doesn't make sense. There is no trust involved.
     # TODO: Should this be part of the normal fn-setup?
     wot.execute_setup_freemail(ui, get_truster(ui, repo, opts['truster'],

          
@@ 582,8 582,8 @@ def get_truster(ui, repo=None, truster_i
 
     :rtype : Local_WoT_ID
     """
-    import wot
-    import wot_id
+    from . import wot
+    from . import wot_id
     fcpopts = wot.get_fcpopts(fcphost=fcphost, fcpport=fcpport)
     if truster_identifier:
         return wot_id.Local_WoT_ID(truster_identifier, fcpopts=fcpopts)

          
M infocalypse/config.py +52 -52
@@ 23,22 23,22 @@ 
 import os
 import sys
 
-from fcpclient import get_usk_hash, is_usk_file, get_version, \
+from .fcpclient import get_usk_hash, is_usk_file, get_version, \
      get_usk_for_usk_version
-from knownrepos import DEFAULT_TRUST, DEFAULT_GROUPS, \
+from .knownrepos import DEFAULT_TRUST, DEFAULT_GROUPS, \
      DEFAULT_NOTIFICATION_GROUP
 
-from validate import is_hex_string, is_fms_id
+from .validate import is_hex_string, is_fms_id
 
-from mercurial import util
+from mercurial import util, error
 
 # Similar hack is used in fms.py.
-import knownrepos # Just need a module to read __file__ from
+from . import knownrepos # Just need a module to read __file__ from
 
 try:
     #raise ImportError('fake error to test code path')
     __import__('ConfigParser')
-except ImportError, err:
+except ImportError as err:
     # ConfigParser doesn't ship with, the 1.3 Windows binary distro
     # http://mercurial.berkwood.com/binaries/Mercurial-1.3.exe
     # so we do some hacks to use a local copy.
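
Two py3 changes meet in this hunk: "except ImportError, err:" is a
SyntaxError on Python 3 (the "as" form parses on 2.6+ and 3), and the module
was renamed ConfigParser -> configparser. A minimal sketch of the usual
transitional shim (this patch instead jumps straight to the py3 name below):

    try:
        import configparser                  # py3 name
    except ImportError as err:               # py3 also unbinds "err" after
        saved = err                          # the handler exits (PEP 3110)
        import ConfigParser as configparser  # py2 fallback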

          
@@ 46,22 46,22 @@ except ImportError, err:
     #print "No ConfigParser? This doesn't look good."
     PARTS = os.path.split(os.path.dirname(knownrepos.__file__))
     if PARTS[-1] != 'infocalypse':
-        print "ConfigParser is missing and couldn't hack path. Giving up. :-("
+        print(b"ConfigParser is missing and couldn't hack path. Giving up. :-(b", knownrepos.__file__, PARTS)
     else:
         PATH = os.path.join(PARTS[0], 'python2_5_files')
         sys.path.append(PATH)
-    #print ("Put local copies of python2.5 ConfigParser.py, "
+    #print (b"Put local copies of python2.5 ConfigParser.py, "
     #       + "nntplib.py and netrc.py in path...")
-    print
+    print()
 
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 
 if sys.platform == 'win32':
-    CFG_NAME = 'infocalypse.ini'
+    CFG_NAME = b'infocalypse.ini'
 else:
-    CFG_NAME = '.infocalypse'
+    CFG_NAME = b'.infocalypse'
 
-DEFAULT_CFG_PATH = '~/%s' % CFG_NAME
+DEFAULT_CFG_PATH = b'~/%s' % CFG_NAME
 
 # hg version that the format last changed in.
 FORMAT_VERSION = '348500df1ac6'
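
CFG_NAME and DEFAULT_CFG_PATH go bytes because Mercurial's py3 API traffics
in bytes, and the os.path functions keep bytes paths bytes all the way
through; the template still works thanks to bytes %-formatting (PEP 461,
Python 3.5+). A minimal sketch:

    import os

    CFG_NAME = b'.infocalypse'
    DEFAULT_CFG_PATH = b'~/%s' % CFG_NAME         # bytes %-formatting
    path = os.path.expanduser(DEFAULT_CFG_PATH)   # bytes in -> bytes out
    assert isinstance(path, bytes)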

          
@@ 78,7 78,7 @@ def norm_path(dir_name):
     # chokes on ':' in option values.
     # Required for Windows. Should be a harmless NOP on *nix.
     split = os.path.splitdrive(dir_name)
-    fixed = split[0].replace(':', '') + split[1]
+    fixed = split[0].replace(b':', b'') + split[1]
     return fixed
 
 # REDFLAG: THis is an ancient hack.  Safe to back it out?

          
@@ 91,17 91,17 @@ def norm_path(dir_name):
 def detect_and_fix_default_bug(ui_, file_path):
     """ INTERNAL: Fix old (pre: 466307bc98bc) config files. """
     raw = open(file_path, 'rb').read()
-    if raw.find('[default]') == -1:
+    if raw.find(b'[default]') == -1:
         return
 
     justin_case = os.path.join(os.path.dirname(file_path), 'INFOCALYPSE.BAK')
-    ui_.warn("Hit '[default'] bug in your config file.\n"
-             "Saving existing config as:\n%s\n" % justin_case)
+    ui_.warn(b"Hit '[default'] bug in your config file.\n"
+             b"Saving existing config as:\n%s\n" % justin_case)
     if os.path.exists(justin_case):
-        ui_.warn("Refused to overwrite backup!\n"
-                 +"Move:\n%s\n" % justin_case
-                 +"out of the way and try again.\n")
-        raise util.Abort("Refused to overwrite backup config file.")
+        ui_.warn(b"Refused to overwrite backup!\n"
+                 +b"Move:\n%s\n" % justin_case
+                 +b"out of the way and try again.\n")
+        raise error.Abort(b"Refused to overwrite backup config file.")
     out_file = open(justin_case, 'wb')
     try:
         out_file.write(raw)

          
@@ 112,7 112,7 @@ def detect_and_fix_default_bug(ui_, file
         fixed_file.write(raw.replace('[default]', '[primary]'))
     finally:
         fixed_file.close()
-    ui_.warn("Applied fix.\n")
+    ui_.warn(b"Applied fix.\n")
 
 
 # Why didn't I subclass dict?
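
One trap in the warn/abort conversions above: a bytes format string only
accepts bytes arguments, so wherever justin_case or file_name is still a
str the %s fails at runtime, and bytes literals have no .format() at all,
so the b"...{0}".format(...) spellings left in this file will raise
AttributeError and want %-formatting instead. A minimal sketch of the %s
case (the path is a placeholder):

    backup = '/tmp/INFOCALYPSE.BAK'              # still a str here
    try:
        msg = b"Saving existing config as:\n%s\n" % backup
    except TypeError:                            # bytes %s needs bytes args
        msg = b"Saving existing config as:\n%s\n" % backup.encode('utf-8')
    assert msg.endswith(b'INFOCALYPSE.BAK\n')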

          
@@ 174,8 174,8 @@ class Config:
         prev = self.get_index(usk_or_id)
         index = abs(index)
         if not prev is None and index < prev:
-            print "update_index -- exiting, new value is lower %i %i %s" % \
-                  (prev, index, usk_or_id)
+            print(b"update_index -- exiting, new value is lower %i %i %s" % \
+                  (prev, index, usk_or_id))
             return
         self.version_table[usk_or_id] = index
 

          
@@ 193,16 193,16 @@ class Config:
         normalized = normalize(request_uri)
         match = None
 
-        for repo_dir, uri in self.request_usks.iteritems():
+        for repo_dir, uri in self.request_usks.items():
             if normalized == normalize(uri):
                 if match:
-                    raise util.Abort("Multiple directories match {0}."
+                    raise error.Abort(b"Multiple directories match {0}."
                                      .format(request_uri))
                 else:
                     match = repo_dir
 
         if not match:
-            raise util.Abort("No repository matches {0}.".format(request_uri))
+            raise error.Abort(b"No repository matches {0}.".format(request_uri))
 
         # Assuming path has not become un-normalized since being set with
         # update_dir().

          
@@ 264,15 264,15 @@ class Config:
     def get_freemail_password(self, wot_identity):
         """
         Return the password associated with the given WoT identity.
-        Raise util.Abort if one is not set.
+        Raise error.Abort if one is not set.
         :type wot_identity: WoT_ID
         """
         identity_id = wot_identity.identity_id
         if identity_id in self.freemail_passwords:
             return self.freemail_passwords[identity_id]
         else:
-            raise util.Abort("{0} does not have a Freemail password set.\n"
-                             "Run hg fn-setupfreemail --truster {0}\n"
+            raise error.Abort(b"{0} does not have a Freemail password set.\n"
+                             b"Run hg fn-setupfreemail --truster {0}\n"
                              .format(wot_identity))
 
     def set_repo_list_edition(self, wot_identity, edition):

          
@@ 319,19 319,19 @@ class Config:
     def validate_trust_map_entry(cls, cfg, fields):
         """ INTERNAL: Raise a ValueError for invalid trust map entries. """
         if not is_fms_id(fields[0]):
-            raise ValueError("%s doesn't look like an fms id." %
+            raise ValueError(b"%s doesn't look like an fms id." %
                                      fields[0])
         if len(fields) < 2:
-            raise ValueError("No USK hashes for fms id: %s?" %
+            raise ValueError(b"No USK hashes for fms id: %s?" %
                                      fields[0])
         for value in fields[1:]:
             if not is_hex_string(value):
-                raise ValueError("%s doesn't look like a repo hash." %
+                raise ValueError(b"%s doesn't look like a repo hash." %
                                          value)
 
         if fields[0] in cfg.fmsread_trust_map:
-            raise ValueError(("%s appears more than once in the "
-                              + "[fmsread_trust_map] section.") %
+            raise ValueError((b"%s appears more than once in the "
+                              + b"[fmsread_trust_map] section.") %
                              fields[0])
 
     @classmethod

          
@@ 436,12 436,12 @@ class Config:
             If there's no [infocalypse] section, a Config is
             created from the default file."""
 
-        file_name = ui_.config('infocalypse', 'cfg_file', None)
+        file_name = ui_.config(b'infocalypse', b'cfg_file', None)
         if file_name is None:
             file_name = os.path.expanduser(DEFAULT_CFG_PATH)
         if not os.path.exists(file_name):
-            ui_.warn("Couldn't read config file: %s\n" % file_name)
-            raise util.Abort("Run fn-setup.\n")
+            ui_.warn(b"Couldn't read config file: %s\n" % file_name.encode("utf8"))
+            raise error.Abort(b"Run fn-setup.\n")
 
         detect_and_fix_default_bug(ui_, file_name)
 

          
@@ 464,15 464,15 @@ class Config:
 
         parser.add_section('primary')
         parser.set('primary', 'format_version', FORMAT_VERSION)
-        parser.set('primary', 'host', cfg.defaults['HOST'])
-        parser.set('primary', 'port', cfg.defaults['PORT'])
-        parser.set('primary', 'tmp_dir', cfg.defaults['TMP_DIR'])
+        parser.set('primary', 'host', cfg.defaults['HOST'].decode("utf-8"))
+        parser.set('primary', 'port', str(cfg.defaults['PORT']))
+        parser.set('primary', 'tmp_dir', cfg.defaults['TMP_DIR'].decode("utf-8"))
         parser.set('primary', 'default_private_key',
-                   cfg.defaults['DEFAULT_PRIVATE_KEY'])
+                   cfg.defaults['DEFAULT_PRIVATE_KEY'].decode("utf-8"))
 
         parser.set('primary', 'fms_host', cfg.defaults['FMS_HOST'])
-        parser.set('primary', 'fms_port', cfg.defaults['FMS_PORT'])
-        parser.set('primary', 'fms_id', cfg.defaults['FMS_ID'])
+        parser.set('primary', 'fms_port', str(cfg.defaults['FMS_PORT']))
+        parser.set('primary', 'fms_id', str(cfg.defaults['FMS_ID']))
         parser.set('primary', 'fmsnotify_group',
                    cfg.defaults['FMSNOTIFY_GROUP'])
         parser.set('primary', 'fmsread_groups', '|'.join(cfg.fmsread_groups))

          
@@ 504,9 504,9 @@ class Config:
             entry = cfg.fmsread_trust_map[fms_id]
             assert len(entry) > 0
             parser.set('fmsread_trust_map', str(index),
-                       fms_id + '|' + '|'.join(entry))
+                       fms_id + '|' + '|'.join(i.decode("utf-8") for i in entry))
 
-        out_file = open(file_name, 'wb')
+        out_file = open(file_name, 'w')
         try:
             parser.write(out_file)
         finally:

          
@@ 544,13 544,13 @@ def read_freesite_cfg(ui_, repo, params,
 
     ui_.status('Using config file:\n%s\n' % cfg_file)
     if not os.path.exists(cfg_file):
-        ui_.warn("Can't read: %s\n" % cfg_file)
-        raise util.Abort(no_cfg_err)
+        ui_.warn(b"Can't read: %s\n" % cfg_file)
+        raise error.Abort(no_cfg_err)
 
     parser = ConfigParser()
     parser.read(cfg_file)
     if not parser.has_section('default'):
-        raise util.Abort("Can't read default section of config file?")
+        raise error.Abort(b"Can't read default section of config file?")
 
     params['SITE_NAME'] = parser.get('default', 'site_name')
 

          
@@ 585,10 585,10 @@ def read_freesite_cfg(ui_, repo, params,
         ui_.status('Reading site key from:\n%s\n' % key_file)
         params['SITE_KEY'] = open(key_file, 'rb').read().strip()
     except IOError:
-        raise util.Abort("Couldn't read site key from: %s" % key_file)
+        raise error.Abort(b"Couldn't read site key from: %s" % key_file)
 
     if not params['SITE_KEY'].startswith('SSK@'):
-        raise util.Abort("Stored site key not an SSK?")
+        raise error.Abort(b"Stored site key not an SSK?")
 
 
 

          
@@ 662,7 662,7 @@ overlayedits = False
 #wiki_repo_usk = <request uri of your wikitext infocalypse repo>
 """
     if os.path.exists(file_name):
-        raise util.Abort("Already exists: %s" % file_name)
+        raise error.Abort(b"Already exists: %s" % file_name)
 
 
     out_file = open(file_name, 'w')

          
M infocalypse/fcpclient.py +10 -10
@@ 22,10 22,10 @@ 
 
 import mimetypes, os, re
 
-from fcpconnection import FCPConnection, IDataSource, READ_BLOCK, \
+from .fcpconnection import FCPConnection, IDataSource, READ_BLOCK, \
      MinimalClient, PolledSocket, FCPError, sha1_hexdigest
 
-from fcpmessage import GETNODE_DEF, GENERATE_SSK_DEF, \
+from .fcpmessage import GETNODE_DEF, GENERATE_SSK_DEF, \
      GET_REQUEST_URI_DEF, GET_DEF, \
      PUT_FILE_DEF, PUT_REDIRECT_DEF,  PUT_COMPLEX_DIR_DEF
 

          
@@ 202,7 202,7 @@ class FileInfoDataSource(IDataSource):
         #print "FileInfoDataSource.read -- called"
         assert not self.chunks is None
         if self.chunks:
-            ret = self.chunks.next()
+            ret = next(self.chunks)
             if ret is None:
                 self.chunks = None
                 #print "FileInfoDataSource.read -- returned None"

          
@@ 353,7 353,7 @@ def prefetch_usk(client, usk_uri, allowe
         in usk_uri.
     """
 
-    if client.in_params.async:
+    if client.in_params._async:
         raise ValueError("This function only works synchronously.")
 
     usk_uri = get_negative_usk(usk_uri)

          
@@ 394,7 394,7 @@ def latest_usk_index(client, usk_uri, al
         a key which points to a large block of data.
     """
 
-    if client.in_params.async:
+    if client.in_params._async:
         raise ValueError("This function only works synchronously.")
 
     client.reset()

          
@@ 457,11 457,11 @@ def show_progress(dummy, msg):
     """ Default message callback implementation. """
 
     if msg[0] == 'SimpleProgress':
-        print "Progress: (%s/%s/%s)" % (msg[1]['Succeeded'],
+        print("Progress: (%s/%s/%s)" % (msg[1]['Succeeded'],
                                         msg[1]['Required'],
-                                        msg[1]['Total'])
+                                        msg[1]['Total']))
     else:
-        print "Progress: %s" % msg[0]
+        print("Progress: %s" % msg[0])
 
 def parse_progress(msg):
     """ Parse a SimpleProgress message into a tuple. """

          
@@ 478,7 478,7 @@ class FCPClient(MinimalClient):
     """ A class to execute common FCP requests.
 
         This class provides a simplified interface for common FCP commands.
-        Calls are blocking by default.  Set FCPClient.in_params.async = True
+        Calls are blocking by default.  Set FCPClient.in_params._async = True
         to run asynchronously.
 
         You can set FCP parameters using the

          
@@ 560,7 560,7 @@ class FCPClient(MinimalClient):
             REQUIRES: insert_uri is a private SSK or USK.
         """
 
-        if self.in_params.async:
+        if self.in_params._async:
             raise ValueError("This function only works synchronously.")
 
         assert is_usk(insert_uri) or is_ssk(insert_uri)
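
The in_params.async -> in_params._async rename is forced by the language:
async (and await) became reserved keywords in Python 3.7 via PEP 492, so
even plain attribute access spelled obj.async stops compiling. A minimal
sketch:

    class ClientParams:
        def __init__(self):
            self._async = False   # "self.async = False": SyntaxError on 3.7+

    params = ClientParams()
    params._async = True
    # params.async                # SyntaxError at compile time on 3.7+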

          
M infocalypse/fcpconnection.py +83 -83
@@ 37,7 37,7 @@ 
     Both blocking and non-blocking client requests are supported.
     If MinimalClient.in_params.async == True, FCPConnection.start_connection()
     returns a request id string immediately.  This is the same request
-    id which appears in the 'Identifier' field of subsequent incoming.
+    id which appears in the b'Identifier' field of subsequent incoming
     FCP messages. The MinimalClient.message_callback(client, msg)
     callback function is called for every incoming client message for
     the request. Async client code can detect the request has finished

          
@@ 59,17 59,17 @@ try:
     from hashlib import sha1
     def sha1_hexdigest(bytes):
         """ Return the SHA1 hexdigest of bytes using the hashlib module. """
-        return sha1(bytes).hexdigest()
+        return sha1(bytes.encode('utf8')).hexdigest().encode("utf-8")
 except ImportError:
     # Fall back so that code still runs on pre 2.6 systems.
     import sha
     def sha1_hexdigest(bytes):
         """ Return the SHA1 hexdigest of bytes using the sha module. """
-        return sha.new(bytes).hexdigest()
+        return sha.new(bytes).hexdigest().encode("utf-8")
 
-from fcpmessage import make_request, FCPParser, HELLO_DEF, REMOVE_REQUEST_DEF
+from .fcpmessage import make_request, FCPParser, HELLO_DEF, REMOVE_REQUEST_DEF
 
-FCP_VERSION = '2.0' # Expected version value sent in ClientHello
+FCP_VERSION = b'2.0' # Expected version value sent in ClientHello
 
 RECV_BLOCK = 4096 # socket recv
 SEND_BLOCK = 4096 # socket send
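
Everything headed for the wire becomes a bytes literal because Python 3
sockets refuse str where Python 2 let ASCII str through. A minimal sketch
(the message body is a made-up FCP-shaped example):

    import socket

    a, b = socket.socketpair()
    a.sendall(b'ClientHello\nExpectedVersion=2.0\nEndMessage\n')  # bytes only
    # a.sendall('ClientHello\n')  # TypeError: a bytes-like object is required
    assert b.recv(4096).startswith(b'ClientHello')
    a.close(); b.close()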

          
@@ 85,18 85,18 @@ CONNECTED  = 2
 CLOSED     = 3
 UPLOADING  = 4
 
-CONNECTION_STATES = {CONNECTING:'CONNECTING',
-                     CONNECTED:'CONNECTED',
-                     CLOSED:'CLOSED',
-                     UPLOADING:'UPLOADING'}
+CONNECTION_STATES = {CONNECTING:b'CONNECTING',
+                     CONNECTED:b'CONNECTED',
+                     CLOSED:b'CLOSED',
+                     UPLOADING:b'UPLOADING'}
 
 def example_state_callback(dummy, state):
     """ Example FCPConnection.state_callback function. """
 
     value = CONNECTION_STATES.get(state)
     if not value:
-        value = "UNKNOWN"
-    print "FCPConnection State -> [%s]" % value
+        value = b"UNKNOWN"
+    print("FCPConnection State -> [%s]" % value)
 
 def make_id():
     """ INTERNAL: Make a unique id string. """

          
@@ 143,7 143,7 @@ class NonBlockingSocket(IAsyncSocket):
     def __init__(self, connected_socket):
         """ REQUIRES: connected_socket is non-blocking and fully connected. """
         IAsyncSocket.__init__(self)
-        self.buffer = ""
+        self.buffer = b""
         self.socket = connected_socket
 
     def write_bytes(self, bytes):

          
@@ 220,7 220,7 @@ class PolledSocket(NonBlockingSocket):
             raise IOError("The socket is closed")
         # Why? Because we don't want to call the recv_callback while
         # reading... wacky re-entrance issues....
-        read = ''
+        read = b''
         ret = True
         while len(read) < MAX_SOCKET_READ: # bound read length
             check_writable  = []

          
@@ 266,21 266,21 @@ class PolledSocket(NonBlockingSocket):
 #-----------------------------------------------------------#
 
 # NOTE:
-# 'DataFound' is sometimes terminal. See msg_is_terminal().
+# b'DataFound' is sometimes terminal. See msg_is_terminal().
 #
 # NOTE:
 # This list is not complete.  It only lists
 # messages generated by supported FCP commands.
 # Messages which always indicate that an FCP request ended in success.
 SUCCESS_MSGS = frozenset([ \
-    'NodeHello', 'SSKKeypair', 'AllData', 'PutSuccessful', 'NodeData',
+    b'NodeHello', b'SSKKeypair', b'AllData', b'PutSuccessful', b'NodeData',
     ])
 
 # Messages which always indicate that an FCP request ended in failure.
 FAILURE_MSGS = frozenset([ \
-    'CloseConnectionDuplicateClientName', 'PutFailed', 'GetFailed',
-    'ProtocolError', 'IdentifierCollision', 'UnknownNodeIdentifier',
-    'UnknownPeerNoteType'
+    b'CloseConnectionDuplicateClientName', b'PutFailed', b'GetFailed',
+    b'ProtocolError', b'IdentifierCollision', b'UnknownNodeIdentifier',
+    b'UnknownPeerNoteType'
     ])
 
 # Messages which always indicate that an FCP request ended.

          
@@ 295,8 295,8 @@ def msg_is_terminal(msg, params):
         return True
 
     # Special cases
-    if msg[0] == 'DataFound' and 'ReturnType' in params and \
-           params['ReturnType'] == 'none':
+    if msg[0] == b'DataFound' and b'ReturnType' in params and \
+           params[b'ReturnType'] == b'none':
         return True
 
     #print "msg_is_terminal: False"

          
@@ 306,24 306,24 @@ def msg_is_terminal(msg, params):
     return False
 
 def get_code(msg):
-    """ Returns integer error code if msg has a 'Code' field
+    """ Returns integer error code if msg has a b'Code' field
         None otherwise.
     """
 
-    # Hmmmm... does 'Code' ever appear in non-error messages?
+    # Hmmmm... does b'Code' ever appear in non-error messages?
     #if not msg[0] in FAILURE_MSGS:
     #    # Message is not an error.
     #    return None
 
-    if not 'Code' in msg[1]:
+    if not b'Code' in msg[1]:
         if msg[0] in FAILURE_MSGS:
-            print "WARNING: get_code(msg, code) couldn't read 'Code'."
+            print("WARNING: get_code(msg, code) couldn't read b'Code'.")
         return None
 
-    return int(msg[1]['Code'])
+    return int(msg[1][b'Code'])
 
 def is_code(msg, error_code):
-    """ Returns True if msg has a 'Code' field and it is
+    """ Returns True if msg has a b'Code' field and it is
         equal to error_code, False, otherwise.
     """
 

          
@@ 333,14 333,14 @@ def is_code(msg, error_code):
     return code == error_code
 
 def is_fatal_error(msg):
-    """ Returns True if msg has a 'Fatal' field and it
+    """ Returns True if msg has a b'Fatal' field and it
         indicates a non-recoverable error, False otherwise.
     """
 
-    value = msg[1].get('Fatal')
+    value = msg[1].get(b'Fatal')
     if value is None:
         return False # hmmm...
-    return bool(value.lower() == 'true')
+    return bool(value.lower() == b'true')
 
 class FCPError(Exception):
     """ An Exception raised when an FCP command fails. """

          
@@ 352,19 352,19 @@ class FCPError(Exception):
 
     def __str__(self):
         text = "FCPError: " + self.fcp_msg[0]
-        if self.fcp_msg[1].has_key('CodeDescription'):
-            text += " -- " + self.fcp_msg[1]['CodeDescription']
+        if b'CodeDescription' in self.fcp_msg[1]:
+            text += " -- " + self.fcp_msg[1][b'CodeDescription'].decode('utf-8')
         return text
 
     def is_code(self, error_code):
-        """ Returns True if the 'Code' field in the FCP error message
+        """ Returns True if the b'Code' field in the FCP error message
             is equal to error_code, False, otherwise.
         """
 
-        if not self.fcp_msg or not 'Code' in self.fcp_msg[1]:
+        if not self.fcp_msg or not b'Code' in self.fcp_msg[1]:
             # YES. This does happen.
             # Hmmmm... just assert?  Can this really happen.
-            print "WARNING: FCPError.is_code() couldn't read 'Code'."
+            print("WARNING: FCPError.is_code() couldn't read b'Code'.")
             return False
 
         return is_code(self.fcp_msg, error_code)
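
dict.has_key() was removed in Python 3; the "in" operator replaces it and
already worked on Python 2. A minimal sketch (the field values are made up):

    fcp_fields = {b'Code': b'27', b'CodeDescription': b'redirect'}
    assert b'CodeDescription' in fcp_fields   # py2 and py3
    # fcp_fields.has_key(b'Code')             # AttributeError on py3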

          
@@ 471,8 471,8 @@ class FCPConnection:
         self.state_callback(self, CONNECTING)
 
         # Send a ClientHello
-        params = {'Name':'FCPConnection[%s]' % make_id(),
-                  'ExpectedVersion': FCP_VERSION}
+        params = {b'Name':b'FCPConnection[%s]' % make_id(),
+                  b'ExpectedVersion': FCP_VERSION}
         self.socket.write_bytes(make_request(HELLO_DEF, params))
         if wait_for_connect:
             # Wait for the reply

          
@@ 505,7 505,7 @@ class FCPConnection:
     def start_request(self, client, data_source = None, set_data_length = True):
         """ Start an FCP request.
 
-            If in_params.async is True this returns immediately, otherwise
+            If in_params._async is True this returns immediately, otherwise
             it blocks until the request finishes.
 
             If client.in_params.send_data is set, trailing data is sent

          
@@ 515,38 515,38 @@ class FCPConnection:
             the other sources are not None the contents of
             client.in_params.send_data are sent.
 
-            If set_data_length is True the 'DataLength' field is set in the
+            If set_data_length is True the b'DataLength' field is set in the
             requests FCP message.
 
-            If in_params.async it True, this method returns the identifier
+            If in_params._async is True, this method returns the identifier
             for the request, otherwise, returns the FCP message which
             terminated the request.
         """
         assert not self.is_uploading()
         assert not client.context
         assert not client.response
-        assert not 'Identifier' in client.in_params.fcp_params
+        assert not b'Identifier' in client.in_params.fcp_params
         identifier = make_id()
-        client.in_params.fcp_params['Identifier'] = identifier
+        client.in_params.fcp_params[b'Identifier'] = identifier
         write_string = False
         if client.in_params.send_data:
             assert not self.data_source
             if data_source:
                 data_source.initialize()
                 if set_data_length:
-                    client.in_params.fcp_params['DataLength'] = (data_source.
+                    client.in_params.fcp_params[b'DataLength'] = (data_source.
                                                                  data_length())
                 self.data_source = data_source
                 self.socket.writable_callback = self.writable_handler
             elif client.in_params.file_name:
                 self.data_source = FileDataSource(client.in_params.file_name)
                 self.data_source.initialize()
-                client.in_params.fcp_params['DataLength'] = (self.
+                client.in_params.fcp_params[b'DataLength'] = (self.
                                                              data_source.
                                                              data_length())
                 self.socket.writable_callback = self.writable_handler
             else:
-                client.in_params.fcp_params['DataLength'] = len(client.
+                client.in_params.fcp_params[b'DataLength'] = len(client.
                                                                 in_params.
                                                                 send_data)
                 write_string = True

          
@@ 562,7 562,7 @@ class FCPConnection:
         assert not client.context
         client.context = RequestContext(client.in_params.allowed_redirects,
                                         identifier,
-                                        client.in_params.fcp_params.get('URI'))
+                                        client.in_params.fcp_params.get(b'URI'))
         if not client.in_params.send_data:
             client.context.file_name = client.in_params.file_name
 

          
@@ 572,7 572,7 @@ class FCPConnection:
         if self.data_source:
             self.state_callback(self, UPLOADING)
 
-        if client.in_params.async:
+        if client.in_params._async:
             return identifier
 
         resp = self.wait_for_terminal(client)

          
@@ 587,10 587,10 @@ class FCPConnection:
             raise Exception("Can't remove while uploading. Sorry :-(")
 
         if not identifier in self.running_clients:
-            print "FCPConnection.remove_request -- unknown identifier: ", \
-                  identifier
-        params = {'Identifier': identifier,
-                  'Global': is_global}
+            print("FCPConnection.remove_request -- unknown identifier: ", \
+                  identifier)
+        params = {b'Identifier': identifier,
+                  b'Global': is_global}
         self.socket.write_bytes(make_request(REMOVE_REQUEST_DEF, params))
 
     def wait_for_terminal(self, client):

          
@@ 615,7 615,7 @@ class FCPConnection:
         #        codes so that client coders don't need to keep track
         #        of the initiating request in order to interpret the
         #        error code.
-        if client.in_params.definition[0] == 'ClientGet' and is_code(msg, 27):
+        if client.in_params.definition[0] == b'ClientGet' and is_code(msg, 27):
             #print "Checking for allowed redirect"
             if client.context.allowed_redirects:
                 #print "Handling redirect"

          
@@ 629,13 629,13 @@ class FCPConnection:
                     del self.running_clients[client.context.running_id]
 
                 client.context.running_id = make_id()
-                client.context.last_uri = msg[1]['RedirectURI']
+                client.context.last_uri = msg[1][b'RedirectURI']
 
                 # Copy, don't modify params.
                 params = {}
                 params.update(client.in_params.fcp_params)
-                params['URI'] = client.context.last_uri
-                params['Identifier'] = client.context.running_id
+                params[b'URI'] = client.context.last_uri
+                params[b'Identifier'] = client.context.running_id
 
                 # Send new request.
                 self.socket.write_bytes(make_request(client.in_params.

          
@@ 658,7 658,7 @@ class FCPConnection:
         """ INTERNAL: Process unexpected messages. """
 
         if not self.node_hello:
-            if msg[0] == 'NodeHello':
+            if msg[0] == b'NodeHello':
                 self.node_hello = msg
                 self.state_callback(self, CONNECTED)
                 return True

          
@@ 666,13 666,13 @@ class FCPConnection:
             raise Exception("Unexpected message before NodeHello: %s"
                             % msg[0])
 
-        if not 'Identifier' in msg[1]:
-            print "Saw message without 'Identifier': %s" % msg[0]
-            print msg
+        if not b'Identifier' in msg[1]:
+            print("Saw message without b'Identifier': %s" % msg[0])
+            print(msg)
             return True
 
-        if not msg[1]['Identifier'] in self.running_clients:
-            #print "No client for identifier: %s" % msg[1]['Identifier']
+        if not msg[1][b'Identifier'] in self.running_clients:
+            #print "No client for identifier: %s" % msg[1][b'Identifier']
             # BITCH: You get a PersistentRequestRemoved msg even for non
             #        peristent requests AND you get it after the GetFailed.
             #print msg[0]

          
@@ 697,7 697,7 @@ class FCPConnection:
         if self.handle_unexpected_msgs(msg):
             return
 
-        client = self.running_clients[msg[1]['Identifier']]
+        client = self.running_clients[msg[1][b'Identifier']]
         assert client.is_running()
 
         if msg_is_terminal(msg, client.in_params.fcp_params):

          
@@ 705,21 705,21 @@ class FCPConnection:
                 return
 
             # Remove running context entries
-            assert msg[1]['Identifier'] == client.context.running_id
+            assert msg[1][b'Identifier'] == client.context.running_id
             #print "DELETED: ", client.context.running_id
             del self.running_clients[client.context.running_id]
             if client.context.running_id != client.context.initiating_id:
                 #print "DELETED: ", client.context.initiating_id
                 del self.running_clients[client.context.initiating_id]
 
-            if msg[0] == 'DataFound' or msg[0] == 'AllData':
+            if msg[0] == b'DataFound' or msg[0] == b'AllData':
                 # REDFLAG: Always do this? and fix FCPError.last_uri?
                 # Copy URI into final message. i.e. so client
                 # sees the final redirect not the inital URI.
-                msg[1]['URI'] = client.context.last_uri
-            if msg[0] == 'AllData':
+                msg[1][b'URI'] = client.context.last_uri
+            if msg[0] == b'AllData':
                 # Copy metadata into final message
-                msg[1]['Metadata.ContentType'] = client.context.metadata
+                msg[1][b'Metadata.ContentType'] = client.context.metadata
 
                 # Add a third entry to the msg tuple containing the raw data,
                 # or a comment saying where it was written.

          
@@ 736,7 736,7 @@ class FCPConnection:
             # So that MinimalClient.request_id() returns the
             # initiating id correctly even after following
             # redirects.
-            msg[1]['Identifier'] = client.context.initiating_id
+            msg[1][b'Identifier'] = client.context.initiating_id
 
             # Reset the context
             client.context.release()

          
@@ 745,9 745,9 @@ class FCPConnection:
             client.response = msg
             assert not client.is_running()
         else:
-            if 'Metadata.ContentType' in msg[1]:
+            if b'Metadata.ContentType' in msg[1]:
                 # Keep track of metadata as we follow redirects
-                client.context.metadata = msg[1]['Metadata.ContentType']
+                client.context.metadata = msg[1][b'Metadata.ContentType']
 
         # Notify client.
         if client.message_callback:

          
@@ 758,7 758,7 @@ class FCPConnection:
             socket closes. """
         def dropping(data): # REDFLAG: Harmless but remove eventually.
             """ INTERNAL: Print warning when data is dropped after close. """
-            print "DROPPING %i BYTES OF DATA AFTER CLOSE!" % len(data)
+            print("DROPPING %i BYTES OF DATA AFTER CLOSE!" % len(data))
 
         self.node_hello = None
         if not self.socket is None:

          
@@ 766,7 766,7 @@ class FCPConnection:
             self.socket.recv_callback = dropping # Ignore any subsequent data.
 
         # Hmmmm... other info, ok to share this?
-        fake_msg = ('ProtocolError', {'CodeDescription':'Socket closed'})
+        fake_msg = (b'ProtocolError', {b'CodeDescription':b'Socket closed'})
         #print "NOTIFIED: CLOSED"
 
         # Hmmmm... iterate over values instead of keys?

          
@@ 811,7 811,7 @@ class DataSink:
     def __init__(self):
         self.file_name = None
         self.file = None
-        self.raw_data = ''
+        self.raw_data = b''
         self.data_bytes = 0
 
     def initialize(self, data_length, file_name):

          
@@ 840,8 840,8 @@ class DataSink:
         if self.file:
             #print "WRITE_BYTES writing to file"
             if self.file.closed:
-                print "FileOrStringDataSink -- refusing to write" \
-                      + " to closed file!"
+                print("FileOrStringDataSink -- refusing to write" \
+                      + " to closed file!")
                 return
             self.file.write(bytes)
             self.data_bytes -= len(bytes)

          
@@ 858,13 858,13 @@ class DataSink:
         """ Release all resources associated with the instance. """
 
         if self.data_bytes != 0:
-            print "DataSink.release -- DIDN'T FINISH PREVIOUS READ!", \
-                  self.data_bytes
+            print("DataSink.release -- DIDN'T FINISH PREVIOUS READ!", \
+                  self.data_bytes)
         if self.file:
             self.file.close()
         self.file_name = None
         self.file = None
-        self.raw_data = ''
+        self.raw_data = b''
         self.data_bytes = 0
 
 class RequestContext:

          
@@ 913,13 913,13 @@ class ClientParams:
         self.default_fcp_params = {}
         # These are per request values. They can be modified / reset.
         self.fcp_params = {}
-        self.async = False
+        self._async = False
         self.file_name = None
         self.send_data = None
         self.allowed_redirects = 0
 
     def reset(self):
-        """ Reset all members EXCEPT async, allowed_redirects and
+        """ Reset all members EXCEPT _async, allowed_redirects and
             default_fcp_params to their default values.
         """
 

          
@@ 935,7 935,7 @@ class ClientParams:
         return "%s: %s %s %s %s %s %s" % \
                ( self.definition[0],
                  str(self.send_data),
-                 str(self.async),
+                 str(self._async),
                  self.file_name,
                  self.allowed_redirects,
                  self.fcp_params,

          
@@ 945,7 945,7 @@ class MinimalClient:
     """ A single FCP request which can be executed via the
         FCPConnection.start_request() method.
 
-        If in_params.async is True the request runs asynchronously,
+        If in_params._async is True the request runs asynchronously,
         otherwise it causes FCPConnection.start_request() to block.
 
         The message_callback notifier function is called for

          
@@ 972,7 972,7 @@ class MinimalClient:
     def reset(self, reset_params = True):
         """ Reset all members EXCEPT self.in_params.allowed_redirects,
             self.in_params.default_fcp_params and
-            self.in_params.async to their default values.
+            self.in_params._async to their default values.
         """
         assert not self.is_running()
         if reset_params:

          
@@ 993,7 993,7 @@ class MinimalClient:
     def request_id(self):
         """ Returns the request id. """
         if self.response and not self.context:
-            return self.response[1]['Identifier']
+            return self.response[1][b'Identifier']
         elif self.context:
             return self.context.initiating_id
         return None

          
M infocalypse/fcpmessage.py +66 -66
@@ 49,25 49,25 @@ def merge_params(params, allowed, defaul
 def format_params(params, allowed, required):
     """ INTERNAL: Format params into an FCP message body string. """
 
-    ret = ''
+    ret = b''
     for field in params:
         if not field in allowed:
             raise ValueError("Illegal field [%s]." % field)
 
     for field in allowed:
         if field in params:
-            if field == 'Files':
+            if field == b'Files':
                 # Special case Files dictionary.
-                assert params['Files']
-                for subfield in params['Files']:
-                    ret += "%s=%s\n" % (subfield, params['Files'][subfield])
+                assert params[b'Files']
+                for subfield in params[b'Files']:
+                    ret += b"%s=%s\n" % (subfield, params[b'Files'][subfield])
                 continue
-            value = str(params[field])
+            value = params[field] if isinstance(params[field], bytes) else str(params[field]).encode('utf-8')
             if not value:
                 raise ValueError("Illegal value for field [%s]." % field)
-            if value.lower() == 'true' or value.lower() == 'false':
+            if value.lower() == b'true' or value.lower() == b'false':
                 value = value.lower()
-            ret += "%s=%s\n" % (field, value)
+            ret += b"%s=%s\n" % (field, value)
         elif field in required:
             #print "FIELD:", field, required
             raise ValueError("A required field [%s] was not set." % field)

          
@@ 114,8 114,8 @@ def make_request(definition, params, def
     if required is None:
         required = allowed
 
-    ret = name + '\n' + format_params(real_params, allowed, required) \
-          + 'EndMessage\n'
+    ret = name + b'\n' + format_params(real_params, allowed, required) \
+          + b'EndMessage\n'
 
     # Run extra checks on parameter values
     # Order is important.  Format_params can raise on missing fields.
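
With format_params() doing bytes %-formatting, every value must be bytes by
the time it gets here, but callers in fcpconnection.py still store raw ints
in fields like b'DataLength' (from len() and data_length()); b'%s' rejects
non-bytes under PEP 461, hence the isinstance() coercion added in
format_params() above. A minimal sketch of the failure it guards against:

    field, value = b'DataLength', 4096         # int straight from len()
    try:
        line = b"%s=%s\n" % (field, value)
    except TypeError:                          # bytes %s wants bytes
        line = b"%s=%s\n" % (field, str(value).encode('utf-8'))
    assert line == b'DataLength=4096\n'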

          
@@ 136,8 136,8 @@ def make_request(definition, params, def
 
 def get_constraint(dummy, params):
     """ INTERNAL: Check get params. """
-    if 'ReturnType' in params and params['ReturnType'] != 'disk':
-        if 'Filename' in params or 'TempFilename' in params:
+    if b'ReturnType' in params and params[b'ReturnType'] != b'disk':
+        if b'Filename' in params or b'TempFilename' in params:
             raise ValueError("'Filename' and 'TempFileName' only allowed" \
                              + " when 'ReturnType' is disk.")
 

          
@@ 145,71 145,71 @@ def put_file_constraint(dummy, params):
     """ INTERNAL: Check put_file params. """
     # Hmmmm... this only checks for required arguments, it
     # doesn't report values that have no effect.
-    upload_from = 'direct'
-    if 'UploadFrom' in params:
-        upload_from = params['UploadFrom']
-    if upload_from == 'direct':
-        if not 'DataLength' in params:
+    upload_from = b'direct'
+    if b'UploadFrom' in params:
+        upload_from = params[b'UploadFrom']
+    if upload_from == b'direct':
+        if not b'DataLength' in params:
             raise ValueError("'DataLength' MUST be set, 'UploadFrom =="
                                  + " 'direct'.")
-    elif upload_from == 'disk':
-        if not 'Filename' in params:
+    elif upload_from == b'disk':
+        if not b'Filename' in params:
             raise ValueError("'Filename' MUST be set, 'UploadFrom =="
                              + " 'disk'.")
-        elif upload_from == 'redirect':
-            if not 'TargetURI' in params:
+        elif upload_from == b'redirect':
+            if not b'TargetURI' in params:
                 raise ValueError("'TargetURI' MUST be set, 'UploadFrom =="
                                  + " 'redirect'.")
     else:
-        raise ValueError("Unknown value, 'UploadFrom' == %s" % upload_from)
+        raise ValueError("Unknown value, b'UploadFrom' == %s" % upload_from)
 
 
-HELLO_DEF = ('ClientHello', ('Name', 'ExpectedVersion'), None, None)
+HELLO_DEF = (b'ClientHello', (b'Name', b'ExpectedVersion'), None, None)
 
 # Identifier not included in doc?
-GETNODE_DEF = ('GetNode', ('Identifier', 'GiveOpennetRef', 'WithPrivate',
-                           'WithVolatile'),
+GETNODE_DEF = (b'GetNode', (b'Identifier', b'GiveOpennetRef', b'WithPrivate',
+                           b'WithVolatile'),
                None, None)
 
 #IMPORTANT: One entry tuple MUST have trailing comma or it will evaluate
 #           to a string instead of a tuple.
-GENERATE_SSK_DEF = ('GenerateSSK', ('Identifier',), None, None)
-GET_REQUEST_URI_DEF = ('ClientPut',
-                       ('URI', 'Identifier', 'MaxRetries', 'PriorityClass',
-                        'UploadFrom', 'DataLength', 'GetCHKOnly'),
+GENERATE_SSK_DEF = (b'GenerateSSK', (b'Identifier',), None, None)
+GET_REQUEST_URI_DEF = (b'ClientPut',
+                       (b'URI', b'Identifier', b'MaxRetries', b'PriorityClass',
+                        b'UploadFrom', b'DataLength', b'GetCHKOnly'),
                        None, None)
-GET_DEF = ('ClientGet',
-           ('IgnoreDS', 'DSOnly', 'URI', 'Identifier', 'Verbosity',
-            'MaxSize', 'MaxTempSize', 'MaxRetries', 'PriorityClass',
-            'Persistence', 'ClientToken', 'Global', 'ReturnType',
-            'BinaryBlob', 'AllowedMimeTypes', 'FileName', 'TmpFileName'),
-           ('URI', 'Identifier'),
+GET_DEF = (b'ClientGet',
+           (b'IgnoreDS', b'DSOnly', b'URI', b'Identifier', b'Verbosity',
+            b'MaxSize', b'MaxTempSize', b'MaxRetries', b'PriorityClass',
+            b'Persistence', b'ClientToken', b'Global', b'ReturnType',
+            b'BinaryBlob', b'AllowedMimeTypes', b'FileName', b'TmpFileName'),
+           (b'URI', b'Identifier'),
            get_constraint)
-PUT_FILE_DEF = ('ClientPut',
-                ('URI', 'Metadata.ContentType', 'Identifier', 'Verbosity',
-                 'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
-                 'DontCompress','ClientToken', 'Persistence',
-                 'TargetFilename', 'EarlyEncode', 'UploadFrom', 'DataLength',
-                 'Filename', 'TargetURI', 'FileHash', 'BinaryBlob'),
-                ('URI', 'Identifier'),
+PUT_FILE_DEF = (b'ClientPut',
+                (b'URI', b'Metadata.ContentType', b'Identifier', b'Verbosity',
+                 b'MaxRetries', b'PriorityClass', b'GetCHKOnly', b'Global',
+                 b'DontCompress', b'ClientToken', b'Persistence',
+                 b'TargetFilename', b'EarlyEncode', b'UploadFrom', b'DataLength',
+                 b'Filename', b'TargetURI', b'FileHash', b'BinaryBlob'),
+                (b'URI', b'Identifier'),
                 put_file_constraint)
-PUT_REDIRECT_DEF = ('ClientPut',
-                    ('URI', 'Metadata.ContentType', 'Identifier', 'Verbosity',
-                     'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
-                     'ClientToken', 'Persistence', 'UploadFrom',
-                     'TargetURI'),
-                    ('URI', 'Identifier', 'TargetURI'),
+PUT_REDIRECT_DEF = (b'ClientPut',
+                    (b'URI', b'Metadata.ContentType', b'Identifier', b'Verbosity',
+                     b'MaxRetries', b'PriorityClass', b'GetCHKOnly', b'Global',
+                     b'ClientToken', b'Persistence', b'UploadFrom',
+                     b'TargetURI'),
+                    (b'URI', b'Identifier', b'TargetURI'),
                     None)
-PUT_COMPLEX_DIR_DEF = ('ClientPutComplexDir',
-                       ('URI', 'Identifier', 'Verbosity',
-                        'MaxRetries', 'PriorityClass', 'GetCHKOnly', 'Global',
-                        'DontCompress', 'ClientToken', 'Persistence',
-                        'TargetFileName', 'EarlyEncode', 'DefaultName',
-                        'Files'), #<- one off code in format_params() for this
-                       ('URI', 'Identifier'),
+PUT_COMPLEX_DIR_DEF = (b'ClientPutComplexDir',
+                       (b'URI', b'Identifier', b'Verbosity',
+                        b'MaxRetries', b'PriorityClass', b'GetCHKOnly', b'Global',
+                        b'DontCompress', b'ClientToken', b'Persistence',
+                        b'TargetFileName', b'EarlyEncode', b'DefaultName',
+                        b'Files'), #<- one off code in format_params() for this
+                       (b'URI', b'Identifier'),
                        None)
 
-REMOVE_REQUEST_DEF = ('RemoveRequest', ('Identifier', 'Global'), None, None)
+REMOVE_REQUEST_DEF = (b'RemoveRequest', (b'Identifier', b'Global'), None, None)
 
 # REDFLAG: Shouldn't assert on bad data! raise instead.
 # Hmmmm... I hacked this together by unwinding a "pull" parser

          
@@ 228,7 228,7 @@ class FCPParser:
     """
     def __init__(self):
         self.msg = None
-        self.prev_chunk = ""
+        self.prev_chunk = b""
         self.data_context = None
 
         # lambda's prevent pylint E1102 warning

          
@@ 249,7 249,7 @@ class FCPParser:
             self.msg = [line, {}]
             return False
 
-        pos = line.find('=')
+        pos = line.find(b'=')
         if pos != -1:
             # name=value pair
             fields = (line[:pos], line[pos + 1:])

          
@@ 261,21 261,21 @@ class FCPParser:
             self.msg[1][fields[0].strip()] = fields[1].strip()
         else:
             # end of message line
-            if line == 'Data':
+            if line == b'Data':
                 # Handle trailing data
                 assert self.msg
                 # REDFLAG: runtime protocol error (should never happen)
-                assert 'Identifier' in self.msg[1]
+                assert b'Identifier' in self.msg[1]
                 assert not self.data_context
                 self.data_context = self.context_callback(self.msg[1]
-                                                          ['Identifier'])
+                                                          [b'Identifier'])
                 self.data_context.data_sink.initialize(int(self.msg[1]
-                                                           ['DataLength']),
+                                                           [b'DataLength']),
                                                        self.data_context.
                                                        file_name)
                 return True
 
-            assert line == 'End' or line == 'EndMessage'
+            assert line == b'End' or line == b'EndMessage'
             msg = self.msg
             self.msg = None
             assert not self.data_context or self.data_context.writable() == 0

          
@@ 312,9 312,9 @@ class FCPParser:
         else:
             # Expecting a \n terminated line.
             bytes = self.prev_chunk + bytes
-            self.prev_chunk = ""
+            self.prev_chunk = b""
             last_eol = -1
-            pos = bytes.find('\n')
+            pos = bytes.find(b'\n')
             while pos != -1:
                 if last_eol <= 0:
                     last_eol = 0

          
@@ 326,7 326,7 @@ class FCPParser:
                     # Hmmm... recursion depth
                     self.parse_bytes(bytes[last_eol + 1:])
                     return
-                pos = bytes.find('\n', last_eol + 1)
+                pos = bytes.find(b'\n', last_eol + 1)
 
             assert not self.data_context or not self.data_context.writable()
             self.prev_chunk = bytes[last_eol + 1:]
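
FCPParser has to stay bytes end to end: on Python 3 a stray str needle is a
hard error rather than a silent mismatch, since bytes.find() rejects str.
A minimal sketch (the key is a placeholder):

    chunk = b'URI=CHK@key/file\nEndMessage\n'
    pos = chunk.find(b'\n')          # ok: bytes needle, bytes haystack
    assert chunk[:pos] == b'URI=CHK@key/file'
    # chunk.find('\n')               # TypeError on py3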

          
M infocalypse/fms.py +33 -33
@@ 21,21 21,21 @@ 
 
 import os
 import sys
-import StringIO
+import io
 import time
 
-from fcpclient import get_usk_hash, get_version, is_usk_file, \
+from .fcpclient import get_usk_hash, get_version, is_usk_file, \
      get_usk_for_usk_version
 
-from validate import is_hex_string
+from .validate import is_hex_string
 
 # Similar HACK is used in config.py
-import knownrepos # Just need a module to read __file__ from
+from . import knownrepos # Just need a module to read __file__ from
 
 try:
     #raise ImportError('fake error to test code path')
     __import__('nntplib')
-except ImportError, err:
+except ImportError as err:
     # djk20090506 tested this code path.
     # nntplib doesn't ship with the Windoze binary hg distro.
     # so we do some hacks to use a local copy.

          
@@ 43,7 43,7 @@ except ImportError, err:
     #print "No nntplib? This doesn't look good."
     PARTS = os.path.split(os.path.dirname(knownrepos.__file__))
     if PARTS[-1] != 'infocalypse':
-        print "nntplib is missing and couldn't hack path. Giving up. :-("
+        print("nntplib is missing and couldn't hack path. Giving up. :-(")
     else:
         PATH = os.path.join(PARTS[0], 'python2_5_files')
         sys.path.append(PATH)

          
@@ 82,7 82,7 @@ def send_msgs(server, msg_tuples, send_q
                                   msg_tuple[1],
                                   msg_tuple[2],
                                   msg_tuple[3])
-        in_file = StringIO.StringIO(raw_msg)
+        in_file = io.StringIO(raw_msg)
         try:
             server.post(in_file)
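
A hedged caveat on this hunk: io.StringIO preserves the old str interface,
but Python 3's nntplib documents the file handed to NNTP.post() as
binary-mode, i.e. it must yield bytes, so io.BytesIO over an encoded message
may be what actually works here. A sketch of that variant (headers and group
are placeholders):

    import io

    raw_msg = 'From: nym\nNewsgroups: some.group\n\nbody\n'
    in_file = io.BytesIO(raw_msg.encode('utf-8'))   # bytes lines for py3
    # server.post(in_file)      # server: a connected nntplib.NNTP instance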
 

          
@@ 142,8 142,8 @@ class TrustCache:
         for fms_id in fms_ids:
             if (not self.table.get(fms_id, None) is None and
                 self.table[fms_id][0] > time.time()):
-                print "%s cached for %i more secs. (prefetch)" % (
-                    fms_id, (self.table[fms_id][0] - time.time()))
+                print("%s cached for %i more secs. (prefetch)" % (
+                    fms_id, (self.table[fms_id][0] - time.time())))
                 continue
             self.table[fms_id] = (time.time() + self.timeout_secs,
                                   get_trust(self.server, fms_id))

          
@@ 157,8 157,8 @@ class TrustCache:
         if cached is None or cached[0] < time.time():
             self.prefetch_trust((fms_id, ))
         assert fms_id in self.table
-        print "%s cached for %i more secs. (get)" % (
-            fms_id, (self.table[fms_id][0] - time.time()))
+        print("%s cached for %i more secs. (get)" % (
+            fms_id, (self.table[fms_id][0] - time.time())))
 
         return self.table[fms_id][1]
 

          
@@ 231,10 231,10 @@ def recv_group_msgs(server, group, msg_s
 
     try:
         result = server.group(group)
-    except nntplib.NNTPTemporaryError, err1:
+    except nntplib.NNTPTemporaryError as err1:
         # Ignore 411 errors which happen before the local FMS
         # instance has learned about the group.
-        print "Skipped: %s because of error: %s" % (group, str(err1))
+        print("Skipped: %s because of error: %s" % (group, str(err1)))
         return
 
     if result[1] == '0':

          
@@ 265,7 265,7 @@ def recv_group_msgs(server, group, msg_s
             continue # Hmmmm... were does this continue?
         try:
             result = server.article(item[0])
-        except nntplib.NNTPProtocolError, nntp_err:
+        except nntplib.NNTPProtocolError as nntp_err:
             # REDFLAG:
             # djk20091224 I haven't seen this trip in a month or so.
             # Research:

          
@@ 275,13 275,13 @@ def recv_group_msgs(server, group, msg_s
             #
             # djk20091023 If I use execquery.htm to on the message ID
             # that causes this I get nothing back. == db corruption?
-            print "SAW NNTPProtocolError: ", items[4]
+            print("SAW NNTPProtocolError: ", items[4])
             if str(nntp_err) !=  '.':
-                print "CAN'T HACK AROUND IT. Sorry :-("
+                print("CAN'T HACK AROUND IT. Sorry :-(")
                 raise
-            print "TRYING TO HACK AROUND IT..."
+            print("TRYING TO HACK AROUND IT...")
             msg_sink.recv_fms_msg(group, item, [])
-            print "continue(2)"
+            print("continue(2)")
             continue
 
         if result[0].split(' ')[0] != '220':

          
@@ 403,8 403,8 @@ def strip_names(trust_map):
     for nym in trust_map:
         cleaned = clean_nym(nym)
         if nym in clean:
-            print "strip_name -- nym appears multiple times w/ different " \
-                  + "name part: " + nym
+            print("strip_name -- nym appears multiple times w/ different " \
+                  + "name part: " + nym)
         clean[cleaned] = list(set(list(trust_map[nym])
                                   + clean.get(cleaned, [])))
     return clean

          
@@ 491,7 491,7 @@ class USKNotificationParser(IFmsMessageS
     def handle_update(self, clean_id, fms_id, usk_hash, index):
         """ INTERNAL: process a single update. """
         if index < 0:
-            print "handle_update -- skipped negative index!"
+            print("handle_update -- skipped negative index!")
             return
 
         entry = self.table.get(clean_id, (set([]), {}, set([])))

          
@@ 547,8 547,8 @@ class USKNotificationParser(IFmsMessageS
                 break # break inner loop.
         if ret is None:
             # REDFLAG: Nail down when this can happen.
-            print "??? saw an fms id with no human readable part ???"
-            print list(table_entry[2])[0]
+            print("??? saw an fms id with no human readable part ???")
+            print(list(table_entry[2])[0])
             ret = list(table_entry[2])[0]
         return ret
 

          
@@ 595,7 595,7 @@ def show_table(parser, out_func):
     """ Dump the announcements and updates in a human readable format. """
     fms_id_map, announce_map, update_map = parser.invert_table()
 
-    usks = announce_map.keys()
+    usks = list(announce_map.keys())
     usks.sort()
 
     for usk in usks:

          
@@ 616,8 616,8 @@ DEFAULT_SUBJECT = 'Ignore'
 def make_update_msg(fms_id, group, updates, announcements=None,
                     subject=DEFAULT_SUBJECT):
     """ Test function to make message tuples. """
-    print "updates: ",  updates
-    print "announcements: ", announcements
+    print("updates: ",  updates)
+    print("announcements: ", announcements)
 
     # fms doesn't want to see the full id?
     fms_id = fms_id.split('@')[0]

          
@@ 652,22 652,22 @@ def smoke_test():
                 + 'infocalypse.hgext.R1/12',))
 
     # From tuple to string
-    print "---"
-    print values0
+    print("---")
+    print(values0)
 
     text = to_msg_string(values0[0], values0[1])
-    print "---"
+    print("---")
     # And back
-    print text
+    print(text)
     values1 = parse(text)
-    print "---"
-    print values1
+    print("---")
+    print(values1)
     # Not values0 because of implicit update.
     assert values1 == values2
 
     # Test sig style update strings.
     text = to_msg_string(values0[0], None, ':')
-    print text
+    print(text)
     values3 = parse(text)
     assert values3 == (values0[0], ())
 

          
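a note on the StringIO swap in send_msgs above: Python 3's nntplib posts binary data (post() takes a file object opened in binary mode, or an iterable of bytes lines), so the str message built from MSG_TEMPLATE has to be encoded before wrapping. a minimal sketch of the pattern, with a made-up message:

    import io

    raw_msg = ("From: nym@somekey\n"
               "Newsgroups: test.group\n"
               "Subject: Ignore\n"
               "\n"
               "body\n")
    # nntplib.NNTP.post() reads bytes, so encode the str message first.
    in_file = io.BytesIO(raw_msg.encode("utf-8"))
    assert in_file.read(5) == b"From:"
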
M infocalypse/fmsbot.py +9 -9
@@ 25,8 25,8 @@ import os
 import socket
 import time
 
-import fms
-from fms import IFmsMessageSink
+from . import fms
+from .fms import IFmsMessageSink
 
 def make_bot_path(storage_dir, bot_name, file_name):
     """ Helper function makes to make a bot instance specific file name. """

          
@@ 56,7 56,7 @@ class FMSBotRunner(IFmsMessageSink):
 
     def log(self, msg):
         """ Print a log message. """
-        print msg
+        print(msg)
 
     def nntp_reconnect(self, suppress_events=False):
         """ Connect to fms. """

          
@@ 68,7 68,7 @@ class FMSBotRunner(IFmsMessageSink):
             self.nntp_server = self.nntp.get_connection(self.params['FMS_HOST'],
                                                         self.params['FMS_PORT'],
                                                         fms_id)
-        except Exception, err: # DCI: what else do I need to catch?
+        except Exception as err: # DCI: what else do I need to catch?
             self.log("FMSBotRunner.nntp_reconnect -- failed: %s" % str(err))
             return None
 

          
@@ 85,9 85,9 @@ class FMSBotRunner(IFmsMessageSink):
         try:
             try:
                 self.nntp_server.quit()
-            except IOError, err:
+            except IOError as err:
                 self.log("FMSBotRunner.nntp_close -- failed: %s" % str(err))
-            except EOFError, err:
+            except EOFError as err:
                 self.log("FMSBotRunner.nntp_close -- failed: %s" % str(err))
         finally:
             self.nntp_server = None

          
@@ 113,7 113,7 @@ class FMSBotRunner(IFmsMessageSink):
             finally:
                 if raised:
                     self.nntp_close()
-        except Exception, err: # DCI: what else do I need to catch?
+        except Exception as err: # DCI: what else do I need to catch?
             # ??? fail silently???
             self.log("FMSBotRunner.nntp_send -- send_msgs failed: %s" %
                      str(err))

          
@@ 199,7 199,7 @@ class FMSBotRunner(IFmsMessageSink):
             finally:
                 if raised:
                     self.nntp_close()
-        except Exception, err: # DCI: what else do I need to catch?
+        except Exception as err: # DCI: what else do I need to catch?
             self.log("FMSBotRunner.recv_msgs -- failed: %s" % str(err))
             raise # DCI: NEED TO FIX THIS
             return False

          
@@ 237,7 237,7 @@ class FMSBot(IFmsMessageSink):
 
     def log(self, text):
         """ Display log messages. """
-        print "%s:%s" % (self.name, text)
+        print("%s:%s" % (self.name, text))
 
     def on_startup(self):
         """ Event handler which is run once when the bot is started. """

          
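the two back-to-back handlers in nntp_close above do the same thing; now that the `except ... as` syntax is in everywhere, they could collapse into one clause (IOError is just an alias of OSError in Python 3 anyway). a sketch, with a stub standing in for the nntp server:

    def nntp_close(server, log=print):
        """Sketch: one clause catches both error types."""
        try:
            server.quit()
        except (IOError, EOFError) as err:
            log("nntp_close -- failed: %s" % err)

    class StubServer:
        def quit(self):
            raise EOFError("connection already gone")

    nntp_close(StubServer())  # logs instead of raising
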
M infocalypse/fmscmds.py +12 -12
@@ 25,16 25,16 @@ import socket
 
 from mercurial import util
 
-from fcpclient import get_usk_hash
+from .fcpclient import get_usk_hash
 
-from knownrepos import KNOWN_REPOS
+from .knownrepos import KNOWN_REPOS
 
-from fms import recv_msgs, to_msg_string, MSG_TEMPLATE, send_msgs, \
+from .fms import recv_msgs, to_msg_string, MSG_TEMPLATE, send_msgs, \
      USKNotificationParser, show_table, get_connection
 
-from config import Config, trust_id_for_repo, untrust_id_for_repo, known_hashes
-from infcmds import do_key_setup, setup, cleanup, execute_insert_patch
-from wikicmds import execute_wiki_submit
+from .config import Config, trust_id_for_repo, untrust_id_for_repo, known_hashes
+from .infcmds import do_key_setup, setup, cleanup, execute_insert_patch
+from .wikicmds import execute_wiki_submit
 
 def handled_list(ui_, params, stored_cfg):
     """ INTERNAL: HACKED"""

          
@@ 44,7 44,7 @@ def handled_list(ui_, params, stored_cfg
     trust_map = None
     if params['FMSREAD'] == 'list':
         trust_map = stored_cfg.fmsread_trust_map.copy() # paranoid copy
-        fms_ids = trust_map.keys()
+        fms_ids = list(trust_map.keys())
         fms_ids.sort()
         ui_.status(("Only listing repo USKs from trusted "
                     + "FMS IDs:\n   %s\n\n") % '\n   '.join(fms_ids))

          
@@ 68,7 68,7 @@ def dump_trust_map(ui_, params, trust_ma
     if not force and not params['REQUEST_URI'] is None:
         ui_.status("USK hash for local repository: %s\n" %
                    get_usk_hash(params['REQUEST_URI']))
-    fms_ids = trust_map.keys()
+    fms_ids = list(trust_map.keys())
     fms_ids.sort()
     ui_.status("Update Trust Map:\n")
     for fms_id in fms_ids:

          
@@ 277,7 277,7 @@ def execute_fmsnotify(ui_, repo, params,
         if 'MSG_SPOOL_DIR' in params:
             ui_.warn("DEBUG HACK!!! Writing fms msg to local spool:\n%s\n" %
                       params['MSG_SPOOL_DIR'])
-            import fmsstub
+            from . import fmsstub
 
             # LATER: fix config file to store full fmsid?
             # grrrr... hacks piled upon hacks.

          
@@ 307,7 307,7 @@ def check_trust_map(ui_, stored_cfg, rep
              + "provide update notifications.\n\n")
 
     added = False
-    fms_ids = notifiers.keys()
+    fms_ids = list(notifiers.keys())
     fms_ids.sort()
 
     done = False

          
@@ 345,7 345,7 @@ def get_trust_map(ui_, params, stored_cf
     if params['FMSREAD_ONLYTRUSTED']:
         # HACK to deal with spam of the announcement group.
         trust_map = stored_cfg.fmsread_trust_map.copy() # paranoid copy
-        fms_ids = trust_map.keys()
+        fms_ids = list(trust_map.keys())
         fms_ids.sort()
         ui_.status(("Only using announcements from trusted "
                     + "FMS IDs:\n   %s\n\n") % '\n   '.join(fms_ids))

          
@@ 394,7 394,7 @@ def get_uri_from_hash(ui_, dummy, params
         notifiers[fms_id_map[clean_id]] = (parser.table[clean_id][1]
                                            [params['FMSREAD_HASH']])
 
-    fms_ids = notifiers.keys()
+    fms_ids = list(notifiers.keys())
     fms_ids.sort()
 
     ui_.status("Found Updates:\n")

          
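the `list(d.keys())` + `.sort()` pairs above are the mechanical port of code that sorted a keys() list in place; Python 3's keys() view has no sort() method, so the list() wrap is required. `sorted()` says the same thing in one step, and a later cleanup pass could tighten these up:

    trust_map = {"nym2@key2": ("hash2",), "nym1@key1": ("hash1",)}

    # Mechanical port (works, two steps):
    fms_ids = list(trust_map.keys())
    fms_ids.sort()

    # Idiomatic Python 3 (one step, same result):
    assert fms_ids == sorted(trust_map)
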
M infocalypse/fmsstub.py +10 -10
@@ 4,8 4,8 @@ import stat
 import time
 import traceback
 
-from fms import MSG_TEMPLATE
-from fcpconnection import make_id
+from .fms import MSG_TEMPLATE
+from .fcpconnection import make_id
 
 def read_msg(full_path, default_sender, default_subject, default_group):
     article_num = os.stat(full_path)[stat.ST_MTIME]

          
@@ 51,7 51,7 @@ def read_msg(full_path, default_sender, 
 FAKE_TRUST = 65 # Trust value returned for all queries.
 class NNTPStub:
     def quit(self):
-        print "NNTPStub.quit -- called."
+        print("NNTPStub.quit -- called.")
         traceback.print_stack()
         #raise Exception("DCI: forcing stack trace")
     def shortcmd(self, cmd):

          
@@ 74,7 74,7 @@ class FMSStub:
 
     def send_msgs(self, dummy_server, msg_tuples, send_quit=False):
         if not os.path.exists(self.base_dir):
-            print "FMSStub.send_msg -- THE MESSAGE SPOOL DIR DOESN'T EXIST!"
+            print("FMSStub.send_msg -- THE MESSAGE SPOOL DIR DOESN'T EXIST!")
             raise IOError("Message spool directory doesn't exist.")
 
         for msg_tuple in msg_tuples:

          
@@ 83,9 83,9 @@ class FMSStub:
             # print "sender_lut: ", self.sender_lut
             sender = self.sender_lut.get(msg_tuple[0].split('@')[0],
                                          msg_tuple[0])
-            print "sender: ", sender
+            print("sender: ", sender)
             if sender != msg_tuple[0]:
-                print "fmsstub: FIXED UP %s->%s" % (msg_tuple[0], sender)
+                print("fmsstub: FIXED UP %s->%s" % (msg_tuple[0], sender))
 
             if sender.find('@') == -1:
                 raise IOError("Couldn't fixup fms_id: %s. Add it to the LUT."

          
@@ 119,8 119,8 @@ class FMSStub:
                                             name))[stat.ST_MTIME]
 
             if mod_time in by_mtime:
-                print "The msg ID hack in FMSStub failed!!!"
-                print "MANUALLY DELETE MSG FILE: ", name
+                print("The msg ID hack in FMSStub failed!!!")
+                print("MANUALLY DELETE MSG FILE: ", name)
 
             assert not mod_time in by_mtime
             by_mtime[mod_time] = name

          
@@ 129,7 129,7 @@ class FMSStub:
             #print "BAILING OUT, no files."
             return
 
-        times = by_mtime.keys()
+        times = list(by_mtime.keys())
         times.sort()
         if times[-1] <= max_articles[self.group]:
             #print "BAILING OUT, no new files."

          
@@ 148,7 148,7 @@ class FMSStub:
                 continue
 
             if not msg_sink.wants_msg(self.group, items):
-                print "fmsstub: Rejected by sink: %s" % by_mtime[mod_time]
+                print("fmsstub: Rejected by sink: %s" % by_mtime[mod_time])
                 continue
 
             msg_sink.recv_fms_msg(self.group, items, items[-2])

          
M infocalypse/gensig.py +5 -5
@@ 29,9 29,9 @@ 
 """
 import os
 
-from fcpclient import get_usk_hash
-from config import Config, DEFAULT_CFG_PATH
-from fms import to_msg_string
+from .fcpclient import get_usk_hash
+from .config import Config, DEFAULT_CFG_PATH
+from .fms import to_msg_string
 
 # The maximum number of updates to include.
 MAX_UPDATES = 4 #  == 67 chars

          
@@ 88,8 88,8 @@ def print_updates():
         # Hmmm... silently truncate
         updates = updates[:MAX_UPDATES]
         if len(updates) > 0:
-            print STATIC_TEXT
-            print to_msg_string(updates, None, ':')
+            print(STATIC_TEXT)
+            print(to_msg_string(updates, None, ':'))
     except:
         # Better to exit() with the correct exit code?
         # Fail silently, rather than spewing garbage into sig.

          
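gensig.py is almost pure import rewrites. Python 3 dropped implicit relative imports (PEP 328), which is why every intra-package import in this changeset grows a dot. a self-contained demo that builds a throwaway package and uses both explicit forms (demo_pkg and its contents are invented for the demo):

    import importlib, os, sys, tempfile

    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "demo_pkg")
    os.makedirs(pkg)
    open(os.path.join(pkg, "__init__.py"), "w").close()
    with open(os.path.join(pkg, "helper.py"), "w") as out:
        out.write("GREETING = 'hi'\n")
    with open(os.path.join(pkg, "main.py"), "w") as out:
        out.write("from . import helper\n")          # py2 allowed: import helper
        out.write("from .helper import GREETING\n")  # py2 allowed: from helper import GREETING

    sys.path.insert(0, root)
    assert importlib.import_module("demo_pkg.main").GREETING == "hi"
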
M infocalypse/graph.py +26 -26
@@ 346,14 346,14 @@ class UpdateGraph:
             raise UpdateGraphException("Length mismatch: [%s]:%i"
                                        % (str(index_pair), ordinal))
         if not edge_list[ordinal + 1].startswith(PENDING_INSERT):
-            print "set_chk -- replacing a non pending chk (%i, %i, %i)?" % \
-                  (index_pair[0], index_pair[1], ordinal)
+            print("set_chk -- replacing a non pending chk (%i, %i, %i)?" % \
+                  (index_pair[0], index_pair[1], ordinal))
             if edge_list[ordinal + 1] == chk:
-                print "Values are same."
+                print("Values are same.")
             else:
-                print "Values are different:"
-                print "old:", edge_list[ordinal + 1]
-                print "new:", chk
+                print("Values are different:")
+                print("old:", edge_list[ordinal + 1])
+                print("new:", chk)
         edge_list[ordinal + 1] = chk
         self.edge_table[index_pair] = tuple(edge_list)
 

          
@@ 402,8 402,8 @@ class UpdateGraph:
         if length <= MAX_METADATA_HACK_LEN:
             return INSERT_SALTED_METADATA
 
-        print "insert_type -- called for edge that's too big to salt???"
-        print edge_triple
+        print("insert_type -- called for edge that's too big to salt???")
+        print(edge_triple)
         return INSERT_HUGE
 
     def insert_length(self, step):

          
@@ 430,7 430,7 @@ class UpdateGraph:
         base_revs.sort()
         new_heads.sort()
         if self.latest_index != FIRST_INDEX and NULL_REV in base_revs:
-            print "add_index -- base=null in base_revs. Really do that?"
+            print("add_index -- base=null in base_revs. Really do that?")
         self.latest_index += 1
         self.index_table[self.latest_index] = (tuple(base_revs),
                                                tuple(new_heads))

          
@@ 505,8 505,8 @@ class UpdateGraph:
                 new_edges.append(self.add_edge(bundle[2], (bundle[0],
                                                            PENDING_INSERT1)))
             else:
-                print "update -- Bundle too big to salt! CHK: %i" \
-                      % first_bundle[0]
+                print("update -- Bundle too big to salt! CHK: %i" \
+                      % first_bundle[0])
 
         new_edges = new_edges + self._add_canonical_path_redundancy()
 

          
@@ 599,7 599,7 @@ class UpdateGraph:
 
             This is what you would use to bootstrap from hg rev -1. """
         try:
-            return canonical_path_itr(self, 0, to_index, max_search_len).next()
+            return next(canonical_path_itr(self, 0, to_index, max_search_len))
         except StopIteration:
             raise UpdateGraphException("No such path: %s"
                                        % str((0, to_index)))

          
@@ 739,7 739,7 @@ class UpdateGraph:
         """ Debugging function to check invariants. """
         max_index = -1
         min_index = -1
-        for index in self.index_table.keys():
+        for index in list(self.index_table.keys()):
             max_index = max(index, max_index)
             min_index = min(index, min_index)
         assert self.latest_index == max_index

          
@@ 757,7 757,7 @@ class UpdateGraph:
             assert len(self.index_table[index][1]) > 0
 
         # All edges must be resolvable.
-        for edge in self.edge_table.keys():
+        for edge in list(self.edge_table.keys()):
             assert edge[0] in self.index_table
             assert edge[1] in self.index_table
             assert edge[0] < edge[1]

          
@@ 775,7 775,7 @@ class UpdateGraph:
         assert values[-1] == max_index
         assert values[0] == FIRST_INDEX
         # Indices contiguous
-        assert values == range(FIRST_INDEX, max_index + 1)
+        assert values == list(range(FIRST_INDEX, max_index + 1))
 
         if full:
             # Verify that version map is complete.

          
@@ 855,7 855,7 @@ def break_edges(graph, kill_probability,
         for index in range(0, len(chks)):
             if graph.get_chk((edge[0], edge[1], index)) in skip_chks:
                 # Hack to skip pending requests.
-                print "break_edges -- skipped: ", (edge[0], edge[1], index)
+                print("break_edges -- skipped: ", (edge[0], edge[1], index))
                 continue
             if random.random() < kill_probability:
                 graph.set_chk(edge, index, length, bad_chk)

          
@@ 870,33 870,33 @@ def pretty_index(index):
 def dump_path(graph, path):
     """ Debugging function to print a path. """
     if len(path) == 0:
-        print "EMPTY PATH!"
+        print("EMPTY PATH!")
         return
 
-    print "(%s)-->[%s] cost=%0.2f" % (pretty_index(path[0][0]),
+    print("(%s)-->[%s] cost=%0.2f" % (pretty_index(path[0][0]),
                                       pretty_index(path[-1][1]),
-                                      graph.path_cost(path, True))
+                                      graph.path_cost(path, True)))
     for step in path:
         cost = graph.get_length(step)
-        print "   (%s) -- (%0.2f, %i) --> [%s]" % (pretty_index(step[0]),
+        print("   (%s) -- (%0.2f, %i) --> [%s]" % (pretty_index(step[0]),
                                                 cost,
                                                 step[2],
-                                                pretty_index(step[1]))
+                                                pretty_index(step[1])))
 def dump_paths(graph, paths, msg):
     """ Debugging function to dump a list of paths. """
-    print  "--- %s ---" % msg
+    print("--- %s ---" % msg)
     for path in paths:
         dump_path(graph, path)
-    print "---"
+    print("---")
 
 def print_list(msg, values):
     """ INTERNAL: Helper function. """
     if msg:
-        print msg
+        print(msg)
     for value in values:
-        print "   ", value
+        print("   ", value)
     if len(values) == 0:
-        print
+        print()
 # REDFLAG: is it a version_map or a version_table? decide and fix all names
 # REDFLAG: Scales to what? 10k nodes?
 # Returns version -> index mapping

          
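two renames above are easy to miss in the noise: the iterator method spelled `it.next()` in Python 2 is `__next__` in Python 3 (normally called via the `next()` builtin), and `range()` now returns a lazy object, so comparing it against a list needs `list(range(...))`. a tiny check of both:

    def countdown(n):
        while n > 0:
            yield n
            n -= 1

    it = countdown(2)
    assert next(it) == 2                   # py2 spelling was it.next()
    assert it.__next__() == 1              # the renamed method, called directly

    assert [0, 1, 2] == list(range(3))     # range(3) itself is not a list
    assert [0, 1, 2] != range(3)
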
M infocalypse/graphutil.py +8 -8
@@ 22,7 22,7 @@ 
 
 from binascii import hexlify
 
-from graph import FIRST_INDEX, MAX_PATH_LEN, UpdateGraph, \
+from .graph import FIRST_INDEX, MAX_PATH_LEN, UpdateGraph, \
      UpdateGraphException, canonical_path_itr, edges_containing, INSERT_HUGE, \
      INSERT_NORMAL, MAX_METADATA_HACK_LEN
 

          
@@ 32,7 32,7 @@ def graph_to_string(graph):
     """ Returns a human readable representation of the graph. """
     lines = []
     # Indices
-    indices = graph.index_table.keys()
+    indices = list(graph.index_table.keys())
     indices.sort()
     for index in indices:
         if index == FIRST_INDEX:

          
@@ 46,7 46,7 @@ def graph_to_string(graph):
                                ':'.join(entry[1]))))
 
     # Edges
-    index_pairs = graph.edge_table.keys()
+    index_pairs = list(graph.edge_table.keys())
     # MUST sort so you get the same CHK for the same graph instance.
     index_pairs.sort()
     for index_pair in index_pairs:

          
@@ 86,7 86,7 @@ def parse_graph(text):
             heads = fields[divider + 1:]
 
             if index in graph.index_table:
-                print "OVERWRITING INDEX: " , index
+                print("OVERWRITING INDEX: " , index)
             if len(parents) < 1:
                 raise ValueError("index %i has no parent revs" % index)
             if len(heads) < 1:

          
@@ 106,7 106,7 @@ def parse_graph(text):
         #else:
         #    print "SKIPPED LINE:"
         #    print line
-    indices = graph.index_table.keys()
+    indices = list(graph.index_table.keys())
     if len(indices) == 0:
         raise ValueError("No indices?")
     indices.sort()

          
@@ 132,7 132,7 @@ def parse_v100_graph(text):
                 raise ValueError("Exception parsing index values.")
             index = int(fields[1])
             if index in graph.index_table:
-                print "OVERWRITING INDEX: " , index
+                print("OVERWRITING INDEX: " , index)
             if len(tuple(fields[2:])) != 2:
                 raise ValueError("Error parsing index value: %i" % index)
             versions = tuple(fields[2:])

          
@@ 150,7 150,7 @@ def parse_v100_graph(text):
         #else:
         #    print "SKIPPED LINE:"
         #    print line
-    indices = graph.index_table.keys()
+    indices = list(graph.index_table.keys())
     if len(indices) == 0:
         raise ValueError("No indices?")
     indices.sort()

          
@@ 260,7 260,7 @@ def coalesce_indices(original_graph, gra
     assert lacuna == False
 
     # Make indices contiguous.
-    indices = graph.index_table.keys()
+    indices = list(graph.index_table.keys())
     indices.sort()
 
     assert indices[0] == FIRST_INDEX

          
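the sort in graph_to_string is load-bearing: the comment above says the same graph must serialize to the same CHK. Python 3.7+ dicts do remember insertion order, but insertion order is not canonical, so the explicit sort still has to stay:

    # Same mapping built in two different orders:
    g1 = {(0, 1): "chk_a", (1, 2): "chk_b"}
    g2 = {(1, 2): "chk_b", (0, 1): "chk_a"}

    assert g1 == g2                    # equal as mappings...
    assert list(g1) != list(g2)        # ...but iteration order differs
    assert sorted(g1) == sorted(g2)    # sorting restores determinism
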
M infocalypse/hgoverlay.py +5 -5
@@ 24,7 24,7 @@ 
 import os
 from mercurial import cmdutil
 
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('fniki')
 from fileoverlay import OverlayedFiles, DirectFiles, WIKITEXT_ENCODING
 

          
@@ 103,8 103,8 @@ class HgFileOverlay(OverlayedFiles):
         wikitext_dir = self.repo_path(path)
         # Hmmmm... won't work for files in root. use -1?
         return tuple([os.path.split(name)[1] for name in
-                      self.repo.changectx(self.version).
-                      manifest().keys() if name.startswith(wikitext_dir)])
+                      list(self.repo.changectx(self.version).
+                      manifest().keys()) if name.startswith(wikitext_dir)])
 
     def exists_in_repo(self, path):
         """ INTERNAL: Return True if the file exists in the repo,

          
@@ 115,7 115,7 @@ class HgFileOverlay(OverlayedFiles):
     def read(self, path, mode='rb', non_overlayed=False):
         """ Read a file. """
         if non_overlayed:
-            return unicode(
+            return str(
                 get_hg_file(self.repo, self.repo_path(path),
                             self.version, self.tmp_file),
                 WIKITEXT_ENCODING)

          
@@ 123,7 123,7 @@ class HgFileOverlay(OverlayedFiles):
         if os.path.exists(overlayed):
             return DirectFiles.read(self, overlayed, mode)
 
-        return unicode(get_hg_file(self.repo, self.repo_path(path),
+        return str(get_hg_file(self.repo, self.repo_path(path),
                                    self.version, self.tmp_file),
                        WIKITEXT_ENCODING)
 

          
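`unicode(data, enc)` becomes `str(data, enc)` above; that two-argument str() form is exactly `bytes.decode()`, and the method form reads a little more directly if these get touched again:

    WIKITEXT_ENCODING = 'utf-8'
    data = "wikitext h\u00e9llo".encode(WIKITEXT_ENCODING)   # stand-in for repo bytes

    # The converted spelling and the method form are equivalent:
    assert str(data, WIKITEXT_ENCODING) == data.decode(WIKITEXT_ENCODING)
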
M infocalypse/infcmds.py +72 -72
@@ 27,33 27,33 @@ import socket
 import time
 from binascii import hexlify
 
-from mercurial import util
+from mercurial import util, error
 from mercurial import commands
 
-from fcpclient import parse_progress, is_usk, is_ssk, get_version, \
+from .fcpclient import parse_progress, is_usk, is_ssk, get_version, \
      get_usk_for_usk_version, FCPClient, is_usk_file, is_negative_usk
-from fcpconnection import FCPConnection, PolledSocket, CONNECTION_STATES, \
+from .fcpconnection import FCPConnection, PolledSocket, CONNECTION_STATES, \
      get_code, FCPError
-from fcpmessage import PUT_FILE_DEF
+from .fcpmessage import PUT_FILE_DEF
 
-from requestqueue import RequestRunner
+from .requestqueue import RequestRunner
 
-from graph import UpdateGraph, get_heads, has_version
-from bundlecache import BundleCache, is_writable, make_temp_file
-from updatesm import UpdateStateMachine, QUIESCENT, FINISHING, REQUESTING_URI, \
+from .graph import UpdateGraph, get_heads, has_version
+from .bundlecache import BundleCache, is_writable, make_temp_file
+from .updatesm import UpdateStateMachine, QUIESCENT, FINISHING, REQUESTING_URI, \
      REQUESTING_GRAPH, REQUESTING_BUNDLES, INVERTING_URI, \
      REQUESTING_URI_4_INSERT, INSERTING_BUNDLES, INSERTING_GRAPH, \
      INSERTING_URI, FAILING, REQUESTING_URI_4_COPY, \
      REQUIRES_GRAPH_4_HEADS, REQUESTING_GRAPH_4_HEADS, \
      RUNNING_SINGLE_REQUEST, UpdateContext
 
-from archivesm import ArchiveStateMachine, ArchiveUpdateContext
+from .archivesm import ArchiveStateMachine, ArchiveUpdateContext
 
-from statemachine import StatefulRequest
+from .statemachine import StatefulRequest
 
-from config import Config, DEFAULT_CFG_PATH, FORMAT_VERSION, normalize
+from .config import Config, DEFAULT_CFG_PATH, FORMAT_VERSION, normalize
 
-from knownrepos import DEFAULT_TRUST, DEFAULT_GROUPS
+from .knownrepos import DEFAULT_TRUST, DEFAULT_GROUPS
 
 DEFAULT_PARAMS = {
     # FCP params

          
@@ 110,24 110,24 @@ class UICallbacks:
         if not value:
             value = "UNKNOWN"
 
-        self.ui_.status("FCP connection [%s]\n" % value)
+        self.ui_.status(b"FCP connection [%s]\n" % value)
 
     def transition_callback(self, from_state, to_state):
         """ StateMachine transition callback that writes to a ui."""
         if self.verbosity < 1:
             return
         if self.verbosity > 2:
-            self.ui_.status("[%s]->[%s]\n" % (from_state.name, to_state.name))
+            self.ui_.status(b"[%s]->[%s]\n" % (from_state.name, to_state.name))
             return
         if to_state.name == FAILING:
-            self.ui_.status("Cleaning up after failure...\n")
+            self.ui_.status(b"Cleaning up after failure...\n")
             return
         if to_state.name == FINISHING:
-            self.ui_.status("Cleaning up...\n")
+            self.ui_.status(b"Cleaning up...\n")
             return
         msg = MSG_TABLE.get((from_state.name, to_state.name))
         if not msg is None:
-            self.ui_.status("%s\n" % msg)
+            self.ui_.status(b"%s\n" % msg)
 
     def monitor_callback(self, update_sm, client, msg):
         """ FCP message status callback which writes to a ui. """

          
@@ 165,7 165,7 @@ class UICallbacks:
         else:
             text = msg[0]
 
-        self.ui_.status("%s%s:%s\n" % (prefix, str(client.tag), text))
+        self.ui_.status(b"%s%s:%s\n" % (prefix, str(client.tag), text))
         # REDFLAG: re-add full dumping of FCP errors at debug level?
         #if msg[0].find('Failed') != -1 or msg[0].find('Error') != -1:
             #print  client.in_params.pretty()

          
@@ 195,9 195,9 @@ def get_config_info(ui_, opts):
 
     cfg = Config.from_ui(ui_)
     if cfg.defaults['FORMAT_VERSION'] != FORMAT_VERSION:
-        ui_.warn(('Updating config file: %s\n'
-                  + 'From format version: %s\nTo format version: %s\n') %
-                 (str(cfg.file_name),
+        ui_.warn((b'Updating config file: %s\n'
+                  + b'From format version: %s\nTo format version: %s\n') %
+                 (cfg.file_name,
                   cfg.defaults['FORMAT_VERSION'],
                   FORMAT_VERSION))
 

          
@@ 249,15 249,15 @@ def check_uri(ui_, uri):
 
     if is_usk(uri):
         if not is_usk_file(uri):
-            ui_.status("Only file USKs are allowed."
+            ui_.status(b"Only file USKs are allowed."
                        + "\nMake sure the URI ends with '/<number>' "
                        + "with no trailing '/'.\n")
-            raise util.Abort("Non-file USK %s\n" % uri)
+            raise error.Abort(b"Non-file USK %s\n" % uri)
         # Just fix it instead of doing B&H?
         if is_negative_usk(uri):
-            ui_.status("Negative USK index values are not allowed."
+            ui_.status(b"Negative USK index values are not allowed."
                        + "\nUse --aggressive instead. \n")
-            raise util.Abort("Negative USK %s\n" % uri)
+            raise error.Abort(b"Negative USK %s\n" % uri)
 
 def set_debug_vars(verbosity, params):
     """ Set debug dumping switch variables based on verbosity. """

          
@@ 283,7 283,7 @@ def setup(ui_, repo, params, stored_cfg)
     check_uri(ui_, params.get('REQUEST_URI'))
 
     if not is_writable(os.path.expanduser(stored_cfg.defaults['TMP_DIR'])):
-        raise util.Abort("Can't write to temp dir: %s\n"
+        raise error.Abort(b"Can't write to temp dir: %s\n"
                          % stored_cfg.defaults['TMP_DIR'])
 
     verbosity = params.get('VERBOSITY', 1)

          
@@ 300,11 300,11 @@ def setup(ui_, repo, params, stored_cfg)
         async_socket = PolledSocket(params['FCP_HOST'], params['FCP_PORT'])
         connection = FCPConnection(async_socket, True,
                                    callbacks.connection_state)
-    except socket.error, err: # Not an IOError until 2.6.
+    except socket.error as err: # Not an IOError until 2.6.
         ui_.warn("Connection to FCP server [%s:%i] failed.\n"
                 % (params['FCP_HOST'], params['FCP_PORT']))
         raise err
-    except IOError, err:
+    except IOError as err:
         ui_.warn("Connection to FCP server [%s:%i] failed.\n"
                 % (params['FCP_HOST'], params['FCP_PORT']))
         raise err

          
@@ 345,7 345,7 @@ def run_until_quiescent(update_sm, poll_
             # Poll the FCP Connection.
             try:
                 if not connection.socket.poll():
-                    print "run_until_quiescent -- poll returned False" 
+                    print("run_until_quiescent -- poll returned False") 
                     # REDFLAG: jam into quiesent state?,
                     # CONNECTION_DROPPED state?
                     break

          
@@ 387,7 387,7 @@ def do_key_setup(ui_, update_sm, params,
         insert_uri = ('USK'
                       + stored_cfg.defaults['DEFAULT_PRIVATE_KEY'][3:]
                       + insert_uri[5:])
-        ui_.status("Filled in the insert URI using the default private key.\n")
+        ui_.status(b"Filled in the insert URI using the default private key.\n")
 
     if insert_uri is None or not (is_usk(insert_uri) or is_ssk(insert_uri)):
         return (params.get('REQUEST_URI'), False)

          
@@ 395,7 395,7 @@ def do_key_setup(ui_, update_sm, params,
     update_sm.start_inverting(insert_uri)
     run_until_quiescent(update_sm, params['POLL_SECS'], False)
     if update_sm.get_state(QUIESCENT).prev_state != INVERTING_URI:
-        raise util.Abort("Couldn't invert private key:\n%s" % insert_uri)
+        raise error.Abort(b"Couldn't invert private key:\n%s" % insert_uri)
 
     inverted_uri = update_sm.get_state(INVERTING_URI).get_request_uri()
     params['INVERTED_INSERT_URI'] = inverted_uri

          
@@ 513,7 513,7 @@ def execute_create(ui_, repo, params, st
 
         ui_.debug("%sInsert URI:\n%s\n" % (is_redundant(params['INSERT_URI']),
                                             params['INSERT_URI']))
-        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])
+        #ui_.status(b"Current tip: %s\n" % hex_version(repo)[:12])
 
         update_sm.start_inserting(UpdateGraph(),
                                   params.get('TO_VERSIONS', ('tip',)),

          
@@ 523,9 523,9 @@ def execute_create(ui_, repo, params, st
 
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
             inserted_to = update_sm.get_state(INSERTING_URI).get_request_uris()
-            ui_.status("Inserted to:\n%s\n" % '\n'.join(inserted_to))
+            ui_.status(b"Inserted to:\n%s\n" % '\n'.join(inserted_to))
         else:
-            ui_.status("Create failed.\n")
+            ui_.status(b"Create failed.\n")
 
         handle_updating_config(repo, update_sm, params, stored_cfg)
     finally:

          
@@ 549,11 549,11 @@ def execute_copy(ui_, repo, params, stor
         run_until_quiescent(update_sm, params['POLL_SECS'])
 
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
-            ui_.status("Copied to:\n%s\n" %
+            ui_.status(b"Copied to:\n%s\n" %
                        '\n'.join(update_sm.get_state(INSERTING_URI).
                                  get_request_uris()))
         else:
-            ui_.status("Copy failed.\n")
+            ui_.status(b"Copy failed.\n")
 
         handle_updating_config(repo, update_sm, params, stored_cfg)
     finally:

          
@@ 586,12 586,12 @@ def execute_reinsert(ui_, repo, params, 
                 (not is_usk(params['REQUEST_URI'])) or
                 (not usks_equal(params['REQUEST_URI'],
                                 params['INVERTED_INSERT_URI']))):
-                raise util.Abort("Request URI doesn't match insert URI.")
+                raise error.Abort(b"Request URI doesn't match insert URI.")
 
             ui_.debug("%sInsert URI:\n%s\n" % (is_redundant(params[
                 'INSERT_URI']),
                                                 params['INSERT_URI']))
-        ui_.status("%sRequest URI:\n%s\n" % (is_redundant(params[
+        ui_.status(b"%sRequest URI:\n%s\n" % (is_redundant(params[
             'REQUEST_URI']),
                                              params['REQUEST_URI']))
 

          
@@ 604,9 604,9 @@ def execute_reinsert(ui_, repo, params, 
         run_until_quiescent(update_sm, params['POLL_SECS'])
 
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
-            ui_.status("Reinsert finished.\n")
+            ui_.status(b"Reinsert finished.\n")
         else:
-            ui_.status("Reinsert failed.\n")
+            ui_.status(b"Reinsert failed.\n")
 
         # Don't need to update the config.
     finally:

          
@@ 629,7 629,7 @@ def execute_push(ui_, repo, params, stor
 
         ui_.debug("%sInsert URI:\n%s\n" % (is_redundant(params['INSERT_URI']),
                                             params['INSERT_URI']))
-        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])
+        #ui_.status(b"Current tip: %s\n" % hex_version(repo)[:12])
 
         update_sm.start_pushing(params['INSERT_URI'],
                                 params.get('TO_VERSIONS', ('tip',)),

          
@@ 639,13 639,13 @@ def execute_push(ui_, repo, params, stor
 
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
             inserted_to = update_sm.get_state(INSERTING_URI).get_request_uris()
-            ui_.status("Inserted to:\n%s\n" %
+            ui_.status(b"Inserted to:\n%s\n" %
                        '\n'.join(inserted_to))
         else:
             extra = ''
             if update_sm.ctx.get('UP_TO_DATE', False):
                 extra = '. Local changes already in Freenet'
-            ui_.status("Push failed%s.\n" % extra)
+            ui_.status(b"Push failed%s.\n" % extra)
 
         handle_updating_config(repo, update_sm, params, stored_cfg)
     finally:

          
@@ 673,20 673,20 @@ def execute_pull(ui_, repo, params, stor
                                (index, get_version(params['REQUEST_URI'])))
 
         update_sm = setup(ui_, repo, params, stored_cfg)
-        ui_.status("%sRequest URI:\n%s\n" % (is_redundant(params[
+        ui_.status(b"%sRequest URI:\n%s\n" % (is_redundant(params[
             'REQUEST_URI']),
                                              params['REQUEST_URI']))
-        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])
+        #ui_.status(b"Current tip: %s\n" % hex_version(repo)[:12])
         update_sm.start_pulling(params['REQUEST_URI'])
         run_until_quiescent(update_sm, params['POLL_SECS'])
 
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
-            ui_.status("Pulled from:\n%s\n" %
+            ui_.status(b"Pulled from:\n%s\n" %
                        update_sm.get_state('REQUESTING_URI').
                        get_latest_uri())
-            #ui_.status("New tip: %s\n" % hex_version(repo)[:12])
+            #ui_.status(b"New tip: %s\n" % hex_version(repo)[:12])
         else:
-            ui_.status("Pull failed.\n")
+            ui_.status(b"Pull failed.\n")
 
         handle_updating_config(repo, update_sm, params, stored_cfg, True)
     finally:

          
@@ 711,7 711,7 @@ def read_freenet_heads(params, update_sm
             assert not update_sm.ctx.graph is None
             return get_heads(update_sm.ctx.graph)
 
-    raise util.Abort("Couldn't read heads from Freenet.")
+    raise error.Abort(b"Couldn't read heads from Freenet.")
 
 
 NO_INFO_FMT = """There's no stored information about this USK.

          
@@ 736,7 736,7 @@ def execute_info(ui_, repo, params, stor
     """ Run the info command. """
     request_uri = params['REQUEST_URI']
     if request_uri is None or not is_usk_file(request_uri):
-        ui_.status("Only works with USK file URIs.\n")
+        ui_.status(b"Only works with USK file URIs.\n")
         return
 
     usk_hash = normalize(request_uri)

          
@@ 777,7 777,7 @@ def setup_tmp_dir(ui_, tmp):
     if not os.path.exists(tmp):
         try:
             os.makedirs(tmp)
-        except os.error, err:
+        except os.error as err:
             # Will exit below.
             ui_.warn(err)
     return tmp

          
@@ 796,7 796,7 @@ cfg_file: %s
 """
 
 MSG_CFG_EXISTS = \
-"""%s already exists!
+b"""%s already exists!
 Move it out of the way if you really
 want to re-run setup.
 

          
@@ 817,9 817,9 @@ def execute_setup(ui_, host, port, tmp, 
     def connection_failure(msg):
         """ INTERNAL: Display a warning string. """
         ui_.warn(msg)
-        ui_.warn("It looks like your FCP host or port might be wrong.\n")
-        ui_.warn("Set them with --fcphost and/or --fcpport and try again.\n")
-        raise util.Abort("Connection to FCP server failed.")
+        ui_.warn(b"It looks like your FCP host or port might be wrong.\n")
+        ui_.warn(b"Set them with --fcphost and/or --fcpport and try again.\n")
+        raise error.Abort(b"Connection to FCP server failed.")
 
     # Fix defaults.
     if host == '':

          
@@ 830,7 830,7 @@ def execute_setup(ui_, host, port, tmp, 
     if cfg_file is None:
         cfg_file = os.path.expanduser(DEFAULT_CFG_PATH)
 
-    existing_name = ui_.config('infocalypse', 'cfg_file', None)
+    existing_name = ui_.config(b'infocalypse', b'cfg_file', None)
     if not existing_name is None:
         existing_name = os.path.expanduser(existing_name)
         ui_.status(MSG_HGRC_SET % existing_name)

          
@@ 838,19 838,19 @@ def execute_setup(ui_, host, port, tmp, 
 
     if os.path.exists(cfg_file):
         ui_.status(MSG_CFG_EXISTS % cfg_file)
-        raise util.Abort("Refusing to modify existing configuration.")
+        raise error.Abort(b"Refusing to modify existing configuration.")
 
     tmp = setup_tmp_dir(ui_, tmp)
 
     if not is_writable(tmp):
-        raise util.Abort("Can't write to temp dir: %s\n" % tmp)
+        raise error.Abort(b"Can't write to temp dir: %s\n" % tmp)
 
     # Test FCP connection.
     timeout_secs = 20
     connection = None
     default_private_key = None
     try:
-        ui_.status("Testing FCP connection [%s:%i]...\n" % (host, port))
+        ui_.status(b"Testing FCP connection [%s:%i]...\n" % (host, port))
 
         connection = FCPConnection(PolledSocket(host, port))
 

          
@@ 864,13 864,13 @@ def execute_setup(ui_, host, port, tmp, 
             connection_failure(("\nGave up after waiting %i secs for an "
                                + "FCP NodeHello.\n") % timeout_secs)
 
-        ui_.status("Looks good.\nGenerating a default private key...\n")
+        ui_.status(b"Looks good.\nGenerating a default private key...\n")
 
         # Hmmm... this waits on a socket. Will an ioerror cause an abort?
         # Lazy, but I've never seen this call fail except for IO reasons.
         client = FCPClient(connection)
         client.message_callback = lambda x, y:None # Disable chatty default.
-        default_private_key = client.generate_ssk()[1]['InsertURI']
+        default_private_key = client.generate_ssk()[1][b'InsertURI']
 
     except FCPError:
         # Protocol error.

          
@@ 891,7 891,7 @@ def execute_setup(ui_, host, port, tmp, 
     cfg.defaults['DEFAULT_PRIVATE_KEY'] = default_private_key
     Config.to_file(cfg, cfg_file)
 
-    ui_.status("""\nFinished setting configuration.
+    ui_.status(b"""\nFinished setting configuration.
 FCP host: %s
 FCP port: %i
 Temp dir: %s

          
@@ 914,14 914,14 @@ def create_patch_bundle(ui_, repo, freen
     # Make sure you have them all locally
     for head in freenet_heads:
         if not has_version(repo, head):
-            raise util.Abort("The local repository isn't up to date. " +
+            raise error.Abort(b"The local repository isn't up to date. " +
                              "Run hg fn-pull.")
 
     heads = [hexlify(head) for head in repo.heads()]
     heads.sort()
 
     if freenet_heads == heads:
-        raise util.Abort("All local changesets already in the repository " +
+        raise error.Abort(b"All local changesets already in the repository " +
                          "in Freenet.")
 
     # Create a bundle using the freenet_heads as bases.

          
@@ 945,7 945,7 @@ def execute_insert_patch(ui_, repo, para
         update_sm = setup(ui_, repo, params, stored_cfg)
         out_file = make_temp_file(update_sm.ctx.bundle_cache.base_dir)
 
-        ui_.status("Reading repo state from Freenet...\n")
+        ui_.status(b"Reading repo state from Freenet...\n")
         freenet_heads = read_freenet_heads(params, update_sm,
                                            params['REQUEST_URI'])
 

          
@@ 965,7 965,7 @@ def execute_insert_patch(ui_, repo, para
         # Must do this here because file gets deleted.
         chk_len = os.path.getsize(out_file)
 
-        ui_.status("Inserting %i byte patch bundle...\n" %
+        ui_.status(b"Inserting %i byte patch bundle...\n" %
                    os.path.getsize(out_file))
         update_sm.start_single_request(request)
         run_until_quiescent(update_sm, params['POLL_SECS'])

          
@@ 977,18 977,18 @@ def execute_insert_patch(ui_, repo, para
         if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
             chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                   final_msg[1]['URI']
-            ui_.status("Patch CHK:\n%s\n" %
+            ui_.status(b"Patch CHK:\n%s\n" %
                        chk)
             # ':', '|' not in freenet base64
-            ret = ':'.join(('B', normalize(params['REQUEST_URI']), str(chk_len),
+            ret = b':'.join((b'B', normalize(params['REQUEST_URI']), str(chk_len).encode("utf8"),
                             b':'.join([base[:12] for base in freenet_heads]),
-                            '|', ':'.join([head[:12] for head in heads]), chk))
+                            b'|', b':'.join([head[:12] for head in heads]), chk))
 
-            ui_.status("\nNotification:\n%s\n" % ret
-                        + '\n')
+            ui_.status(b"\nNotification:\n%s\n" % ret
+                        + b'\n')
             return ret
 
-        raise util.Abort("Patch CHK insert failed.")
+        raise error.Abort(b"Patch CHK insert failed.")
 
     finally:
         # Cleans up out file.

          
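infcmds.py is where the str/bytes split bites hardest: Mercurial's py3 ui_.status(), ui_.warn() and error.Abort() all take bytes, and bytes %-formatting (PEP 461, Python 3.5+) requires every %s operand to be bytes-like too. several call sites above still interpolate str values (state names, URIs, joined lists) into b"..." templates and will need their operands encoded; a small coercion helper keeps that readable (the to_bytes name here is invented; Mercurial's own pycompat module carries similar helpers):

    def to_bytes(value, encoding="utf-8"):
        """Coerce str (or anything printable) to bytes for the ui API."""
        if isinstance(value, bytes):
            return value
        if not isinstance(value, str):
            value = str(value)
        return value.encode(encoding)

    # b"%s" % <str> raises TypeError; encode the operand first:
    assert b"FCP connection [%s]\n" % (to_bytes("connected"),) == \
           b"FCP connection [connected]\n"
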
M infocalypse/insertingbundles.py +6 -6
@@ 20,14 20,14 @@ 
     Author: djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks
 """
 
-from graph import UpToDate, INSERT_SALTED_METADATA, INSERT_HUGE, \
+from .graph import UpToDate, INSERT_SALTED_METADATA, INSERT_HUGE, \
      FREENET_BLOCK_LEN, build_version_table, get_heads, \
      PENDING_INSERT1
-from graphutil import graph_to_string, find_redundant_edges, \
+from .graphutil import graph_to_string, find_redundant_edges, \
      find_alternate_edges, get_huge_top_key_edges
-from bundlecache import BundleException
+from .bundlecache import BundleException
 
-from statemachine import RequestQueueState
+from .statemachine import RequestQueueState
 
 # REDFLAG: duplicated to get around circular deps.
 INSERTING_GRAPH = 'INSERTING_GRAPH'

          
@@ 95,7 95,7 @@ class InsertingBundles(RequestQueueState
         # Update graph.
         try:
             self.set_new_edges(graph)
-        except UpToDate, err:
+        except UpToDate as err:
             # REDFLAG: Later, add FORCE_INSERT parameter?
             # REDFLAG: rework UpToDate exception to include versions, stuff
             #      versions in  ctx?

          
@@ 140,7 140,7 @@ class InsertingBundles(RequestQueueState
     def leave(self, dummy):
         """ Implementation of State virtual. """
         # Hmmm...
-        for request in self.pending.values():
+        for request in list(self.pending.values()):
             self.parent.runner.cancel_request(request)
 
     def reset(self):

          
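unlike the sort-related list() wraps elsewhere, the one in leave() above is a correctness fix: if cancel_request() ends up removing entries from self.pending, iterating the live values() view while that happens raises RuntimeError in Python 3. the snapshot makes the deletes safe:

    pending = {"id-1": "req-a", "id-2": "req-b"}

    def cancel_request(request_id):
        del pending[request_id]        # mutates the dict the loop is walking

    for request_id in list(pending):   # snapshot; the live view would blow up
        cancel_request(request_id)
    assert pending == {}
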
M infocalypse/keys.py +1 -2
@@ 1,10 1,9 @@ 
-from string import split
 from mercurial import util
 
 
 class USK:
     def __init__(self, path):
-        components = split(path, '/')
+        components = path.split('/')
         # Expecting USK@key/name/edition
         assert len(components) == 3
 

          
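the string-module functions went away entirely in Python 3; only the methods remain:

    path = 'USK@key/name/edition'
    components = path.split('/')       # was: string.split(path, '/')
    assert components == ['USK@key', 'name', 'edition']
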
M infocalypse/knownrepos.py +2 -2
@@ 22,7 22,7 @@ 
     let me know and I'll add it.
 """
 
-from fcpclient import get_usk_hash
+from .fcpclient import get_usk_hash
 
 # LATER: remove this file
 # djk20110918:  Bad out of date info is worse than none at all.

          
@@ 46,7 46,7 @@ def build_trust_list(id_usk_list):
         if not usk_hash in hashes:
             hashes.append(usk_hash)
         table[fms_id] = hashes
-    for fms_id in table.keys()[:]:
+    for fms_id in list(table.keys())[:]:
         table[fms_id] = tuple(table[fms_id])
     return table
 

          
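in build_trust_list above, the `[:]` after list() is now redundant (list() already returns a fresh copy), and since the loop only replaces values (the key set never changes size), even iterating the live view would have been safe here:

    table = {"nym@key": ["hash1", "hash2"]}

    for fms_id in list(table.keys()):          # the [:] copy adds nothing
        table[fms_id] = tuple(table[fms_id])
    assert table == {"nym@key": ("hash1", "hash2")}
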
M infocalypse/plugin_connect.py +6 -6
@@ 5,9 5,9 @@ import threading
 import atexit
 from mercurial import util
 import sys
-from config import Config
-from wot_id import WoT_ID, Local_WoT_ID
-import wot
+from .config import Config
+from .wot_id import WoT_ID, Local_WoT_ID
+from . import wot
 
 PLUGIN_NAME = "org.freenetproject.plugin.dvcs_webui.main.Plugin"
 

          
@@ 34,7 34,7 @@ def connect(ui, repo):
 
     if hi_there['Replies.Message'] == 'Error':
         # TODO: Debugging
-        print hi_there
+        print(hi_there)
         raise util.Abort("Another VCS instance is already connected.")
 
     session_token = hi_there['Replies.SessionToken']

          
@@ 138,7 138,7 @@ def LocalRepoQuery(_, cfg, **opts):
     params = {}
     # Request USKs are keyed by repo path.
     repo_index = 0
-    for path in cfg.request_usks.iterkeys():
+    for path in cfg.request_usks.keys():
         params['Path.{0}'.format(repo_index)] = path
         repo_index += 1
 

          
@@ 155,7 155,7 @@ def RepoListQuery(command, ui, **opts):
 
     repo_list = wot.read_repo_listing(ui, identity)
 
-    for name, key in repo_list.iteritems():
+    for name, key in repo_list.items():
         params['Repo.' + name] = key
 
     return "RepoListResult", params

          
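iterkeys()/iteritems() are gone; in Python 3, keys() and items() are already lazy views, so the plain methods drop straight in. the manual repo_index counter in LocalRepoQuery could also become enumerate(); a sketch with made-up repo paths (dict order is insertion order on Python 3.7+):

    request_usks = {"~/repo_a": "USK@a/repo/1", "~/repo_b": "USK@b/repo/2"}

    params = {}
    for repo_index, path in enumerate(request_usks):   # views iterate directly
        params['Path.{0}'.format(repo_index)] = path
    assert params == {'Path.0': '~/repo_a', 'Path.1': '~/repo_b'}
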
M infocalypse/requestingbundles.py +12 -12
@@ 24,18 24,18 @@ 
 import os
 import random # Hmmm... good enough?
 
-from fcpmessage import GET_DEF
+from .fcpmessage import GET_DEF
 
-from bundlecache import make_temp_file
-from graph import latest_index, \
+from .bundlecache import make_temp_file
+from .graph import latest_index, \
      FREENET_BLOCK_LEN, chk_to_edge_triple_map, \
      dump_paths, MAX_PATH_LEN, get_heads, canonical_path_itr
-from graphutil import parse_graph
-from choose import get_update_edges, dump_update_edges, SaltingState
+from .graphutil import parse_graph
+from .choose import get_update_edges, dump_update_edges, SaltingState
 
-from statemachine import RetryingRequestList, CandidateRequest
+from .statemachine import RetryingRequestList, CandidateRequest
 
-from chk import clear_control_bytes
+from .chk import clear_control_bytes
 
 def fixup(edges, candidate_list):
     """ INTERNAL : Helper used by _set_graph to fix up CHKs->edges. """

          
@@ 177,7 177,7 @@ class RequestingBundles(RetryingRequestL
                 # Don't attempt to queue updates if we don't know
                 # full parent/head info.
                 # REDFLAG: remove test code
-                print "_queue_from_updates -- bailing out", update[4], update[5]
+                print("_queue_from_updates -- bailing out", update[4], update[5])
                 break
 
             if only_latest and update[0] > 5 * FREENET_BLOCK_LEN:

          
@@ 358,7 358,7 @@ class RequestingBundles(RetryingRequestL
         edges = chk_to_edge_triple_map(graph)
 
         skip_chks = set([]) # REDFLAG: remove!
-        for request in self.pending.values():
+        for request in list(self.pending.values()):
             candidate = request.candidate
             if candidate[6]:
                 continue

          
@@ 450,7 450,7 @@ class RequestingBundles(RetryingRequestL
         # REDFLAG: Magick number
         while len(first_paths) < 20:
             try:
-                first_paths.append(paths.next())
+                first_paths.append(next(paths))
             except StopIteration:
                 break
 

          
@@ 876,7 876,7 @@ class RequestingBundles(RetryingRequestL
             required. """
         #print "_remove_old_candidates -- called"
         # Cancel pending requests which are no longer required.
-        for client in self.pending.values():
+        for client in list(self.pending.values()):
             candidate = client.candidate
             if candidate[6]:
                 continue # Skip graph requests.

          
@@ 1005,7 1005,7 @@ class RequestingBundles(RetryingRequestL
         count_edges(edge_counts, bad_counts, self.pending_candidates())
 
         if len(bad_counts) > 0:
-            print "MULTIPLE EDGES: ", bad_counts
+            print("MULTIPLE EDGES: ", bad_counts)
             self.dump()
             assert False
 

          
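the converted while/try loop above ("take at most 20 items from an iterator, stop early on StopIteration") has a stdlib spelling that handles the bookkeeping itself:

    import itertools

    paths = iter([["edge-1"], ["edge-2"], ["edge-3"]])   # stand-in for canonical_path_itr(...)
    first_paths = list(itertools.islice(paths, 20))      # stops early or at 20
    assert first_paths == [["edge-1"], ["edge-2"], ["edge-3"]]
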
M infocalypse/requestqueue.py +3 -3
@@ 23,7 23,7 @@ 
 
 import time
 
-from fcpconnection import MinimalClient
+from .fcpconnection import MinimalClient
 
 class QueueableRequest(MinimalClient):
     """ A request which can be queued in a RequestQueue and run

          
@@ 83,7 83,7 @@ class RequestRunner:
 
         # Cancel running requests which have timed out.
         now = time.time()
-        for client in self.running.values():
+        for client in list(self.running.values()):
             assert client.cancel_time_secs
             if client.cancel_time_secs < now:
                 self.connection.remove_request(client.request_id())

          
@@ 109,7 109,7 @@ class RequestRunner:
                 #if 'URI' in client.in_params.fcp_params:
                 #    print "   ", client.in_params.fcp_params['URI']
                 assert client.queue == self.request_queues[self.index]
-                client.in_params.async = True
+                client.in_params.async_ = True # '__async' would be name-mangled inside the class
                 client.message_callback = self.msg_callback
                 self.running[self.connection.start_request(
                     client, client.custom_data_source)] = client

          
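the in_params rename above deserves a comment: `async` became a reserved keyword in Python 3.7, so the attribute can't keep its old name, and a double leading underscore is the wrong replacement because any `__name` written inside a class body gets name-mangled. PEP 8's convention for keyword clashes is a single trailing underscore:

    class Params:
        pass

    class Runner:
        def start(self, in_params):
            # 'in_params.__async = True' here would really set
            # 'in_params._Runner__async' (name mangling).
            in_params.async_ = True   # trailing '_' dodges the keyword

    params = Params()
    Runner().start(params)
    assert params.async_ is True
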
M infocalypse/run_wikibot.py +37 -37
@@ 26,17 26,17 @@ import os
 import signal
 import sys
 
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
 
-from fcpclient import FCPClient, get_usk_hash
-from fcpconnection import FCPConnection, PolledSocket
-from requestqueue import RequestRunner
-from bundlecache import is_writable
+from .fcpclient import FCPClient, get_usk_hash
+from .fcpconnection import FCPConnection, PolledSocket
+from .requestqueue import RequestRunner
+from .bundlecache import is_writable
 
-from fmsstub import FMSStub
+from .fmsstub import FMSStub
 
-from fmsbot import FMSBotRunner, run_event_loops, make_bot_path
-from wikibot import WikiBot
+from .fmsbot import FMSBotRunner, run_event_loops, make_bot_path
+from .wikibot import WikiBot
 
 ############################################################
 # FCP info

          
@@ 109,16 109,16 @@ def get_dirs(base_dir, create=False):
         for value in ret:
             if os.path.exists(value):
                 raise IOError("Directory already exists: %s" % value)
-        print
+        print()
         for value in ret:
             os.makedirs(value)
             if not is_writable(value):
                 raise IOError("Couldn't write to: %s" % value)
-            print "Created: %s" % value
+            print("Created: %s" % value)
 
-        print
-        print "You need to MANUALLY fn-pull the wikitext repo into:"
-        print ret[1]
+        print()
+        print("You need to MANUALLY fn-pull the wikitext repo into:")
+        print(ret[1])
 
     else:
         for value in ret:

          
@@ 141,7 141,7 @@ def get_params(base_dir):
 
     # MUST contain SSK private key
     key_file = KEY_FILE_FMT % get_usk_hash(params['WIKI_REPO_USK'])
-    print "Read insert key from: %s" % key_file
+    print("Read insert key from: %s" % key_file)
 
     # Load private key for the repo from a file..
     insert_ssk = open(os.path.expanduser(key_file), 'rb').read().strip()

          
@@ 154,12 154,12 @@ def get_params(base_dir):
     insert_uri = 'U' + insert_ssk[1:] + '/' + human
 
     # Then invert the request_uri from it.
-    print "Inverting public key from private one..."
+    print("Inverting public key from private one...")
     request_uri = FCPClient.connect(FCP_HOST, FCP_PORT). \
                   get_request_uri(insert_uri)
-    print request_uri
+    print(request_uri)
     if get_usk_hash(request_uri) != get_usk_hash(params['WIKI_REPO_USK']):
-        print "The insert SSK doesn't match WIKI_REPO_USK in fnwiki.cfg!"
+        print("The insert SSK doesn't match WIKI_REPO_USK in fnwiki.cfg!")
         assert False
 
     # LATER: Name convention.

          
@@ 232,7 232,7 @@ def run_wikibot(params):
     # Setup FMSBotRunner to house the WikiBot.
     bot_runner = FMSBotRunner(params)
     if 'MSG_SPOOL_DIR' in params:
-        print "READING MESSAGES FROM SPOOL DIR INSTEAD OF FMS!"
+        print("READING MESSAGES FROM SPOOL DIR INSTEAD OF FMS!")
 
         # This table MUST map all short names to full fms_ids for
         # all message senders. MUST contain the bot fms_id.

          
@@ 277,24 277,24 @@ def cmd_stop(params):
                                  'wikibot_' + params['USK_HASH'],
                                  'pid'), 'rb').read().strip())
 
-        print "Stopping, pid: %i..." % pid
+        print("Stopping, pid: %i..." % pid)
         os.kill(pid, signal.SIGINT)
         os.waitpid(pid, 0)
-        print "Stopped."
+        print("Stopped.")
     except IOError: # no pid file
-        print "Not running."
-    except OSError, err:
+        print("Not running.")
+    except OSError as err:
         if err.errno ==  errno.ECHILD:
             # Process died before waitpid.
-            print "Stopped."
+            print("Stopped.")
         else:
-            print "Failed: ", err
+            print("Failed: ", err)
 
 def cmd_status(params):
     """ Check if the bot is running."""
 
-    print "wikibot_%s:" % params['USK_HASH']
-    print "storage: %s" % params['BOT_STORAGE_DIR']
+    print("wikibot_%s:" % params['USK_HASH'])
+    print("storage: %s" % params['BOT_STORAGE_DIR'])
 
     # Attribution:
     # http://stackoverflow.com/questions/38056/how-do-you-check-in-linux-with- \

          
@@ 304,18 304,18 @@ def cmd_status(params):
                                  'wikibot_' + params['USK_HASH'],
                                  'pid'), 'rb').read().strip())
 
-        print "pid: %i" % pid
+        print("pid: %i" % pid)
         os.kill(pid, 0)
-        print "STATUS: Running"
+        print("STATUS: Running")
     except IOError: # no pid file
-        print "STATUS: Stopped"
-    except OSError, err:
+        print("STATUS: Stopped")
+    except OSError as err:
         if err.errno == errno.ESRCH:
-            print "STATUS: Crashed!"
+            print("STATUS: Crashed!")
         elif err.errno == errno.EPERM:
-            print "No permission to signal this process! Maybe run whoami?"
+            print("No permission to signal this process! Maybe run whoami?")
         else:
-            print "Unknown error checking pid!"
+            print("Unknown error checking pid!")
 
 def cmd_catchup(params):
     """ Rebuild local working files rebuilding IGNORING all

          
@@ 334,10 334,10 @@ def cmd_catchup(params):
 def cmd_help(dummy):
     """ Print a help message."""
 
-    print """USAGE:
+    print("""USAGE:
 run_wikibot.py <cmd>
 
-where <cmd> is %s""" % (', '.join(DISPATCH_TABLE.keys()))
+where <cmd> is %s""" % (', '.join(list(DISPATCH_TABLE.keys()))))
 
 DISPATCH_TABLE = {"setup":cmd_setup,
                   "start":cmd_start,

          
@@ 354,8 354,8 @@ def main():
     try:
         parameters = (None if cmd == 'setup' or cmd == 'help'
                       else get_params(BASE_DIR))
-    except IOError, err:
-        print "FAILED: %s" % str(err)
+    except IOError as err:
+        print("FAILED: %s" % str(err))
         return
 
     DISPATCH_TABLE[cmd](parameters)

          
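The cmd_stop() and cmd_status() hunks above switch to the except OSError as err form, the only exception-binding syntax Python 3 accepts (it also works from 2.6 on). The liveness probe they are built on is os.kill() with signal 0; a minimal self-contained sketch of that pattern (is_running is a hypothetical name, not part of this source):

    import errno
    import os

    def is_running(pid):
        """True if a process with this pid currently exists."""
        try:
            os.kill(pid, 0)      # signal 0: existence check, sends nothing
            return True
        except OSError as err:   # 'except OSError, err' is a SyntaxError in Python 3
            # EPERM: exists but owned by someone else; ESRCH: no such process.
            return err.errno == errno.EPERM
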
M infocalypse/sitecmds.py +5 -5
@@ 25,15 25,15 @@ import shutil
 
 from mercurial import util
 
-from fcpconnection import FCPError
-from fcpclient import FCPClient, get_file_infos, set_index_file
+from .fcpconnection import FCPError
+from .fcpclient import FCPClient, get_file_infos, set_index_file
 
 # HACK
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('fniki')
 import piki
 
-from config import write_default_config
+from .config import write_default_config
 
 def get_insert_uri(params):
     """ Helper function builds the insert URI. """

          
@@ 123,7 123,7 @@ def do_freenet_insert(ui_, repo, params,
             request_uri = client.put_complex_dir(insert_uri, infos,
                                                  default_mime_type)[1]['URI']
             show_request_uri(ui_, params, request_uri)
-        except FCPError, err:
+        except FCPError as err:
             if err.is_code(9): # magick number for collision
                 ui_.warn('An update was already inserted on that index.\n'
                          + 'Set a later index with --index and try again.\n')

          
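sitecmds.py is typical of the import rewrites that run through the whole commit: Python 3 dropped implicit relative imports, so every intra-package import gains an explicit leading dot, while the sibling fniki tree (not a package) is still reached through the sys.path hack and imported absolutely. A sketch of the two cases, reusing the module names from the hunks above (it only runs from inside the infocalypse package):

    # Implicit relative import, Python 2 only:
    #   from fcpclient import FCPClient
    # Explicit relative import, Python 2.5+ and Python 3:
    from .fcpclient import FCPClient
    from .pathhacks import add_parallel_sys_path

    # fniki is a parallel source directory, not a subpackage, so it is
    # added to sys.path and imported absolutely:
    add_parallel_sys_path('fniki')
    import piki
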
M infocalypse/statemachine.py +9 -9
@@ 25,8 25,8 @@ 
 
 import os
 
-from fcpconnection import SUCCESS_MSGS
-from requestqueue import QueueableRequest
+from .fcpconnection import SUCCESS_MSGS
+from .requestqueue import QueueableRequest
 
 # Move this to fcpconnection?
 def delete_client_file(client):

          
@@ 76,7 76,7 @@ class StateMachine:
 
     def reset(self):
         """ Reset all State instances owned by the StateMachine. """
-        for state in self.states.values():
+        for state in list(self.states.values()):
             state.reset()
 
 class State:

          
@@ 98,7 98,7 @@ class State:
 
     def reset(self):
         """ Pure virtual to reset the state. """
-        print self.name
+        print(self.name)
         raise NotImplementedError()
 
 class StatefulRequest(QueueableRequest):

          
@@ 120,8 120,8 @@ class RequestQueueState(State):
     def reset(self):
         """ Implementation of State virtual. """
         if len(self.pending) > 0:
-            print ("BUG?: Reseting state: %s with %i pending requests!" %
-                   (self.name, len(self.pending)))
+            print(("BUG?: Reseting state: %s with %i pending requests!" %
+                   (self.name, len(self.pending))))
 
     def next_runnable(self):
         """ Return a MinimalClient instance for the next request to

          
@@ 155,7 155,7 @@ class DecisionState(RequestQueueState):
         """ Pure virtual.
 
             Return the state to transition into. """
-        print "ENOTIMPL:" + self.name
+        print("ENOTIMPL:" + self.name)
         return ""
 
     # Doesn't handle FCP requests.

          
@@ 256,7 256,7 @@ class Canceling(RequestQueueState):
             return
 
         self.pending = from_state.pending.copy()
-        for request in self.pending.values():
+        for request in list(self.pending.values()):
             self.parent.runner.cancel_request(request)
 
     def request_done(self, client, dummy):

          
@@ 325,7 325,7 @@ class RetryingRequestList(RequestQueueSt
     def pending_candidates(self):
         """ Returns the candiates that are currently being run
             by the RequestQueue. """
-        return [request.candidate for request in self.pending.values()]
+        return [request.candidate for request in list(self.pending.values())]
 
     # ORDER:
     # 0) Candidates are popped of the lists.

          
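The list() wrappers added around .keys() and .values() throughout statemachine.py are not cosmetic: Python 3 returns live view objects, and Canceling iterates pending while cancel_request() callbacks may remove entries from it, which raises RuntimeError on a bare view. A self-contained sketch of the failure mode and the snapshot fix:

    pending = {'req-1': object(), 'req-2': object()}

    # Iterating the live view while deleting raises under Python 3:
    #   for key in pending.keys(): del pending[key]   # RuntimeError
    for key in list(pending.keys()):   # snapshot first, then mutate freely
        del pending[key]

    assert not pending
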
M infocalypse/submission.py +15 -15
@@ 23,18 23,18 @@ 
 
 import os
 import time
-import StringIO
+import io
 
 from mercurial import mdiff
 from mercurial import commands
 from zipfile import ZipFile
 from binascii import hexlify
 
-from graph import hex_version, has_version
-from validate import is_hex_string
-from hgoverlay import HgFileOverlay
+from .graph import hex_version, has_version
+from .validate import is_hex_string
+from .hgoverlay import HgFileOverlay
 
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 
 add_parallel_sys_path('wormarc')
 from shafunc import new_sha

          
@@ 60,8 60,8 @@ MAX_INFO_LEN = 1024 # Arbitrary, reasona
 
 #----------------------------------------------------------#
 CRLF = '\x0d\x0a'
-EMPTY_FILE_SHA_HEX = new_sha('').hexdigest()
-EMPTY_FILE_SHA = new_sha('').digest()
+EMPTY_FILE_SHA_HEX = new_sha(b'').hexdigest()
+EMPTY_FILE_SHA = new_sha(b'').digest()
 #----------------------------------------------------------#
 # diff / patch helper funcs
 #

          
@@ 118,10 118,10 @@ def unicode_make_patch(old_text, new_tex
 
     return values[1]
 
 def utf8_sha(unicode_text):
     """ Return a SHA1 hash instance for the utf8 8-bit string rep
-        of unicode_text."""
+        of unicode_text (a str under Python 3)."""
     return new_sha(unicode_text.encode('utf8'))
 
 class SubmitError(Exception):
     """ Exception used to indicate failure by bundle_wikitext and

          
@@ 224,7 224,7 @@ def bundle_wikitext(overlay, version, su
 
     illegal_writes = get_read_only_list(overlay)
 
-    buf = StringIO.StringIO()
+    buf = io.BytesIO()  # ZipFile needs a binary buffer under Python 3
     arch = ZipFile(buf, 'w')
     assert version
     arch.writestr('__INFO__', pack_info(version, submitter))

          
@@ 764,18 764,18 @@ class ForkingSubmissionHandler:
                                                self.repo,
                                                self.base_dir,
                                                tmp_file,
-                                               StringIO.StringIO(
+                                               io.BytesIO(
                                                    raw_zip_bytes)))
             return True
 
-        except NoChangesError, err:
+        except NoChangesError as err:
             self.logger.debug("apply_submission -- no changes, illegal: %s" %
                               str(err.illegal))
             if not err.illegal:
                 # i.e. zip contained legal changes that were already applied.
                 code = REJECT_APPLIED
 
-        except SubmitError, err:
+        except SubmitError as err:
             self.logger.debug("apply_submission --  err: %s" % str(err))
 
             if err.illegal:

          
@@ 783,7 783,7 @@ class ForkingSubmissionHandler:
                                  str(submission_tuple))
                 code = REJECT_ILLEGAL
 
-        except Exception, err:
+        except Exception as err:
             self.logger.warn("apply_submission -- ILLEGAL .zip(1): %s" %
                               str(submission_tuple))
             raise # DCI

          
@@ 886,7 886,7 @@ class ForkingSubmissionHandler:
     def commit_results(self, msg_id, submission_tuple, results):
         """ INTERNAL: Commit the results of a submission to the local repo. """
 
-        print "RESULTS: ", results
+        print("RESULTS: ", results)
         if len(results[3]) > 0 and sum([len(results[index]) for index in
                                         (0, 1, 2, 4)]) == 0: #HACK, fix order!
             raise NoChangesError()

          
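Two bytes-versus-text issues drive the submission.py changes beyond the mechanical ones. Hash objects only accept bytes under Python 3, hence new_sha(b'') (assuming new_sha is a thin wrapper over hashlib.sha1, as wormarc's shafunc name suggests). And ZipFile reads and writes binary data, so its in-memory buffer must be io.BytesIO rather than io.StringIO, which is what the bundle_wikitext() and apply_submission hunks above now use. A sketch under those assumptions:

    import hashlib
    import io
    from zipfile import ZipFile

    # hashlib.sha1('') raises TypeError under Python 3; bytes are required.
    empty_sha = hashlib.sha1(b'').hexdigest()
    assert empty_sha.startswith('da39a3ee')

    # ZipFile needs a binary buffer; a StringIO fails on the first write.
    buf = io.BytesIO()
    with ZipFile(buf, 'w') as arch:
        arch.writestr('__INFO__', 'version-info')  # str payload gets encoded
    raw_zip_bytes = buf.getvalue()

    # Reading the archive back also goes through BytesIO.
    assert ZipFile(io.BytesIO(raw_zip_bytes)).namelist() == ['__INFO__']
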
M infocalypse/test_block_redundancy.py +29 -29
@@ 40,17 40,17 @@ import shutil
 import sys
 import unittest
 
-from infcmds import UICallbacks, run_until_quiescent
-from fcpconnection import PolledSocket, FCPConnection
-from fcpclient import FCPClient
-from requestqueue import RequestRunner
-from statemachine import State
-from archivesm import ArchiveStateMachine, ArchiveUpdateContext, \
+from .infcmds import UICallbacks, run_until_quiescent
+from .fcpconnection import PolledSocket, FCPConnection
+from .fcpclient import FCPClient
+from .requestqueue import RequestRunner
+from .statemachine import State
+from .archivesm import ArchiveStateMachine, ArchiveUpdateContext, \
      create_dirs, start, InsertingRedundantBlocks, RequestingRedundantBlocks, \
      chk_file_name
 
-from updatesm import FAILING, FINISHING, QUIESCENT
-from graph import FREENET_BLOCK_LEN
+from .updatesm import FAILING, FINISHING, QUIESCENT
+from .graph import FREENET_BLOCK_LEN
 
 TEST_BASE = '/tmp'
 TEST_ROOT = '__block_test_run__'

          
@@ 105,13 105,13 @@ BAD_CHKS = bad_chk_itr()
 def break_primary(chks):
     chks = list(chks)
     assert len(chks) > 0
-    chks[0] = BAD_CHKS.next()
+    chks[0] = next(BAD_CHKS)
     return chks
 
 def break_redundant(chks):
     chks = list(chks)
     assert len(chks) > 0
-    chks[-1] = BAD_CHKS.next()
+    chks[-1] = next(BAD_CHKS)
     return chks
 
 # Not sure that this will work.

          
@@ 122,7 122,7 @@ class FakeUI:
     def status(self, text):
         if text.endswith('\n'):
             text = text[:-1]
-            print text
+            print(text)
 
 class HoldingBlocks(State):
     """ State to hold blocks for testing RequestingRedundantBlocks """

          
@@ 134,7 134,7 @@ class HoldingBlocks(State):
     def enter(self, dummy_from_state):
         """ State implemenation. """
 
-        print self.blocks
+        print(self.blocks)
         self.parent.transition(self.next_state)
 
     def reset(self):

          
@@ 197,20 197,20 @@ class RedundancyTests(unittest.TestCase)
 
 
     def checkCHK(self, chk, logical_len, length, data=None):
-        print "---"
-        print "Checking: ", chk
+        print("---")
+        print("Checking: ", chk)
         # Something is closing the connection?
         resp = FCPClient.connect(FCP_HOST, FCP_PORT).get(chk)
         self.assertTrue(resp[0] == 'AllData')
-        print "Length: ", len(resp[2])
-        print "Mime_Type: ", resp[1]['Metadata.ContentType']
+        print("Length: ", len(resp[2]))
+        print("Mime_Type: ", resp[1]['Metadata.ContentType'])
         if len(resp[2]) != length:
-            print "Expected len: %i, got: %i!" % (length, len(resp[2]))
+            print("Expected len: %i, got: %i!" % (length, len(resp[2])))
             self.assertTrue(False)
         if not data is None and resp[2][:logical_len] != data:
-            print "Data doesn't match! (only showing first 16 bytes below)"
-            print "got: ", repr(resp[2][:logical_len][:16])
-            print "expected: " , repr(data[:16])
+            print("Data doesn't match! (only showing first 16 bytes below)")
+            print("got: ", repr(resp[2][:logical_len][:16]))
+            print("expected: " , repr(data[:16]))
             self.assertTrue(False)
 
     def _testCheckCHK(self):

          
@@ 223,7 223,7 @@ class RedundancyTests(unittest.TestCase)
     def test_inserting(self):
         # Takes longer to insert existing blocks?
         offset = random.randrange(0, 256)
-        print "offset: ", offset
+        print("offset: ", offset)
         lengths = (FREENET_BLOCK_LEN - 1,
                    FREENET_BLOCK_LEN,
                    FREENET_BLOCK_LEN + 1,

          
@@ 267,9 267,9 @@ class RedundancyTests(unittest.TestCase)
 
         blocks = update_sm.states['TEST_STATE'].files
         for index, entry in enumerate(blocks):
-            print "block [%i]: len: %i" % (index, entry[1])
+            print("block [%i]: len: %i" % (index, entry[1]))
             for chk in entry[2]:
-                print "   ", chk
+                print("   ", chk)
 
         # FREENET_BLOCK_LEN - 1, first is unpadded
         self.checkCHK(blocks[0][2][0], blocks[0][1], blocks[0][1],

          
@@ 349,7 349,7 @@ class RedundancyTests(unittest.TestCase)
                 full_path = os.path.join(ctx.arch_cache_dir(),
                                          chk_file_name(chk))
                 if os.path.exists(full_path):
-                    print "Already cached: ", chk
+                    print("Already cached: ", chk)
                     self.assertTrue(False)
 
 

          
@@ 360,19 360,19 @@ class RedundancyTests(unittest.TestCase)
                 full_path = os.path.join(ctx.arch_cache_dir(),
                                          chk_file_name(chk))
                 if os.path.exists(full_path):
-                    print "%s: CACHED" % str((index, ordinal))
+                    print("%s: CACHED" % str((index, ordinal)))
                     self.assertTrue(os.path.getsize(full_path) ==
                                     block[0])
                     count += 1
                 else:
-                    print "%s: MISSING" % str((index, ordinal))
+                    print("%s: MISSING" % str((index, ordinal)))
             self.assertTrue(count > 0)
 
 
     # REQUIRES: test_inserting run first.
     def test_requesting_all(self):
         if not 'FILE_BLOCKS' in SHARED_STATE:
-            print "You must run test_inserting() before this test."
+            print("You must run test_inserting() before this test.")
             self.assertTrue(False)
 
         ctx, update_sm, start_state = self.setup_request_sm()

          
@@ 393,7 393,7 @@ class RedundancyTests(unittest.TestCase)
 
     def test_requesting_primary(self):
         if not 'FILE_BLOCKS' in SHARED_STATE:
-            print "You must run test_inserting() before this test."
+            print("You must run test_inserting() before this test.")
             self.assertTrue(False)
 
         ctx, update_sm, start_state = self.setup_request_sm()

          
@@ 414,7 414,7 @@ class RedundancyTests(unittest.TestCase)
 
     def test_requesting_redundant(self):
         if not 'FILE_BLOCKS' in SHARED_STATE:
-            print "You must run test_inserting() before this test."
+            print("You must run test_inserting() before this test.")
             self.assertTrue(False)
 
         ctx, update_sm, start_state = self.setup_request_sm()

          
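Generators lost their .next() method in Python 3; only the next() builtin (added in 2.6) remains, which is why BAD_CHKS.next() becomes next(BAD_CHKS) above. A minimal sketch with a stand-in generator (the real bad_chk_itr() yields broken CHK URIs):

    def bad_chk_itr():
        """Stand-in for the test's generator of deliberately bad CHKs."""
        count = 0
        while True:
            yield 'CHK@bad%04i' % count
            count += 1

    chks = bad_chk_itr()
    assert next(chks) == 'CHK@bad0000'   # Python 2 spelling was chks.next()
    assert next(chks) == 'CHK@bad0001'
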
M infocalypse/test_graph.py +74 -74
@@ 30,13 30,13 @@ import shutil
 from binascii import unhexlify
 
 from mercurial import hg, ui
-from bundlecache import BundleCache
-from graph import UpdateGraph, \
+from .bundlecache import BundleCache
+from .graph import UpdateGraph, \
      build_version_table, UpdateGraphException, \
      pull_bundle, FIRST_INDEX, hex_version, UpToDate
-from graphutil import parse_graph, graph_to_string, get_rollup_bounds, \
+from .graphutil import parse_graph, graph_to_string, get_rollup_bounds, \
      minimal_graph
-from chk import bytes_to_chk, CHK_SIZE
+from .chk import bytes_to_chk, CHK_SIZE
 
 # Fix these paths as necessary
 CACHE_DIR = '/tmp/bundle_cache' # MUST exist

          
@@ 73,29 73,29 @@ def set_chks(graph, edges, chks):
     """ Set the chks for edges to random values. """
     for edge in edges:
         length = graph.get_length(edge)
-        graph.set_chk(edge[:2], edge[2], length, chks.next())
+        graph.set_chk(edge[:2], edge[2], length, next(chks))
 
 def test_presentation():
     """ Smoke test graph_to_string and parse_graph. """
     graph = UpdateGraph()
-    print "EMPTY:"
-    print graph_to_string(graph)
-    print "Adding index: ", graph.add_index([VER_1, ], [VER_2, ])
-    print "Adding index: ", graph.add_index([VER_2, ], [VER_3, VER_4])
-    print "Adding index: ", graph.add_index([VER_3, VER_2], [VER_5, ])
+    print("EMPTY:")
+    print(graph_to_string(graph))
+    print("Adding index: ", graph.add_index([VER_1, ], [VER_2, ]))
+    print("Adding index: ", graph.add_index([VER_2, ], [VER_3, VER_4]))
+    print("Adding index: ", graph.add_index([VER_3, VER_2], [VER_5, ]))
     chks = fake_chks()
-    graph.add_edge((-1, 0), (100, chks.next()))
-    graph.add_edge((1, 2), (200, chks.next()))
-    graph.add_edge((-1, 2), (500, chks.next()))
+    graph.add_edge((-1, 0), (100, next(chks)))
+    graph.add_edge((1, 2), (200, next(chks)))
+    graph.add_edge((-1, 2), (500, next(chks)))
     text = graph_to_string(graph)
-    print
-    print text
-    print
+    print()
+    print(text)
+    print()
     graph1 = parse_graph(text)
-    print
+    print()
     text1 = graph_to_string(graph1)
-    print "Round trip:"
-    print text1
+    print("Round trip:")
+    print(text1)
     assert text == text1
 
 def test_update(repo_dir):

          
@@ 106,17 106,17 @@ def test_update(repo_dir):
     cache.remove_files()
     graph = UpdateGraph()
     graph.update(repo, ui, [1, 2], cache)
-    print graph_to_string(graph)
-    print
-    print
+    print(graph_to_string(graph))
+    print()
+    print()
     graph.update(repo, ui, [3, 4], cache)
 
-    print graph_to_string(graph)
-    print
-    print
+    print(graph_to_string(graph))
+    print()
+    print()
     graph.update(repo, ui, [6, ], cache)
 
-    print graph_to_string(graph)
+    print(graph_to_string(graph))
 
 def test_update_real(repo_dir, version_list=None, full=False):
     """ Smoke test graph.update(). """

          
@@ 131,31 131,31 @@ def test_update_real(repo_dir, version_l
 
     chks = fake_chks()
     for vers in version_list:
-        print "UPDATING TO: ", vers
+        print("UPDATING TO: ", vers)
         new_edges = graph.update(repo, ui, vers, cache)
         for edge in new_edges:
             length = graph.get_length(edge)
-            graph.set_chk(edge[:2], edge[2], length, chks.next())
+            graph.set_chk(edge[:2], edge[2], length, next(chks))
 
         # REDFLAG: should call minimal_graph for "real" behavior
         text = graph_to_string(graph)
-        print "GRAPH_LEN: ", len(text)
-        print text
+        print("GRAPH_LEN: ", len(text))
+        print(text)
 
     if full:
-        print "UPDATING TO: latest heads"
+        print("UPDATING TO: latest heads")
         try:
             new_edges = graph.update(repo, ui, None, cache)
             for edge in new_edges:
                 length = graph.get_length(edge)
-                graph.set_chk(edge[:2], edge[2], length, chks.next())
+                graph.set_chk(edge[:2], edge[2], length, next(chks))
 
             # REDFLAG: should call minimal_graph for "real" behavior
             text = graph_to_string(graph)
-            print "GRAPH_LEN: ", len(text)
-            print text
+            print("GRAPH_LEN: ", len(text))
+            print(text)
         except UpToDate:
-            print "Already has the head revs."
+            print("Already has the head revs.")
 
     return (graph, repo, cache)
 

          
@@ 170,8 170,8 @@ def test_minimal_graph(repo_dir, version
         cache = BundleCache(repo, ui_, CACHE_DIR)
         cache.remove_files()
         graph = parse_graph(open(file_name, 'rb').read())
-        print "--- from file: %s ---" % file_name
-        print graph_to_string(graph)
+        print("--- from file: %s ---" % file_name)
+        print(graph_to_string(graph))
     version_map = build_version_table(graph, repo)
 
     # Incomplete, but better than nothing.

          
@@ 182,37 182,37 @@ def test_minimal_graph(repo_dir, version
         chk_bounds[graph.get_chk(edge)] = (
             get_rollup_bounds(graph, repo, edge[0] + 1, edge[1], version_map))
 
-    print "CHK BOUNDS:"
+    print("CHK BOUNDS:")
     for value in chk_bounds:
-        print value
-        print "  ", chk_bounds[value]
-    print
+        print(value)
+        print("  ", chk_bounds[value])
+    print()
     sizes = (512, 1024, 2048, 4096, 16 * 1024)
     for max_size in sizes:
         try:
-            print "MAX:", max(version_map.values())
+            print("MAX:", max(version_map.values()))
             small = minimal_graph(graph, repo, version_map, max_size)
-            print "--- size == %i" % max_size
-            print graph_to_string(small)
+            print("--- size == %i" % max_size)
+            print(graph_to_string(small))
 
             small.rep_invariant(repo, True) # Full check
-            chks = chk_bounds.keys()
+            chks = list(chk_bounds.keys())
             path = small.get_top_key_edges()
-            print "TOP KEY EDGES:"
-            print path
+            print("TOP KEY EDGES:")
+            print(path)
             for edge in path:
                 # MUST rebuild the version map because the indices changed.
                 new_map = build_version_table(small, repo)
                 bounds = get_rollup_bounds(small, repo, edge[0] + 1,
                                            edge[1], new_map)
-                print "CHK:", small.get_chk(edge)
-                print "BOUNDS: ", bounds
+                print("CHK:", small.get_chk(edge))
+                print("BOUNDS: ", bounds)
                 assert chk_bounds[small.get_chk(edge)] == bounds
-                print "DELETING: ", edge, small.get_chk(edge)
+                print("DELETING: ", edge, small.get_chk(edge))
                 chks.remove(small.get_chk(edge))
             assert len(chks) == 0
-        except UpdateGraphException, err:
-            print "IGNORED: ", err
+        except UpdateGraphException as err:
+            print("IGNORED: ", err)
 
 def versions_str(version_list):
     """ Format a list of 40 digit hex versions for humans. """

          
@@ 261,7 261,7 @@ def hexlify_file(in_file):
     data = binascii.hexlify(open(in_file, 'rb').read())
     while len(data):
         chunk = data[:64]
-        print '+ "%s"' % chunk
+        print('+ "%s"' % chunk)
         data = data[len(chunk):]
 
 

          
@@ 442,13 442,13 @@ def dump_version_map(version_map):
         entry.add(version)
         reverse_map[index] = entry
 
-    indices = reverse_map.keys()
+    indices = list(reverse_map.keys())
     indices.sort()
-    print "---Version map---"
+    print("---Version map---")
     for index in indices:
-        print "%i:" % index
+        print("%i:" % index)
         for version in reverse_map[index]:
-            print "   ", version
+            print("   ", version)
 
 # Only compares first 12 digits so full ids can be compared
 # against short ones.

          
@@ 461,18 461,18 @@ def check_result(result_a, result_b):
     for outer in range(0, 2):
         for inner in range(0, len(result_a[outer])):
             if result_a[outer][inner][:12] != result_b[outer][inner][:12]:
-                print "MISMATCH:"
-                print result_a
-                print result_b
+                print("MISMATCH:")
+                print(result_a)
+                print(result_b)
                 assert False
 
 def dump_changesets(repo):
     """ Print all the changesets in a repo. """
-    print "---"
+    print("---")
     max_rev = repo['tip'].rev()
     for rev in range(-1, max_rev + 1):
-        print hex_version(repo, rev)
-    print "---"
+        print(hex_version(repo, rev))
+    print("---")
 # There are many, many ways to fail.
 # More testing would be good.
 

          
@@ 539,8 539,8 @@ def test_rollup():
     edges = graph.update(repo, ui_, ['2f6c65f64ce5', ], cache)
     set_chks(graph, edges, chks)
 
-    print
-    print graph_to_string(graph)
+    print()
+    print(graph_to_string(graph))
     version_map = build_version_table(graph, repo)
 
     dump_version_map(version_map)

          
@@ 548,20 548,20 @@ def test_rollup():
 
     graph.rep_invariant(repo, True) # Verify contiguousness.
 
-    print "From earliest..."
+    print("From earliest...")
     for index in range(0, graph.latest_index + 1):
         parents, heads = get_rollup_bounds(graph, repo, 0, index, version_map)
-        print "(%i->%i): %s" % (0, index, versions_str(heads))
-        print "       ", versions_str(parents)
+        print("(%i->%i): %s" % (0, index, versions_str(heads)))
+        print("       ", versions_str(parents))
 
 
-    print "To latest..."
+    print("To latest...")
     for index in range(0, graph.latest_index + 1):
         parents, heads = get_rollup_bounds(graph, repo, index,
                                            graph.latest_index,
                                            version_map)
-        print "(%i->%i): %s" % (index, graph.latest_index, versions_str(heads))
-        print "       ", versions_str(parents)
+        print("(%i->%i): %s" % (index, graph.latest_index, versions_str(heads)))
+        print("       ", versions_str(parents))
 
 
     # Empty

          
@@ 570,7 570,7 @@ def test_rollup():
                           version_map)
     except AssertionError:
         # Asserted as expected for to_index == FIRST_INDEX
-        print "Got expected assertion."
+        print("Got expected assertion.")
 
     # Rollup of one changeset index.
     result = get_rollup_bounds(graph, repo, 0, 0, version_map)

          
@@ 591,13 591,13 @@ def test_rollup():
 
     # Rollup with head pulled in from earlier base.
     result = get_rollup_bounds(graph, repo, 3, 8, version_map)
-    print result
+    print(result)
     check_result(result, (('4409936ef21f', '62a72a238ffc', ),
                           ('03c047d036ca', '7429bf7b11f5')))
 
     # Rollup after remerge to a single head.
     result = get_rollup_bounds(graph, repo, 0, 9, version_map)
-    print result
+    print(result)
     check_result(result, (('000000000000', ), ('2f6c65f64ce5', )))
 
 if __name__ == "__main__":

          
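dump_version_map() and the other test helpers above get the mechanical 2to3 treatment: list(d.keys()) followed by an in-place .sort(). That is correct under Python 3, though sorted() expresses the same thing in one step; a sketch with a made-up map:

    reverse_map = {2: 'deadbeef', 0: '0badc0de', 1: '12345678'}

    # 2to3 output: materialize the view, then sort in place.
    indices = list(reverse_map.keys())
    indices.sort()

    # Equivalent one-step form.
    assert indices == sorted(reverse_map.keys()) == [0, 1, 2]
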
M infocalypse/test_merging.py +25 -25
@@ 6,16 6,16 @@ import unittest
 
 from mercurial import ui, hg, commands
 
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('wormarc')
 from shafunc import new_sha
 
 add_parallel_sys_path('fniki')
 from fileoverlay import get_file_funcs
 
-from graph import hex_version
-from submission import bundle_wikitext, ForkingSubmissionHandler, SubmitError
-from hgoverlay import HgFileOverlay
+from .graph import hex_version
+from .submission import bundle_wikitext, ForkingSubmissionHandler, SubmitError
+from .hgoverlay import HgFileOverlay
 
 TEST_BASE = '/tmp'
 TEST_ROOT = '__merging_test_run__'

          
@@ 34,7 34,7 @@ class RepoTests(unittest.TestCase):
         self.tmp_dir  = os.path.join(self.test_root, TMP_DIR)
 
         if LEAVE_TEST_DIR and os.path.exists(full_path):
-            print "Cleaning up directory from previous test run..."
+            print("Cleaning up directory from previous test run...")
             self.remove_test_dirs()
 
         if os.path.exists(full_path):

          
@@ 70,7 70,7 @@ class RepoTests(unittest.TestCase):
             full_path = os.path.join(repo.root, fname)
             dname = os.path.dirname(full_path)
             if dname and not os.path.exists(dname):
-                print "CREATED: ", dname
+                print("CREATED: ", dname)
                 os.makedirs(dname)
 
             out_file = open(full_path, 'wb')

          
@@ 141,7 141,7 @@ class Logging:
         pass
     @classmethod
     def out(cls, msg):
-        print msg
+        print(msg)
     def trace(self, msg):
         self.out("T:" + str(msg))
     def debug(self, msg):

          
@@ 150,10 150,10 @@ class Logging:
         self.out("W:" + str(msg))
 
 def needs_commit():
-    print "NEEDS COMMIT"
+    print("NEEDS COMMIT")
 
 def committed(result):
-    print "COMMITTED: %s" % str(result)
+    print("COMMITTED: %s" % str(result))
 
 DEFAULT_WIKI_ROOT = 'wiki_root'
 DEFAULT_SUBMITTER = 'freenetizen@this_is_not_a_real_fms_id'

          
@@ 172,8 172,8 @@ class NoConflictTests(RepoTests):
                                      'This is a page.\n'),),
                              'automagically generated test repo.')
         cloned = self.clone_repo(repo, 'snarfu')
-        print "REPO: ", repo.root
-        print "CLONED: ", cloned.root
+        print("REPO: ", repo.root)
+        print("CLONED: ", cloned.root)
 
 
     ############################################################

          
@@ 576,7 576,7 @@ class ConflictTests(RepoTests):
             return False
 
         if new_sha(overlay.read(versioned_path)).hexdigest() != sha_value:
-            print "SHA FAILS: ", versioned_path
+            print("SHA FAILS: ", versioned_path)
             self.assertTrue(False)
 
         # quick and dirty test for has forks

          
@@ 789,13 789,13 @@ class ConflictTests(RepoTests):
                  'This fork 1 of the front page.\n',
                  'This fork 2 of the front page.\n',)
 
-        print "---"
-        print "Main  : FrontPage"
-        print "fork 1: ", ("%s_%s" % (page_path, new_sha(texts[1]).
-                                          hexdigest()))
-        print "fork 2: ", ("%s_%s" % (page_path, new_sha(texts[2]).
-                                          hexdigest()))
-        print "---"
+        print("---")
+        print("Main  : FrontPage")
+        print("fork 1: ", ("%s_%s" % (page_path, new_sha(texts[1]).
+                                          hexdigest())))
+        print("fork 2: ", ("%s_%s" % (page_path, new_sha(texts[2]).
+                                          hexdigest())))
+        print("---")
         self.commit_revision(server_repo,
                              ((page_path,
                                texts[0]),

          
@@ 830,9 830,9 @@ class ConflictTests(RepoTests):
         try:
             raw_zip_bytes = self.make_submission_zip(client_repo)
             self.assertTrue(False)
-        except SubmitError, err0:
-            print "Got expected error:"
-            print err0
+        except SubmitError as err0:
+            print("Got expected error:")
+            print(err0)
             self.assertTrue(err0.illegal)
 
         # Resolve one fork in client overlay.

          
@@ 845,9 845,9 @@ class ConflictTests(RepoTests):
         try:
             raw_zip_bytes = self.make_submission_zip(client_repo)
             self.assertTrue(False)
-        except SubmitError, err1:
-            print "Got second expected error:"
-            print err1
+        except SubmitError as err1:
+            print("Got second expected error:")
+            print(err1)
             self.assertTrue(err1.illegal)
 
 

          
M infocalypse/test_topkey.py +1 -1
@@ 20,7 20,7 @@ 
 """
 
 
-from topkey import top_key_tuple_to_bytes, bytes_to_top_key_tuple, \
+from .topkey import top_key_tuple_to_bytes, bytes_to_top_key_tuple, \
      dump_top_key_tuple
 
 BAD_CHK1 = ('CHK@badroutingkey155JblbGup0yNSpoDJgVPnL8E5WXoc,'

          
M infocalypse/topkey.py +13 -13
@@ 42,9 42,9 @@ import struct
 
 from binascii import hexlify, unhexlify
 
-from fcpconnection import sha1_hexdigest
+from .fcpconnection import sha1_hexdigest
 
-from chk import CHK_SIZE, bytes_to_chk, chk_to_bytes
+from .chk import CHK_SIZE, bytes_to_chk, chk_to_bytes
 
 # Known versions:
 # 1.00 -- Initial release.

          
@@ 178,23 178,23 @@ def bytes_to_top_key_tuple(bytes):
     bytes = bytes[BASE_LEN:]
     if hdr != HDR_BYTES:
         if hdr == HDR_V1:
-            print
-            print
-            print "NOTE:"
-            print "hg update -r f67283c92051"
-            print "Will get you back to a version of the"
-            print "infocalypse source that can read this format."
-            print
-            print
+            print()
+            print()
+            print("NOTE:")
+            print("hg update -r f67283c92051")
+            print("Will get you back to a version of the")
+            print("infocalypse source that can read this format.")
+            print()
+            print()
             raise ValueError("Format version mismatch. "
                              + "That repo is in an obsolete format!")
         if hdr[5] != MAJOR_VERSION:
             # DOH! should have done this in initial release.
             raise ValueError("Format version mismatch. "
                              + "Maybe you're running old code?")
-        print "bytes_to_top_key_data -- minor version mismatch: ", hdr
+        print("bytes_to_top_key_data -- minor version mismatch: ", hdr)
     if len(bytes) == 0:
-        print "bytes_to_top_key_data -- No updates?"
+        print("bytes_to_top_key_data -- No updates?")
 
     graph_chks = []
     for dummy in range(0, graph_chk_count):

          
@@ 213,7 213,7 @@ def default_out(text):
     """ Default output function for dump_top_key_tuple(). """
     if text.endswith('\n'):
         text = text[:-1]
-    print text
+    print(text)
 
 def dump_top_key_tuple(top_key_tuple, out_func=default_out):
     """ Debugging function to print a top_key_tuple. """

          
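One pitfall the topkey.py hunks leave alone: indexing bytes yields an int under Python 3, while slicing yields bytes. If hdr is a bytes header, the surviving hdr[5] != MAJOR_VERSION comparison can never be equal while MAJOR_VERSION is still a one-character string, so the version check would always report a mismatch. A sketch (the header value here is hypothetical):

    hdr = b'HGINF100'
    MAJOR_VERSION = '1'

    assert hdr[5] == ord('1')    # indexing bytes gives an int in Python 3
    assert hdr[5:6] == b'1'      # slicing preserves the bytes type

    # int versus str never compares equal, silently:
    assert hdr[5] != MAJOR_VERSION
    assert hdr[5:6].decode('ascii') == MAJOR_VERSION
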
M infocalypse/updatesm.py +15 -15
@@ 24,31 24,31 @@ import os
 import random
 import time
 
-from fcpclient import get_ssk_for_usk_version, get_usk_for_usk_version, \
+from .fcpclient import get_ssk_for_usk_version, get_usk_for_usk_version, \
      is_usk, is_ssk, is_usk_file, get_version, get_negative_usk, \
      make_search_uris, make_frozen_uris, ssk_to_usk
 
-from fcpconnection import SUCCESS_MSGS
-from fcpmessage import GET_DEF, PUT_FILE_DEF, GET_REQUEST_URI_DEF
+from .fcpconnection import SUCCESS_MSGS
+from .fcpmessage import GET_DEF, PUT_FILE_DEF, GET_REQUEST_URI_DEF
 
-from requestqueue import RequestQueue
+from .requestqueue import RequestQueue
 
-from chk import clear_control_bytes
-from bundlecache import make_temp_file, BundleException
-from graph import INSERT_NORMAL, INSERT_PADDED, INSERT_SALTED_METADATA, \
+from .chk import clear_control_bytes
+from .bundlecache import make_temp_file, BundleException
+from .graph import INSERT_NORMAL, INSERT_PADDED, INSERT_SALTED_METADATA, \
      INSERT_HUGE, FREENET_BLOCK_LEN, has_version, \
      pull_bundle, hex_version
-from graphutil import minimal_graph, graph_to_string, parse_graph
-from choose import get_top_key_updates
+from .graphutil import minimal_graph, graph_to_string, parse_graph
+from .choose import get_top_key_updates
 
-from statemachine import StatefulRequest, RequestQueueState, StateMachine, \
+from .statemachine import StatefulRequest, RequestQueueState, StateMachine, \
      Quiescent, Canceling, RetryingRequestList, CandidateRequest, \
      DecisionState, RunningSingleRequest, require_state, delete_client_file
 
-from insertingbundles import InsertingBundles
-from requestingbundles import RequestingBundles
+from .insertingbundles import InsertingBundles
+from .requestingbundles import RequestingBundles
 
-import topkey
+from . import topkey
 
 HG_MIME_TYPE = 'application/mercurial-bundle'
 HG_MIME_TYPE_FMT = HG_MIME_TYPE + '_%i'

          
@@ 256,7 256,7 @@ class CleaningUp(Canceling):
         self.pending.update(self.parent.ctx.orphaned)
         self.parent.ctx.orphaned.clear()
         # Hmmm... should be ok to recancel already canceled requests.
-        for request in self.pending.values():
+        for request in list(self.pending.values()):
             self.parent.runner.cancel_request(request)
         if len(self.pending) == 0:
             self.parent.transition(self.finished_state)

          
@@ 947,7 947,7 @@ class UpdateStateMachine(RequestQueue, S
         ctx.ui_ = self.ctx.ui_
         ctx.bundle_cache = self.ctx.bundle_cache
         if len(self.ctx.orphaned) > 0:
-            print "BUG?: Abandoning orphaned requests."
+            print("BUG?: Abandoning orphaned requests.")
             self.ctx.orphaned.clear()
 
         self.ctx = ctx

          
M infocalypse/validate.py +3 -3
@@ 39,9 39,9 @@ def is_hex_string(value, length=12):
 # http://wiki.freenetproject.org/Base64
 FREENET_BASE64_CHARS = frozenset(
     [ chr(c) for c in
-      (range(ord('0'), ord('9') + 1)
-       + range(ord('a'), ord('z') + 1)
-       + range(ord('A'), ord('Z') + 1)
+      (list(range(ord('0'), ord('9') + 1))
+       + list(range(ord('a'), ord('z') + 1))
+       + list(range(ord('A'), ord('Z') + 1))
        + [ord('~'), ord('-')])
       ])
 

          
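The validate.py change is needed because Python 2's range() returned a list, which supported concatenation with +; Python 3's range object does not, hence the list() wrappers around each range. In sketch form:

    # range(...) + range(...) raises TypeError under Python 3.
    digits = list(range(ord('0'), ord('9') + 1))
    lower = list(range(ord('a'), ord('z') + 1))
    chars = frozenset(chr(c) for c in digits + lower + [ord('~'), ord('-')])

    assert '0' in chars and 'z' in chars and '~' in chars
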
M infocalypse/wikibot.py +19 -19
@@ 26,30 26,30 @@ import time
 
 from mercurial import ui, hg, commands
 
-from fcpmessage import GET_DEF, PUT_COMPLEX_DIR_DEF
-from fcpclient import parse_progress, get_file_infos, \
+from .fcpmessage import GET_DEF, PUT_COMPLEX_DIR_DEF
+from .fcpclient import parse_progress, get_file_infos, \
      set_index_file, dir_data_source
 
-from requestqueue import QueueableRequest, RequestQueue
+from .requestqueue import QueueableRequest, RequestQueue
 
-from validate import is_hex_string
-from chk import ENCODED_CHK_SIZE
-from fms import TrustCache, to_msg_string
-from fmsbot import FMSBot
-from submission import ForkingSubmissionHandler, \
+from .validate import is_hex_string
+from .chk import ENCODED_CHK_SIZE
+from .fms import TrustCache, to_msg_string
+from .fmsbot import FMSBot
+from .submission import ForkingSubmissionHandler, \
      REJECT_NOTRUST, REJECT_FCPFAIL, REJECT_APPLIED
 
-from bundlecache import BundleCache, is_writable, make_temp_file
-from updatesm import UpdateContext, UpdateStateMachine, QUIESCENT, FINISHING
-from infcmds import UICallbacks, set_debug_vars
+from .bundlecache import BundleCache, is_writable, make_temp_file
+from .updatesm import UpdateContext, UpdateStateMachine, QUIESCENT, FINISHING
+from .infcmds import UICallbacks, set_debug_vars
 
 # freesite insert stuff
-from statemachine import StatefulRequest
-from sitecmds import dump_wiki_html
+from .statemachine import StatefulRequest
+from .sitecmds import dump_wiki_html
 
-from wikibotctx import WikiBotContext, context_to_str
+from .wikibotctx import WikiBotContext, context_to_str
 
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('fniki')
 
 HTML_DUMP_DIR = '__html_dump_deletable__'

          
@@ 88,8 88,8 @@ def parse_submission(fms_id, lines, usk_
 
         Returns None if no submission could be parsed.
     """
-    print "LINES:"
-    print lines
+    print("LINES:")
+    print(lines)
     for line in lines:
         if not line.startswith('W:'):
             continue

          
@@ 185,7 185,7 @@ class WikiBot(FMSBot, RequestQueue):
         self.applier.base_dir = os.path.join(self.repo.root,
                                              self.params['WIKI_ROOT'])
 
-        print "BASE_DIR:", self.applier.base_dir
+        print("BASE_DIR:", self.applier.base_dir)
         
         # 2qt?
         self.applier.notify_needs_commit = (

          
@@ 204,7 204,7 @@ class WikiBot(FMSBot, RequestQueue):
                 self.trace("on_shutdown -- removed lock file: %s"
                            % self.parent.get_path(self, 'pid'))
 
-            except IOError, err:
+            except IOError as err:
                 self.warn("on_shutdown -- err: %s" % str(err))
 
         self._send_status_notification('STOPPED')

          
M infocalypse/wikibotctx.py +1 -1
@@ 22,7 22,7 @@ 
 import shelve
 import time
 
-from fcpclient import get_version, get_usk_for_usk_version
+from .fcpclient import get_version, get_usk_for_usk_version
 
 def pretty_timeout(future_time):
     """ Return a human readable string for a timeout. """

          
M infocalypse/wikicmds.py +13 -13
@@ 20,24 20,24 @@ 
 """
 
 import os
-import StringIO
+import io
 
 from binascii import hexlify
 
 from mercurial import util
 
-from config import write_default_config, read_freesite_cfg, normalize
-from submission import bundle_wikitext, unbundle_wikitext, get_info, \
+from .config import write_default_config, read_freesite_cfg, normalize
+from .submission import bundle_wikitext, unbundle_wikitext, get_info, \
      NoChangesError, validate_wikitext
-from hgoverlay import HgFileOverlay
-from infcmds import setup, run_until_quiescent, cleanup
-from statemachine import StatefulRequest
-from fcpmessage import PUT_FILE_DEF, GET_DEF
-from graph import FREENET_BLOCK_LEN, has_version
-from updatesm import QUIESCENT, FINISHING, RUNNING_SINGLE_REQUEST
-from bundlecache import make_temp_file
+from .hgoverlay import HgFileOverlay
+from .infcmds import setup, run_until_quiescent, cleanup
+from .statemachine import StatefulRequest
+from .fcpmessage import PUT_FILE_DEF, GET_DEF
+from .graph import FREENET_BLOCK_LEN, has_version
+from .updatesm import QUIESCENT, FINISHING, RUNNING_SINGLE_REQUEST
+from .bundlecache import make_temp_file
 # HACK
-from pathhacks import add_parallel_sys_path
+from .pathhacks import add_parallel_sys_path
 add_parallel_sys_path('fniki')
 
 import servepiki

          
@@ 203,7 203,7 @@ def execute_wiki_apply(ui_, repo, params
                         final_msg[2]
             assert request.response[0] == 'AllData'
             ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
-            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
+            base_ver, submitter = get_info(io.BytesIO(raw_bytes))
             ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                        % (base_ver[:12], submitter))
 

          
@@ 231,7 231,7 @@ def execute_wiki_apply(ui_, repo, params
             overlay.version = base_ver
             validate_wikitext(overlay)
             updates = unbundle_wikitext(overlay,
-                                        StringIO.StringIO(raw_bytes))
+                                        io.BytesIO(raw_bytes))
             for index, label in enumerate(('CREATED', 'MODIFIED', 'REMOVED',
                                            'ALREADY PATCHED')):
                 if len(updates[index]) > 0:

          
M infocalypse/wot.py +12 -12
@@ 1,15 1,15 @@ 
 import fcp
 from mercurial import util
-from config import Config
+from .config import Config
 import xml.etree.ElementTree as ET
 from defusedxml.ElementTree import fromstring
 import smtplib
 import atexit
-from keys import USK
+from .keys import USK
 import yaml
 from email.mime.text import MIMEText
 import imaplib
-from wot_id import Local_WoT_ID, WoT_ID
+from .wot_id import Local_WoT_ID, WoT_ID
 
 # TODO: Instead of fcpport and fcphost, functions should accept a node
 

          
@@ 133,7 133,7 @@ def check_notifications(ui, local_identi
     # ')',
 
     # Exclude closing parens, which are of length one.
-    subjects = filter(lambda x: len(x) == 2, subjects)
+    subjects = [x for x in subjects if len(x) == 2]
 
     subjects = [x[1] for x in subjects]
 

          
@@ 141,7 141,7 @@ def check_notifications(ui, local_identi
     subjects = dict((message_number, subject[len('Subject: '):].rstrip()) for
                     message_number, subject in zip(message_numbers, subjects))
 
-    for message_number, subject in subjects.iteritems():
+    for message_number, subject in subjects.items():
         status, fetched = imap.fetch(str(message_number),
                                      r'(body[text] '
                                      r'body[header.fields From)')

          
@@ 189,7 189,7 @@ def read_message_yaml(ui, from_address, 
 
         if not require('vcs', request) or not require('request', request):
             return
-    except yaml.YAMLError, e:
+    except yaml.YAMLError as e:
         ui.status("Notification '%s' has a request but it is not properly"
                   " formatted. Details:\n%s\n" % (subject, e))
         return

          
@@ 294,7 294,7 @@ def build_repo_list(ui, for_identity):
     repos = []
 
     # Add request URIs associated with the given identity.
-    for request_uri in config.request_usks.itervalues():
+    for request_uri in config.request_usks.values():
         if config.get_wot_identity(request_uri) == for_identity.identity_id:
             repos.append(request_uri)
 

          
@@ 375,11 375,11 @@ def read_repo_listing(ui, identity, fcph
     # and these problems should be pointed out (or prevented) for local repo
     # lists.
 
-    for name in repositories.iterkeys():
+    for name in repositories.keys():
         ui.status("Found repository \"{0}\".\n".format(name))
 
     # Convert values from USKs to strings - USKs are not expected elsewhere.
-    for key in repositories.keys():
+    for key in list(repositories.keys()):
         repositories[key] = str(repositories[key])
 
     return repositories

          
@@ 399,7 399,7 @@ def fetch_edition(uri, fcphost=None, fcp
     # TODO: Is there ever legitimately more than one redirect?
     try:
         return node.get(str(uri), priority=1)
-    except fcp.FCPGetFailed, e:
+    except fcp.FCPGetFailed as e:
         # Error code 27 is permanent redirect: there's a newer edition of
         # the USK.
         # https://wiki.freenetproject.org/FCPv2/GetFailed#Fetch_Error_Codes

          
@@ 548,10 548,10 @@ def execute_setup_freemail(ui, local_id,
         # TODO: Is this the correct way to get the configured host?
         smtp = smtplib.SMTP(host, port)
         smtp.login(address, password)
-    except smtplib.SMTPAuthenticationError, e:
+    except smtplib.SMTPAuthenticationError as e:
         raise util.Abort("Could not log in with the given password.\nGot '{0}'\n"
                          .format(e.smtp_error))
-    except smtplib.SMTPConnectError, e:
+    except smtplib.SMTPConnectError as e:
         raise util.Abort("Could not connect to server.\nGot '{0}'\n"
                          .format(e.smtp_error))
 

          
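wot.py collects the remaining dictionary-protocol renames: iteritems(), itervalues() and iterkeys() are gone, and the plain items()/values()/keys() now return lazy views, so they substitute directly unless the dictionary is resized mid-loop (then a list() snapshot, as in read_repo_listing above). filter() is likewise lazy in Python 3, which the list-comprehension rewrite above sidesteps. A sketch with made-up data:

    repositories = {'repo-a': 'USK@aaa', 'repo-b': 'USK@bbb'}

    # Python 2: repositories.iteritems(); Python 3: items() is already lazy.
    for name, uri in repositories.items():
        assert uri.startswith('USK@')

    # Reassigning values is fine; resizing during iteration is not, hence:
    for key in list(repositories.keys()):
        repositories[key] = str(repositories[key])

    # filter() returns an iterator; a comprehension yields the list directly.
    subjects = [('Re:', 'a'), (')',), ('Re:', 'b')]
    assert [s for s in subjects if len(s) == 2] == [('Re:', 'a'), ('Re:', 'b')]
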
M infocalypse/wot_id.py +9 -9
@@ 2,7 2,7 @@ import fcp
 from mercurial import util
 import string
 import atexit
-from keys import USK
+from .keys import USK
 from base64 import b32encode
 from fcp.node import base64decode
 

          
@@ 52,7 52,7 @@ class WoT_ID(object):
         self.properties = {}
         context_prefix = "Replies.Contexts{0}.Context".format(id_num)
         property_prefix = "Replies.Properties{0}.Property".format(id_num)
-        for key in message.iterkeys():
+        for key in message.keys():
             if key.startswith(context_prefix):
                 self.contexts.append(message[key])
             elif key.startswith(property_prefix) and key.endswith(".Name"):

          
@@ 70,8 70,8 @@ class WoT_ID(object):
                 # value conflict restricts the message to cases where it
                 # actually has an effect.
                 if name in self.properties and value != self.properties[name]:
-                    print("WARNING: '{0}' has conflicting value as a property."
-                          .format(name))
+                    print(("WARNING: '{0}' has conflicting value as a property."
+                          .format(name)))
 
                 self.properties[name] = value
 

          
@@ 94,9 94,9 @@ class WoT_ID(object):
 
         # TODO: Would it be preferable to use ui to obey quieting switches?
         if is_local_identity:
-            print("Using local identity {0}".format(self))
+            print(("Using local identity {0}".format(self)))
         else:
-            print("Using identity {0}".format(self))
+            print(("Using identity {0}".format(self)))
 
     def __str__(self):
         return self.nickname + '@' + self.identity_id

          
@@ 293,7 293,7 @@ def _get_local_identity(wot_identifier, 
     prefix = 'Replies.Nickname'
     # Key: nickname, value (id_num, public key hash).
     matches = {}
-    for key in response.iterkeys():
+    for key in response.keys():
         if key.startswith(prefix) and \
                 response[key].startswith(nickname_prefix):
 

          
@@ 308,7 308,7 @@ def _get_local_identity(wot_identifier, 
 
     # Remove matching nicknames not also matching the (possibly partial)
     # public key hash.
-    for key in matches.keys():
+    for key in list(matches.keys()):
         # public key hash is second member of value tuple.
         if not matches[key][1].startswith(key_prefix):
             del matches[key]

          
@@ 324,7 324,7 @@ def _get_local_identity(wot_identifier, 
     assert len(matches) == 1
 
     # id_num is first member of value tuple.
-    only_key = matches.keys()[0]
+    only_key = list(matches.keys())[0]
     id_num = matches[only_key][0]
 
     return id_num, response

          
M wormarc/archive.py +1 -1
@@ 351,7 351,7 @@ class WORMBlockArchive:
         # Compute the "real" size of each block without unreferenced links
         real_lens = [0 for dummy in range(0, len(self.blocks.tags))]
 
-        for links in self.blocks.link_map.values():
+        for links in list(self.blocks.link_map.values()):
             for link in links:
                 if not link[0] in referenced_shas:
                     continue

          
M wormarc/binaryrep.py +1 -1
@@ 168,7 168,7 @@ def write_file_manifest(name_map, out_st
     out_stream.write(struct.pack(COUNT_FMT, len(name_map)))
     # Sort to make it easier for diff algos to find contiguous
     # changes.
-    names = name_map.keys()
+    names = list(name_map.keys())
     names.sort()
     for name in names:
         length = MANIFEST_ENTRY_HDR_LEN + len(name)

          
M wormarc/filemanifest.py +13 -13
@@ 168,8 168,8 @@ class FileManifest:
                 info.release()
 
         if not updated:
             if (frozenset(new_name_map.keys()) ==
                 frozenset(self.name_map.keys())):
                 raise UpToDateException("The file manifest is up to date.")
 
         # Add updated manifest

          
@@ 218,7 218,7 @@ class FileManifest:
 
             new_shas = archive.uncommited_shas()
 
-            for value in new_names.values():
+            for value in list(new_names.values()):
                 if value[1] in new_shas:
                     # Adding history for new values is handled by
                     # commit_update().

          
@@ 248,28 248,28 @@ def verify_manifest(archive, manifest, b
         tmp = archive.blocks.tmps.make_temp_file()
         file_sha, link_sha = manifest.name_map[name]
         if not brief:
-            print "Verifying: %s  %s => %s)" % (name,
+            print("Verifying: %s  %s => %s)" % (name,
                                               str_sha(file_sha),
-                                              str_sha(link_sha))
+                                              str_sha(link_sha)))
         archive.get_file(link_sha, tmp)
         history = archive.blocks.get_history(link_sha)
         if not brief:
-            print "History: " + " ".join([str_sha(link[0])
-                                          for link in history])
+            print("History: " + " ".join([str_sha(link[0])
+                                          for link in history]))
 
         retrieved_sha = get_file_sha(tmp)
         if retrieved_sha != file_sha:
-            print "Expected: %s, but got %s." % (str_sha(file_sha),
-                                                 str_sha(retrieved_sha))
+            print("Expected: %s, but got %s." % (str_sha(file_sha),
+                                                 str_sha(retrieved_sha)))
             failures += 1
         else:
             if not brief:
-                print "Ok. Read %i bytes." % os.path.getsize(tmp)
+                print("Ok. Read %i bytes." % os.path.getsize(tmp))
 
         archive.blocks.tmps.remove_temp_file(tmp)
 
     if failures > 0:
-        print "%i entries failed to verify!" % failures
+        print("%i entries failed to verify!" % failures)
         assert False
 
 def fix_backwards_slashes(name):

          
@@ 381,7 381,7 @@ def validate_path(base_dir, full_path):
     """ Catch references to direcories above base_dir. """
     base_dir = os.path.abspath(base_dir)
 
-    if type(full_path) is unicode:
-        raise IOError("Unicode path name: %s" % repr(full_path))
+    # Python 3: text paths are str, so rejecting str would reject every
+    # path. Reject raw bytes instead; the ASCII check below still applies.
+    if isinstance(full_path, bytes):
+        raise IOError("Bytes path name: %s" % repr(full_path))
     if not is_printable_ascii(full_path):
         raise IOError("Non-ASCII path name: %s" % repr(full_path))

          
@@ 413,7 413,7 @@ def manifest_to_dir(archive, manifest, t
         return (create, overwrite, set(remove.keys()), remove_dirs)
 
     # Remove files
-    for victim in remove.values():
+    for victim in list(remove.values()):
         if os.path.exists(victim):
             validate_path(target_dir, victim)
             os.remove(victim)

          
M wormarc/linkmap.py +3 -3
@@ 73,7 73,7 @@ class LinkMap(dict):
     # Omit from fixups == delete
     def _update_block_ordinals(self, fixups):
         """ INTERNAL: Implementation helper for update_blocks(). """
-        for sha_hash in self.keys():
+        for sha_hash in list(self.keys()):
             prev = self.get(sha_hash)
             updated = []
             for link in prev:

          
@@ 171,7 171,7 @@ def links_by_block(link_map):
     """ INTERNAL: Implementation helper function for
         verify_link_map(). """
     tables = [{} for dummy in range(0, len(link_map.files))]
-    for links in link_map.values():
+    for links in list(link_map.values()):
         assert len(links) > 0
         for link in links:
             ordinal = link[5]

          
@@ 192,7 192,7 @@ def verify_link_map(link_map):
         raw_shas = raw_block_read(link_map, ordinal)
         # Hashes read from the raw file are the same as
         # the ones that the LinkMap thinks should be in the file.
         assert frozenset(raw_shas.keys()) == frozenset(by_block[ordinal].keys())
 
         # Now check values.
         for link_sha in raw_shas:

          
M wormarc/test_archive.py +59 -59
@@ 93,11 93,11 @@ class HandleTemps(ITempFileManager):
         if not os.path.exists(full_path):
             return
 
-        if full_path in self.callers.keys():
+        if full_path in self.callers:
             del self.callers[full_path]
         else:
-            print "HandleTemps.remove_file() -- removing non-managed file???"
-            print full_path
+            print("HandleTemps.remove_file() -- removing non-managed file???")
+            print(full_path)
 
         os.remove(full_path)
 

          
@@ 106,18 106,18 @@ class HandleTemps(ITempFileManager):
             if not os.path.exists(name):
                 continue
 
-            print "LEAKED: ", name
-            print "FROM:"
-            print self.callers[name]
+            print("LEAKED: ", name)
+            print("FROM:")
+            print(self.callers[name])
 
         if len(os.listdir(self.base_dir)) > 0:
             file_count = 0
             for name in os.listdir(self.base_dir):
                 if os.path.isdir(os.path.join(self.base_dir, name)):
                     # Allow directories. e.g. __hg_repo__, __unarchived__.
-                    print "HandleTemps.check_for_leaks -- ignored dir: ", name
+                    print("HandleTemps.check_for_leaks -- ignored dir: ", name)
                     continue
-                print name
+                print(name)
                 file_count += 1
 
             if file_count > 0:

          
@@ 125,7 125,7 @@ class HandleTemps(ITempFileManager):
 
 def dump_blocks(blocks, msg=None, brief=False):
     if not msg is None:
-        print msg
+        print(msg)
     values = []
     for index in range(0, len(blocks.tags)):
         path = blocks.full_path(index)

          
@@ 139,9 139,9 @@ def dump_blocks(blocks, msg=None, brief=
             values.append("%s:[%s]" % (path, length))
 
     if brief:
-        print "blocks: " + " ".join(values)
+        print("blocks: " + " ".join(values))
     else:
-        print "blocks\n" + "\n".join(values)
+        print("blocks\n" + "\n".join(values))
 
 def link_str(link):
     return "(%s, %i, %s, data: %s, %i, %s)" % (str_sha(link[0]),

          
@@ 152,40 152,40 @@ def link_str(link):
                                                link[5])
 def dump_links(links, msg=None):
     if not msg is None:
-        print msg
+        print(msg)
     for link in links:
-        print link_str(link)
+        print(link_str(link))
 
 def dump_link_map(link_map, msg=None, brief=False):
     if not msg is None:
-        print msg
-    print "keys: ", len(link_map)
+        print(msg)
+    print("keys: ", len(link_map))
     if brief:
         return
-    keys = link_map.keys()
+    keys = list(link_map.keys())
     keys.sort()
     for key in keys:
-        print str_sha(key)
+        print(str_sha(key))
         dump_links(link_map[key])
 
 def dump_names_map(names_map, msg=None):
     if not msg is None:
-        print msg
-    keys = names_map.keys()
+        print(msg)
+    keys = list(names_map.keys())
     keys.sort()
     for key in keys:
         hashes = names_map[key]
-        print "%s->(%s, %s)" % (key, str_sha(hashes[0]), str_sha(hashes[1]))
+        print("%s->(%s, %s)" % (key, str_sha(hashes[0]), str_sha(hashes[1])))
 
 def dump_archive(archive, msg=None, brief=False):
-    print "--- start archive dump ---"
+    print("--- start archive dump ---")
     if not msg is None:
-        print msg
-    print "age: %i max_blocks: %i" % (archive.age, archive.max_blocks)
+        print(msg)
+    print("age: %i max_blocks: %i" % (archive.age, archive.max_blocks))
     dump_blocks(archive.blocks, "blocks:")
 
     dump_link_map(archive.blocks.link_map, "link_map:", brief)
-    print "--- end ---"
+    print("--- end ---")
 
 
 def words():
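
NOTE: the list()-then-sort() pairs above keep the diff minimal; where a
fresh sorted list of keys is all that is wanted, sorted() is the usual
Python 3 one-liner. Sketch, made-up map:

    link_map = {b"\x02": ["b"], b"\x01": ["a"]}
    keys = list(link_map.keys())
    keys.sort()
    assert keys == sorted(link_map)   # iterating a dict yields its keys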

          
@@ 197,7 197,7 @@ WORD_ITR = words()
 def lines(count):
     line = ""
     while count > 0:
-        line += WORD_ITR.next()
+        line += next(WORD_ITR)
         line += " "
         if len(line) > 60:
             ret = line
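
NOTE: Python 3 renamed the iterator method next() to __next__(), so
WORD_ITR.next() raises AttributeError; the builtin next() used above
works on both versions (it exists since Python 2.6). Sketch, simplified
stand-in for the words() helper:

    def words():                       # cycles a tiny word list forever
        while True:
            for word in ("lorem", "ipsum", "dolor"):
                yield word

    WORD_ITR = words()
    assert next(WORD_ITR) == "lorem"   # WORD_ITR.next() would fail here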

          
@@ 284,12 284,12 @@ class SmokeTests(ArchiveTestCase):
         return archive
 
     def test_create_archive(self):
-        print
+        print()
         archive = self.make_empty_archive('A')
         dump_archive(archive)
 
     def test_load_archive(self):
-        print
+        print()
         self.make_empty_archive('A')
         b = self.load_archive('A')
         dump_archive(b)

          
@@ 314,20 314,20 @@ class SmokeTests(ArchiveTestCase):
             dump_archive(a, "updated")
 
             # Read
-            print
-            print str_sha(link0[0]), a.get_data(link0[0])
-            print str_sha(link1[0]), a.get_data(link1[0])
-            print str_sha(link2[0]), a.get_data(link2[0])
+            print()
+            print(str_sha(link0[0]), a.get_data(link0[0]))
+            print(str_sha(link1[0]), a.get_data(link1[0]))
+            print(str_sha(link2[0]), a.get_data(link2[0]))
 
             a.close()
 
             b = self.load_archive('A')
             dump_archive(b, "[Reloaded from disk]")
-            print
+            print()
             # Mix up order.
-            print str_sha(link1[0]), b.get_data(link1[0])
-            print str_sha(link0[0]), b.get_data(link0[0])
-            print str_sha(link2[0]), b.get_data(link2[0])
+            print(str_sha(link1[0]), b.get_data(link1[0]))
+            print(str_sha(link0[0]), b.get_data(link0[0]))
+            print(str_sha(link2[0]), b.get_data(link2[0]))
         finally:
             self.tmps.remove_temp_file(t1)
             self.tmps.remove_temp_file(r0)

          
@@ 368,7 368,7 @@ class SmokeTests(ArchiveTestCase):
 
             prev = new_sha
             if iteration > 0 and iteration % 100 == 0:
-                print "iteration: ", iteration
+                print("iteration: ", iteration)
 
     # grrr... giving up on temp files
     def test_single_update(self):

          
@@ 392,9 392,9 @@ class SmokeTests(ArchiveTestCase):
             ('big.txt', '*' * (1 * 128)),
             )
 
-        print "manifest sha: ", str_sha(m.stored_sha)
+        print("manifest sha: ", str_sha(m.stored_sha))
         m.update(a, entries_from_seq(self.tmps, data0))
-        print "manifest sha: ", str_sha(m.stored_sha)
+        print("manifest sha: ", str_sha(m.stored_sha))
 
         dump_archive(a, "AFTER FIRST WRITE:")
         verify_manifest(a, m)

          
@@ 406,17 406,17 @@ class SmokeTests(ArchiveTestCase):
             )
 
         m.update(a, entries_from_seq(self.tmps, data1))
-        print "manifest sha: ", str_sha(m.stored_sha)
+        print("manifest sha: ", str_sha(m.stored_sha))
         dump_archive(a)
         verify_link_map(a.blocks.link_map)
         verify_manifest(a, m)
 
     def test_words(self):
-        print WORD_ITR.next()
+        print(next(WORD_ITR))