def clean(self):
try:
return super(PgwebAuthenticationForm, self).clean()
- except ValueError, e:
+ except ValueError as e:
- if e.message.startswith('Unknown password hashing algorithm'):
+ if str(e).startswith('Unknown password hashing algorithm'):
# This is *probably* a user trying to log in with an account that has not
# been set up properly yet. It could be an actually unsupported hashing
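BaseException.message is gone in Python 3 (and was already deprecated in 2.6), so the handler above matches on str(e) instead. A minimal sketch of the difference, using an illustrative message text:

    try:
        raise ValueError('Unknown password hashing algorithm "plain"')
    except ValueError as e:
        # Python 3 exceptions carry no .message attribute; str(e) yields the args text
        print(str(e).startswith('Unknown password hashing algorithm'))   # True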
try:
(email, firstname, lastname) = authdatafunc(oa)
email = email.lower()
- except KeyError, e:
+ except KeyError as e:
log.warning("Oauth signing using {0} was missing data: {1}".format(provider, e))
return HttpResponse('OAuth login was missing critical data. To log in, you need to allow access to email, first name and last name!')
if hasattr(m, fn):
try:
return getattr(m, fn)(request)
- except OAuthException, e:
+ except OAuthException as e:
return HttpResponse(e)
- except Exception, e:
+ except Exception as e:
- log.error('Excpetion during OAuth: %s' % e)
+ log.error('Exception during OAuth: %s' % e)
return HttpResponse('An unhandled exception occurred during the authentication process')
'Content-type': 'application/x-www-form-urlencoded',
})
c.sock.settimeout(10)
- except Exception, e:
+ except Exception as e:
# Error to connect at TCP level
log.error('Failed to connect to google recaptcha API: %s' % e)
raise ValidationError('Failed in API call to google recaptcha')
if fetchedsomething:
importfeed.purge_related()
- except Exception, e:
- print "Failed to load %s: %s" % (importfeed, e)
+ except Exception as e:
+ print("Failed to load %s: %s" % (importfeed, e))
f = open(settings.FTP_PICKLE, "rb")
allnodes = pickle.load(f)
f.close()
- except Exception, e:
+ except Exception as e:
return HttpServerError(request, "Failed to load ftp site information: %s" % e)
# An incoming subpath may either be canonical, or have one or more elements
if b:
return b
return "Could not find body"
- except Exception, e:
+ except Exception as e:
return "Failed to get body: %s" % e
parsed_content.short_description = 'Parsed mail'
))
except ValidationError:
raise
- except Exception, e:
+ except Exception as e:
raise ValidationError("Failed to parse vectors: %s" % e)
try:
if not self.crawl_single_message(listid, listname, year, month, currentmsg):
break
- except Exception, e:
+ except Exception as e:
log("Exception when crawling %s/%s/%s/%s - %s" % (
listname, year, month, currentmsg, e))
# Continue on to try the next message
(url, relprio, internal) = self.queue.get()
try:
self.crawl_page(url, relprio, internal)
- except Exception, e:
+ except Exception as e:
log("Exception crawling '%s': %s" % (url, e))
self.queue.task_done()
pagedata = lossy_unicode(pagedata)
try:
self.page = self.parse_html(pagedata)
- except Exception, e:
+ except Exception as e:
log("Failed to parse HTML for %s" % url)
log(e)
return
else:
# print "Url %s returned status %s" % (url, resp.status)
pass
- except Exception, e:
+ except Exception as e:
log("Exception when loading url %s: %s" % (url, e))
return (2, None, None)
# forcibly terminate the child.
try:
p.join()
- except KeyboardInterrupt, e:
+ except KeyboardInterrupt as e:
print "Keyboard interrupt, terminating child process!"
p.terminate()
- except Exception, e:
+ except Exception as e:
print "Exception %s, terminating child process!" % e
p.terminate()
return True
logging.warning("Varnish purge on %s returned status %s (%s)" % (consumername, resp.status, resp.reason))
return False
- except Exception, ex:
+ except Exception as ex:
logging.error("Exception purging on %s: %s" % (consumername, ex))
return False
return True
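For reference, the pattern applied throughout this patch: "except ExcType, e" is Python 2 only syntax and a hard syntax error under Python 3, while "except ExcType as e" parses on 2.6+ and 3.x alike. One subtlety: Python 3 deletes the bound name when the handler exits (PEP 3110). A minimal sketch:

    try:
        1 / 0
    except ZeroDivisionError as e:   # accepted by Python 2.6+ and all of 3.x
        msg = str(e)                 # capture before the handler ends...
    # ...because on Python 3 the name "e" is unbound past this point
    print(msg)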