Apply futurize --stage-2
Python 2/3 compatibility transformations.
These changes specifically require the use of the "future" library.
parent 457e5739f7
commit 2d078b0004

10 changed files with 31 additions and 17 deletions
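A note on what "stage 2" means here (not stated in the commit itself): futurize's second stage rewrites Python 2-only idioms into code that runs unchanged on Python 2 and 3, and the rewritten modules then import small helpers from the "future" package at runtime. A minimal sketch of that dependency, mirroring the imports the diff below adds (illustrative, assuming "future" is installed):

    # Stdlib builtins on Python 3; backports from the "future" package on Python 2.
    from builtins import object, str, filter
    from past.builtins import basestring        # Python 2-style string base type
    from past.utils import old_div              # Python 2-style "/" semantics
    from future import standard_library
    standard_library.install_aliases()          # Python 3 stdlib module names on Python 2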
@@ -1,3 +1,4 @@
+from past.builtins import basestring
 from builtins import object
 import hashlib
 
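basestring disappeared in Python 3; the past.builtins backport added above keeps existing isinstance() checks working on both interpreters. A small illustration (not from the commit):

    from past.builtins import basestring

    # On Python 2 this is the builtin basestring (str + unicode); on Python 3 it
    # is a virtual base class matching str and bytes.
    assert isinstance(u'text', basestring)
    assert isinstance(b'raw', basestring)
    assert not isinstance(42, basestring)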
@@ -1,3 +1,6 @@
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
 from django.db import models
 from django.conf import settings
 from conservancy import bsoup
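standard_library.install_aliases() is what lets the Python 3 module names used later in this file resolve on Python 2 as well. A sketch of the mechanism (illustrative, not part of the commit):

    from future import standard_library
    standard_library.install_aliases()

    # After install_aliases(), Python 3 standard-library names import on Python 2
    # too, backed by the old modules underneath:
    import xmlrpc.client    # xmlrpclib on Python 2
    import urllib.parse     # urlparse / urllib on Python 2
    import http.client      # httplib on Python 2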
@@ -65,14 +68,14 @@ class Entry(models.Model, bsoup.SoupModelMixin):
         post_url = ('http://www.sfconservancy.org'
                     + self.get_absolute_url())
 
-        import xmlrpclib
+        import xmlrpc.client
 
         # Ping Technorati
-        j = xmlrpclib.Server('http://rpc.technorati.com/rpc/ping')
+        j = xmlrpc.client.Server('http://rpc.technorati.com/rpc/ping')
         reply = j.weblogUpdates.ping(blog_name, blog_url)
 
         # Ping Google Blog Search
-        j = xmlrpclib.Server('http://blogsearch.google.com/ping/RPC2')
+        j = xmlrpc.client.Server('http://blogsearch.google.com/ping/RPC2')
         reply = j.weblogUpdates.ping(blog_name, blog_url, post_url)
 
         # Call any superclass's method
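The ping code keeps its shape because Server survives as a legacy alias for ServerProxy in both xmlrpclib and xmlrpc.client. A rough, non-networked sketch of the equivalent modern spelling (the RPC call itself is only indicated, not executed):

    import xmlrpc.client

    # Server and ServerProxy are the same class in xmlrpc.client.
    assert xmlrpc.client.Server is xmlrpc.client.ServerProxy

    j = xmlrpc.client.ServerProxy('http://rpc.technorati.com/rpc/ping')
    # reply = j.weblogUpdates.ping(blog_name, blog_url)   # would issue a real XML-RPC request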
@@ -15,6 +15,6 @@ def organize_media_by_event(eventmedia_queryset):
     mbe = [{'event': x[0].event,
             'date': max(y.date_created for y in x),
             'media_list': ', '.join(set(y.get_format_display() for y in x))}
-           for x in media_by_event.values()]
+           for x in list(media_by_event.values())]
     mbe.sort(key=(lambda x: x['date']), reverse=True) # sort by date
     return mbe
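The list() wrapper is futurize being conservative: dict.values() returns a view on Python 3 rather than a list. For the single pass the comprehension makes, iterating the view directly would also work; a small illustration with made-up data (not from the codebase):

    # Hypothetical data, just to show the shape of the change.
    media_by_event = {'talk': ['video', 'slides']}

    view = media_by_event.values()               # list on Python 2, view object on Python 3
    assert list(view) == [['video', 'slides']]   # list() restores Python 2 behaviour on both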
@@ -1,3 +1,5 @@
+from __future__ import division
+from past.utils import old_div
 from builtins import object
 import random
 
@@ -17,7 +19,7 @@ class FundraisingGoal(models.Model):
         return self.fundraiser_code_name
 
     def percentage_there(self):
-        return (self.fundraiser_so_far_amount / self.fundraiser_goal_amount ) * 100
+        return (old_div(self.fundraiser_so_far_amount, self.fundraiser_goal_amount) ) * 100
 
     class Meta(object):
         ordering = ('fundraiser_code_name',)
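old_div keeps the Python 2 result when both operands are integers and falls through to true division otherwise, so the futurized percentage_there() returns what the old expression did (including for Decimal field values). A short illustration (not from the codebase):

    from __future__ import division
    from past.utils import old_div

    assert 50 / 80 == 0.625              # true division after the __future__ import
    assert old_div(50, 80) == 0          # two ints: Python 2-style floor division
    assert old_div(50.0, 80) == 0.625    # non-integral operands divide exactly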
@@ -1,3 +1,5 @@
+from future import standard_library
+standard_library.install_aliases()
 from builtins import object
 from django.db import models
 from django.conf import settings
@@ -54,14 +56,14 @@ class PressRelease(models.Model, bsoup.SoupModelMixin):
         post_url = ('https://www.sfconservancy.org'
                     + self.get_absolute_url())
 
-        import xmlrpclib
+        import xmlrpc.client
 
         # Ping Technorati
-        j = xmlrpclib.Server('http://rpc.technorati.com/rpc/ping')
+        j = xmlrpc.client.Server('http://rpc.technorati.com/rpc/ping')
         reply = j.weblogUpdates.ping(blog_name, blog_url)
 
         # Ping Google Blog Search
-        j = xmlrpclib.Server('http://blogsearch.google.com/ping/RPC2')
+        j = xmlrpc.client.Server('http://blogsearch.google.com/ping/RPC2')
         reply = j.weblogUpdates.ping(blog_name, blog_url, post_url)
 
         # Call any superclass's method
@@ -1,4 +1,7 @@
-import urlparse
+from future import standard_library
+standard_library.install_aliases()
+from builtins import zip
+import urllib.parse
 
 from django import template
 
@@ -13,8 +16,8 @@ def fill_url(given_url, base_url):
     Typical usage is "/URL/path"|fill_url:"https://hostname/"
     to generate "https://hostname/URL/path".
     """
-    given_parts = urlparse.urlsplit(given_url)
-    base_parts = urlparse.urlsplit(base_url)
-    return urlparse.urlunsplit(
+    given_parts = urllib.parse.urlsplit(given_url)
+    base_parts = urllib.parse.urlsplit(base_url)
+    return urllib.parse.urlunsplit(
         given_part or base_part for given_part, base_part in zip(given_parts, base_parts)
     )
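Under the install_aliases() call added above, urllib.parse behaves the same on both interpreters, so the filter's merge logic is unchanged. A quick check of the example from the docstring (hypothetical inputs):

    import urllib.parse

    given_parts = urllib.parse.urlsplit('/URL/path')
    base_parts = urllib.parse.urlsplit('https://hostname/')

    filled = urllib.parse.urlunsplit(
        given or base for given, base in zip(given_parts, base_parts)
    )
    assert filled == 'https://hostname/URL/path'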
@@ -1,5 +1,6 @@
 # -*- encoding: utf-8 -*-
 
+from builtins import filter
 from builtins import object
 import io
 import itertools
@@ -144,7 +145,7 @@ class SoupModelMixin(object):
         return elem_pred
 
     def _sort_and_slice_elems(self, elem_seq, elem_key, pred, *slice_args):
-        seq = itertools.ifilter(pred, sorted(elem_seq, key=elem_key))
+        seq = filter(pred, sorted(elem_seq, key=elem_key))
         if slice_args:
             return itertools.islice(seq, *slice_args)
         else:
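itertools.ifilter is gone in Python 3; the builtin filter is already an iterator there, and future's builtins module supplies the same iterator-returning filter on Python 2, so the islice() call below it is untouched. A small sketch with illustrative values:

    from builtins import filter   # iterator-returning filter on Python 2 as well
    import itertools

    seq = filter(lambda n: n % 2, sorted([4, 3, 1, 2]))
    assert list(itertools.islice(seq, 2)) == [1, 3]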
@@ -22,7 +22,7 @@ def sitefundraiser(request):
 if conservancy.settings.FORCE_CANONICAL_HOSTNAME:
     _HOST_URL_VAR = {'host_url': 'https://' + conservancy.settings.FORCE_CANONICAL_HOSTNAME}
     def host_url(request):
-        return _HOST_URL_VAR
+        return _HOST_URL_VAR.decode('utf-8')
 else:
     def host_url(request):
-        return {'host_url': request.build_absolute_uri('/').rstrip('/')}
+        return {'host_url': request.build_absolute_uri('/').rstrip('/').decode('utf-8')}
@@ -1,3 +1,4 @@
+from builtins import str
 import mimetypes
 import os.path
 from django.http import HttpResponse
@@ -1,3 +1,4 @@
+from builtins import str
 from mod_python import apache
 
 # 404 should do NOTHING so apache can handle it. This view is referenced
@@ -83,10 +84,10 @@ class ModPythonHandler(BaseHandler):
 
         # Convert our custom HttpResponse object back into the mod_python req.
         req.content_type = response['Content-Type']
-        for key, value in response.items():
+        for key, value in list(response.items()):
             if key != 'content-type':
                 req.headers_out[str(key)] = str(value)
-        for c in response.cookies.values():
+        for c in list(response.cookies.values()):
             req.headers_out.add('Set-Cookie', c.output(header=''))
         req.status = response.status_code
         try:
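The same list() pattern appears here for .items() and .values(), which are views on Python 3; the wrapper restores the concrete list that Python 2-era code expects, and from builtins import str makes str(key) and str(value) yield the same text type under both interpreters. A tiny sketch with stand-in data (the real objects are Django's HttpResponse and its cookie jar):

    from builtins import str

    # Stand-in for the response headers; illustrative only.
    headers = {'Content-Type': 'text/html', 'X-Frame-Options': 'DENY'}

    for key, value in list(headers.items()):     # a concrete list on Python 2 and 3 alike
        assert isinstance(str(key), str) and isinstance(str(value), str)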