 import pycassa
 import sys
 import time
-import urllib
+import urllib.request, urllib.parse, urllib.error
 from pycassa.cassandra.ttypes import NotFoundException
 from pycassa.util import OrderedDict

 from daisy import config
 from daisy import metrics as daisy_metrics
+from functools import cmp_to_key
 pool = daisy_metrics.wrapped_connection_pool('errors')

 from cassandra import ConsistencyLevel
@@ -141,7 +142,7 @@ def get_bucket_counts(release=None, package=None, version=None, pkg_arch=None,
         except NotFoundException:
             break

-        for column, count in result.iteritems():
+        for column, count in result.items():
             if not show_failed and column.startswith('failed'):
                 continue
             column = column.encode('utf-8')
@@ -154,7 +155,7 @@ def get_bucket_counts(release=None, package=None, version=None, pkg_arch=None,
             start = column + '0'
         if len(result) < batch_size:
             break
-    return sorted(results.items(), cmp=lambda x, y: cmp(x[1], y[1]), reverse=True)
+    return sorted(list(results.items()), key=cmp_to_key(lambda x, y: cmp(x[1], y[1])), reverse=True)

 def get_crashes_for_bucket(bucketid, limit=100, start=None):
     '''
@@ -167,13 +168,13 @@ def get_crashes_for_bucket(bucketid, limit=100, start=None):
     try:
         if start:
             start = pycassa.util.uuid.UUID(start)
-            return bucket_cf.get(bucketid,
+            return list(bucket_cf.get(bucketid,
                                  column_start=start,
                                  column_count=limit,
-                                 column_reversed=True).keys()[1:]
+                                 column_reversed=True).keys())[1:]
         else:
-            return bucket_cf.get(bucketid, column_count=limit,
-                                 column_reversed=True).keys()
+            return list(bucket_cf.get(bucketid, column_count=limit,
+                                      column_reversed=True).keys())
     except NotFoundException:
         return []

@@ -184,7 +185,7 @@ def get_package_for_bucket(bucketid):
     oops_cf = pycassa.ColumnFamily(pool, 'OOPS')
     # Grab 5 OOPS IDs, just in case the first one doesn't have a Package field.
     try:
-        oopsids = bucket_cf.get(bucketid, column_count=5).keys()
+        oopsids = list(bucket_cf.get(bucketid, column_count=5).keys())
     except NotFoundException:
         return ('', '')
     for oopsid in oopsids:
@@ -272,7 +273,7 @@ def get_retracer_count(date):

 def get_retracer_counts(start, finish):
     retracestats_cf = pycassa.ColumnFamily(pool, 'RetraceStats')
-    if finish == sys.maxint:
+    if finish == sys.maxsize:
         start = datetime.date.today() - datetime.timedelta(days=start)
         start = start.strftime('%Y%m%d')
     results = retracestats_cf.get_range()
@@ -313,7 +314,7 @@ def get_retracer_means(start, finish):
                 branch[part] = to_float(timings[timing])
             else:
                 branch = branch.setdefault(part, {})
-    return result.iteritems()
+    return iter(result.items())

 def get_crash_count(start, finish, release=None):
     counters_cf = pycassa.ColumnFamily(pool, 'Counters')
@@ -349,7 +350,7 @@ def chunks(l, n):
     # http://stackoverflow.com/a/312464/190597
     """ Yield successive n-sized chunks from l.
     """
-    for i in xrange(0, len(l), n):
+    for i in range(0, len(l), n):
         yield l[i:i + n]

 def get_metadata_for_buckets(bucketids, release=None):
@@ -392,10 +393,10 @@ def get_user_crashes(user_token, limit=50, start=None):
                                 include_timestamp=True)
             for r in result:
                 results[r] = {'submitted': result[r]}
-            start = result.keys()[-1] + '0'
+            start = list(result.keys())[-1] + '0'
         except NotFoundException:
             return []
-    return [(k[0], k[1]) for k in sorted(results.iteritems(), key=operator.itemgetter(1),
+    return [(k[0], k[1]) for k in sorted(iter(results.items()), key=operator.itemgetter(1),
                                          reverse=True)]

 def get_average_crashes(field, release, days=7):
@@ -460,7 +461,7 @@ def get_versions_for_bucket(bucketid):
 def get_source_package_for_bucket(bucketid):
     oops_cf = pycassa.ColumnFamily(pool, 'OOPS')
     bucket_cf = pycassa.ColumnFamily(pool, 'Bucket')
-    oopsids = bucket_cf.get(bucketid, column_count=10).keys()
+    oopsids = list(bucket_cf.get(bucketid, column_count=10).keys())
     for oopsid in oopsids:
         try:
             oops = oops_cf.get(str(oopsid), columns=['SourcePackage'])
@@ -497,7 +498,7 @@ def get_binary_packages_for_user(user):
     for result in results:
         if results[result] == 0:
             del results[result]
-    return [k[0:-7] for k in results.keys()]
+    return [k[0:-7] for k in list(results.keys())]

 def get_package_crash_rate(release, src_package, old_version, new_version,
                            pup, date, absolute_uri, exclude_proposed=False):
@@ -562,12 +563,12 @@ def get_package_crash_rate(release, src_package, old_version, new_version,
         # no previous version data so the diff is today's amount
         results['difference'] = today_crashes
         web_link = '?release=%s&package=%s&version=%s' % \
-            (urllib.quote(release), urllib.quote(src_package),
-             urllib.quote(new_version))
+            (urllib.parse.quote(release), urllib.parse.quote(src_package),
+             urllib.parse.quote(new_version))
         results['web_link'] = absolute_uri + web_link
         return results
     first_date = date
-    oldest_date = old_vers_data.keys()[-1]
+    oldest_date = list(old_vers_data.keys())[-1]
     dates = [x for x in _date_range_iterator(oldest_date, first_date)]
     previous_vers_crashes = []
     previous_days = len(dates[:-1])
@@ -612,8 +613,8 @@ def get_package_crash_rate(release, src_package, old_version, new_version,
         results['increase'] = True
         results['difference'] = difference
         web_link = '?release=%s&package=%s&version=%s' % \
-            (urllib.quote(release), urllib.quote(src_package),
-             urllib.quote(new_version))
+            (urllib.parse.quote(release), urllib.parse.quote(src_package),
+             urllib.parse.quote(new_version))
         results['web_link'] = absolute_uri + web_link
         results['previous_period_in_days'] = previous_days
         results['previous_average'] = standard_crashes
@@ -636,12 +637,12 @@ def get_package_new_buckets(src_pkg, previous_version, new_version):

     new_buckets = set(n_data).difference(set(p_data))
     for bucket in new_buckets:
-        if isinstance(bucket, unicode):
+        if isinstance(bucket, str):
             bucket = bucket.encode('utf-8')
         # do not return buckets that failed to retrace
         if bucket.startswith('failed:'):
             continue
-        if isinstance(new_version, unicode):
+        if isinstance(new_version, str):
             new_version = new_version.encode('utf-8')
         try:
             count = len(bucketversionsystems_cf.get((bucket, new_version), column_count=4))
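Note on the ported sort in get_bucket_counts: Python 3 removes the cmp() builtin, so key=cmp_to_key(lambda x, y: cmp(x[1], y[1])) would raise NameError at sort time unless cmp is defined elsewhere in this module. A minimal sketch of an equivalent descending sort by count, assuming results maps bucket names to integer counts as the surrounding code suggests (the data below is illustrative only, not taken from the commit):

    # Hypothetical, self-contained example; not part of the commit above.
    import operator

    results = {'bucket-a': 3, 'bucket-b': 10, 'bucket-c': 7}  # assumed shape: name -> count

    # Sort (name, count) pairs by count, largest first, without the Python 2 cmp() builtin.
    ranked = sorted(results.items(), key=operator.itemgetter(1), reverse=True)
    print(ranked)  # [('bucket-b', 10), ('bucket-c', 7), ('bucket-a', 3)]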