File convert_to_python3.patch of Package essentia
--- ./essentia/essentia_extractor.py (original)
+++ ./essentia/essentia_extractor.py (refactored)
@@ -28,7 +28,7 @@
def mergeRecursiveDict(d1, d2):
'''This function merges the values contained in d2 into d1.
If a value was already in d1, it overwrites that value.'''
- for (key, value) in d2.items():
+ for (key, value) in list(d2.items()):
if key in d1 and isinstance(value, dict):
mergeRecursiveDict(d1[key], value)
else:
@@ -191,7 +191,7 @@
try:
computeExtractor(name, audio, pool, options)
except Exception:
- print 'ERROR: when trying to compute', name, 'features'
+ print('ERROR: when trying to compute', name, 'features')
raise
def percentile(values, p):
@@ -232,7 +232,7 @@
except KeyError:
wantedStats[namespace] = {}
wantedStats[namespace + '.' + descriptor] = options['specific'][extractor]['output'][descriptor]
- for (k,v) in wantedStats.items():
+ for (k,v) in list(wantedStats.items()):
if not isinstance(v, list):
wantedStats[k] = [v]
stats = wantedStats[k]
@@ -240,7 +240,7 @@
for stat in stats:
if stat not in supportedStats:
unwantedStats += [stat]
- print 'Ignoring', stat, 'for', k, '. It is not supported.'
+ print('Ignoring', stat, 'for', k, '. It is not supported.')
if stat == 'single_gaussian':
unwantedStats += [stat]
wantedStats[k] += ['mean', 'cov', 'icov']
@@ -264,7 +264,7 @@
segmentName = 'segment_' + str("%02d" % segments.index(segment))
if options['verbose']:
- print 'Processing', segmentName, 'from second', segment[0], 'to second', segment[1]
+ print('Processing', segmentName, 'from second', segment[0], 'to second', segment[1])
# creating pool...
poolSegment = essentia.Pool()
@@ -377,7 +377,7 @@
# plotting descriptors evolution
try:
if options['plots']:
- import plotting
+ from . import plotting
plotting.compute(inputFilename, audio, pool, options)
except KeyError: pass
--- ./essentia/plotting.py (original)
+++ ./essentia/plotting.py (refactored)
@@ -40,7 +40,7 @@
if not os.path.exists('plots'):
os.mkdir('plots')
figureName = 'plots/' + name + '.png'
- print('Plotting ' + name + '...')
+ print('Plotting ' + name + '...')
pylab.savefig(figureName)
return figureName
@@ -75,7 +75,7 @@
except KeyError:
htmlCode = ''
- print("WARNING: the descriptor", descName, "doesn't exist")
+ print("WARNING: the descriptor", descName, "doesn't exist")
return htmlCode
--- ./essentia/pool.py (original)
+++ ./essentia/pool.py (refactored)
@@ -15,7 +15,7 @@
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
-from itertools import izip
+
import numpy
import essentia
from essentia import EssentiaError
@@ -81,7 +81,7 @@
def mean_scope(self, scopeFrom, scopeTo):
descriptors_mean = {}
- for key in self.descriptors.keys():
+ for key in list(self.descriptors.keys()):
descriptor = self.descriptors[self.__currentNamespace][key]
values_in_scope = []
@@ -105,7 +105,7 @@
def var_scope(self, scopeFrom, scopeTo):
descriptors_var = {}
- for key in self.descriptors.keys():
+ for key in list(self.descriptors.keys()):
descriptor = self.descriptors[self.__currentNamespace][key]
values_in_scope = []
@@ -130,7 +130,7 @@
aggregated = {}
for namespace in self.descriptors:
aggregated[namespace] = {}
- descs = self.descriptors[namespace].keys()
+ descs = list(self.descriptors[namespace].keys())
descs.sort()
stats_default = ['mean', 'var', 'min', 'max']
@@ -183,19 +183,19 @@
if 'dmean' in stats:
if not derived:
- derived = [a - b for a, b in izip(values[1:], values[:-1])]
+ derived = [a - b for a, b in zip(values[1:], values[:-1])]
aggrDesc['dmean'] = essentia.array(numpy.mean(numpy.abs(derived), axis=0))
if 'dvar' in stats:
if not derived:
- derived = [a - b for a, b in izip(values[1:], values[:-1])]
+ derived = [a - b for a, b in zip(values[1:], values[:-1])]
aggrDesc['dvar'] = essentia.array(numpy.var(derived, axis=0))
if 'dmean2' in stats:
if not derived:
- derived = [a - b for a, b in izip(values[1:], values[:-1])]
+ derived = [a - b for a, b in zip(values[1:], values[:-1])]
if not derived2:
- derived2 = [a - b for a, b in izip(derived[1:], derived[:-1])]
+ derived2 = [a - b for a, b in zip(derived[1:], derived[:-1])]
if derived2:
aggrDesc['dmean2'] = essentia.array(numpy.mean(numpy.abs(derived2), axis=0))
else:
@@ -203,9 +203,9 @@
if 'dvar2' in stats:
if not derived:
- derived = [a - b for a, b in izip(values[1:], values[:-1])]
+ derived = [a - b for a, b in zip(values[1:], values[:-1])]
if not derived2:
- derived2 = [a - b for a, b in izip(derived[1:], derived[:-1])]
+ derived2 = [a - b for a, b in zip(derived[1:], derived[:-1])]
if derived2:
aggrDesc['dvar2'] = essentia.array(numpy.var(derived2, axis=0))
else:
--- ./essentia/progress.py (original)
+++ ./essentia/progress.py (refactored)
@@ -48,8 +48,8 @@
def updateDisplay(self):
if self.verbose():
- print (self.format % self.percent(self.current),
- sys.stdout.flush())
+ print(self.format % self.percent(self.current))
+ sys.stdout.flush()
def finish(self):
self.update(self.total)
--- ./essentia/standard.py (original)
+++ ./essentia/standard.py (refactored)
@@ -130,7 +130,7 @@
# load all classes into python
def _reloadAlgorithms(moduleName = __name__):
- for name in _essentia.keys():
+ for name in list(_essentia.keys()):
_create_essentia_class(name, moduleName)
_reloadAlgorithms()
--- ./essentia/translate.py (original)
+++ ./essentia/translate.py (refactored)
@@ -16,10 +16,10 @@
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import inspect, types
-import streaming
+from . import streaming
import _essentia
-import common
-from streaming import _reloadStreamingAlgorithms
+from . import common
+from .streaming import _reloadStreamingAlgorithms
# genetic marker used to track which composite parameters configure which inner algorithms
class MarkerObject(object):
@@ -65,8 +65,8 @@
# to configure that parameter, and the configure_log
def find_edt(composite_param_name, marker_obj, configure_log):
# find inner algorithm and inner parameter name that this composite_param will configure
- for inner_algo_name, properties in configure_log.iteritems():
- for inner_param_name, value in properties['parameters'].iteritems():
+ for inner_algo_name, properties in configure_log.items():
+ for inner_param_name, value in properties['parameters'].items():
if marker_obj == value:
return properties['instance'].paramType(inner_param_name)
@@ -76,7 +76,7 @@
# given a reference to an inner algorithm and the configure_log, returns the name of the algo (use
# lower() if referring to the member var name)
def inner_algo_name(instance, configure_log):
- for algo_name, properties in configure_log.iteritems():
+ for algo_name, properties in configure_log.items():
if instance == properties['instance']:
return algo_name
@@ -145,25 +145,25 @@
def generate_dot_network(configure_log, composite_algo_inst):
# make connections
dot_code ='\n// connecting the network\n'
- for algo_name, properties in configure_log.iteritems():
- for left_connector, right_connectors in properties['instance'].connections.iteritems():
+ for algo_name, properties in configure_log.items():
+ for left_connector, right_connectors in properties['instance'].connections.items():
for right_connector in right_connectors:
if isinstance(right_connector, streaming._StreamConnector):
dot_code += ' _'+inner_algo_name(left_connector.output_algo, configure_log).lower()+':'+left_connector.name+'_o:e'+' -> '+\
'_'+inner_algo_name(right_connector.input_algo, configure_log).lower()+':'+right_connector.name + '_i:w;\n'
- if isinstance(right_connector, types.NoneType):
+ if isinstance(right_connector, type(None)):
inneralgoname = inner_algo_name(left_connector.output_algo, configure_log).lower()
dot_code += ' nowhere_'+inneralgoname+' [shape="box", style="rounded,filled", fillcolor="grey50", color="transparent" label="Nowhere" fontcolor="white" fontsize="18"];\n'+\
' _'+inneralgoname+':'+left_connector.name+'_o:e'+' -> nowhere_'+inneralgoname+';\n'
# make connections from floating inputs
- for name, connector in composite_algo_inst.inputs.iteritems():
+ for name, connector in composite_algo_inst.inputs.items():
innerinputname = connector.name
inneralgoname = inner_algo_name(connector.input_algo, configure_log).lower()
dot_code += ' '+name+':e -> _'+inneralgoname+':'+innerinputname+'_i:w;\n'
# make connections from floating outputs
- for name, connector in composite_algo_inst.outputs.iteritems():
+ for name, connector in composite_algo_inst.outputs.items():
inneroutputname = connector.name
inneralgoname = inner_algo_name(connector.output_algo, configure_log).lower()
dot_code += ' _'+inneralgoname+':'+inneroutputname+'_o:e -> '+name+':w;\n'
@@ -180,7 +180,7 @@
' label='+clustername+';\n\n'
# for each algo in the cluster, declare it in dot:
- for algo_name, properties in configure_log.iteritems():
+ for algo_name, properties in configure_log.items():
dot_code += generate_dot_algo(algo_name, properties['instance'])
# create the connections
@@ -223,7 +223,7 @@
algo_name = self.name()+'_'+str(lbl)
# increment lbl to generate a unique name for inner algo
- lowered_algo_names = [name.lower() for name in configure_log.keys()]
+ lowered_algo_names = [name.lower() for name in list(configure_log.keys())]
while algo_name.lower() in lowered_algo_names:
algo_name = algo_name[:algo_name.index('_')+1] + str(lbl)
lbl +=1
@@ -241,8 +241,8 @@
# itself
kwargs_no_markers = dict(kwargs)
- for key, value in kwargs.iteritems():
- if value in marker_objs.values():
+ for key, value in kwargs.items():
+ if value in list(marker_objs.values()):
if value.default_value == None:
del kwargs_no_markers[key]
else:
@@ -270,7 +270,7 @@
algo.configure = algo.real_configure
### Do some checking on their network ###
- for algo in [ logitem['instance'] for logitem in configure_log.values() ]:
+ for algo in [ logitem['instance'] for logitem in list(configure_log.values()) ]:
if isinstance(algo, streaming.VectorInput):
raise TypeError('essentia.streaming.VectorInput algorithms are not allowed for translatable composite algorithms')
@@ -290,7 +290,7 @@
def sort_by_key(configure_log):
# sort algorithms and conf values:
- sitems = configure_log.items()
+ sitems = list(configure_log.items())
sitems.sort()
sorted_algos = []
sorted_params= []
@@ -334,7 +334,7 @@
for param_name, default_value in zip(param_names, default_values):
h_code += ' declareParameter("'+param_name+'", "", "", '
- if isinstance(default_value, basestring): h_code += '"'+default_value+'"'
+ if isinstance(default_value, str): h_code += '"'+default_value+'"'
else: h_code += str(default_value)
h_code += ');\n'
@@ -396,7 +396,7 @@
cpp_code += 'void ' + composite_algo.__name__ + '::createInnerNetwork() {\n'
# declare inputs
- for input_alias, connector in algo_inst.inputs.iteritems():
+ for input_alias, connector in algo_inst.inputs.items():
input_owner_name = None
input_name = None
@@ -434,7 +434,7 @@
# make connections
for algo_name, properties in zip(sorted_algos, sorted_params): #configure_log.iteritems():
- for left_connector, right_connectors in properties['instance'].connections.iteritems():
+ for left_connector, right_connectors in properties['instance'].connections.items():
for right_connector in right_connectors:
if isinstance(right_connector, streaming._StreamConnector):
cpp_code += ' connect( _'+\
@@ -443,7 +443,7 @@
inner_algo_name(right_connector.input_algo, configure_log).lower() + \
'->input("'+right_connector.name+'") );\n'
- elif isinstance(right_connector, types.NoneType):
+ elif isinstance(right_connector, type(None)):
cpp_code += ' connect( _'+\
inner_algo_name(left_connector.output_algo, configure_log).lower() + \
'->output("'+left_connector.name+'"), NOWHERE );\n'
@@ -475,7 +475,7 @@
# skip if inner algorithm wasn't configured explicitly
if not properties['parameters']: continue
- for param_name, value in properties['parameters'].iteritems():
+ for param_name, value in properties['parameters'].items():
type = common.determineEdt(value)
if 'LIST' in str(type) or 'VECTOR' in str(type):
if type in [common.Edt.VECTOR_STRING]:
@@ -491,11 +491,11 @@
cpp_code += ' _'+algo_name.lower()+'->configure('
- for param_name, value in properties['parameters'].iteritems():
+ for param_name, value in properties['parameters'].items():
if isinstance(value, MarkerObject):
# figure out which composite param it is
composite_param_name = None
- for marker_name, marker_obj in marker_objs.iteritems():
+ for marker_name, marker_obj in marker_objs.items():
if marker_obj == value:
composite_param_name = marker_name
break
@@ -514,7 +514,7 @@
cpp_code += '"'+param_name+'", '+'arrayToVector<Real>(' + param_name + ') '
elif type in [common.Edt.VECTOR_INT, common.Edt.LIST_INT]:
cpp_code += '"'+param_name+'", '+'arrayToVector<int>(' + param_name + ') '
- elif isinstance(value, basestring):
+ elif isinstance(value, str):
cpp_code += '"'+param_name+'", "'+value+'", '
elif isinstance(value, bool):
if value: cpp_code += '"'+param_name+'", true, '
@@ -557,13 +557,13 @@
dot_code += ' edge [color=black, style=solid, weight=1, arrowhead="dotnormal", arrowtail="dot", arrowsize=1, fontsize=6]\n'
# for each input generate nodes
- for name in algo_inst.inputs.keys():
+ for name in list(algo_inst.inputs.keys()):
dot_code += ' '+name+' [label="'+name+'"];\n'
dot_code += generate_dot_cluster(configure_log, composite_algo.__name__, algo_inst)
# for each output generate nodes
- for name in algo_inst.outputs.keys():
+ for name in list(algo_inst.outputs.keys()):
dot_code += ' '+name+' [label="'+name+'"];\n'
dot_code += '}'
--- ./essentia/extractor/average_loudness.py (original)
+++ ./essentia/extractor/average_loudness.py (refactored)
@@ -20,7 +20,7 @@
import sys
from math import *
from essentia import INFO
-from squeezeInto import squeezeInto
+from .squeezeInto import squeezeInto
from essentia.essentia_extractor import descriptorNames
from essentia.progress import Progress
--- ./essentia/extractor/chords.py (original)
+++ ./essentia/extractor/chords.py (refactored)
@@ -50,7 +50,7 @@
for chord in chords:
chords_histogram[chord] += 1.0
- for chord in chords_histogram.keys():
+ for chord in list(chords_histogram.keys()):
chords_histogram[chord] *= 100.0 / len(chords)
return chords_histogram
@@ -108,7 +108,7 @@
# 1st step: find the most frequent chord(s)
max_value = max(chords_histogram.values())
chords_max = []
- for chord in chords_histogram.keys():
+ for chord in list(chords_histogram.keys()):
if chords_histogram[chord] == max_value:
chords_max.append(chord)
# 2nd step: in case of 2 max, let's take the major one
--- ./essentia/extractor/relativeioi.py (original)
+++ ./essentia/extractor/relativeioi.py (refactored)
@@ -51,7 +51,7 @@
for i in range(3,len(onsets)): riois += [ round( (onsets[i] - onsets[i-3]) / interval ) ]
for i in range(4,len(onsets)): riois += [ round( (onsets[i] - onsets[i-4]) / interval ) ]
ioidist = essentia.array(bincount(riois))
- fullioidist = essentia.array(zip( [p/interp for p in range(len(ioidist))], [ioi/sum(ioidist) for ioi in ioidist]))
+ fullioidist = essentia.array(list(zip( [p/interp for p in range(len(ioidist))], [ioi/sum(ioidist) for ioi in ioidist])))
fullioidist = fullioidist[0:interp*5]
peak_detection = essentia.PeakDetection(minPosition = 0., maxPosition = len(ioidist),
maxPeaks = 5, range = len(ioidist) - 1.,
@@ -65,7 +65,7 @@
mags = [ mag/sum(ioidist) for mag in mags ]
# add to pool
- pool.add(namespace + '.' + 'relative_ioi_peaks', essentia.array(zip(pos,mags)))#, pool.GlobalScope)
+ pool.add(namespace + '.' + 'relative_ioi_peaks', essentia.array(list(zip(pos,mags))))#, pool.GlobalScope)
pool.add(namespace + '.' + 'relative_ioi', fullioidist)#, pool.GlobalScope)
# debug plot
--- ./essentia/extractor/segmentation.py (original)
+++ ./essentia/extractor/segmentation.py (refactored)
@@ -21,8 +21,8 @@
import sys
import numpy
import essentia
-import segmentation_bic
-import segmentation_max_energy
+from . import segmentation_bic
+from . import segmentation_max_energy
from essentia import EssentiaError, INFO
from math import *
@@ -30,7 +30,7 @@
def print_onset(onset):
(minutes, seconds) = (int(onset/60.0), int(onset%60))
- print minutes, 'mn', seconds, 's',
+ print(minutes, 'mn', seconds, 's', end=' ')
def doSegmentation(inputFilename, audio, pool, options):
@@ -42,7 +42,7 @@
sampleRate = options['sampleRate']
if segtype == 'fromFile':
- segments = [ map(float, l.strip().split('\t')) for l in open(options[namespace]['segmentsFile'], 'r').readlines() ]
+ segments = [ list(map(float, l.strip().split('\t'))) for l in open(options[namespace]['segmentsFile'], 'r').readlines() ]
else:
if segtype == 'maxEnergy':
@@ -57,7 +57,7 @@
# creating segment wave file
if writeFile:
outputFilename = inputFilename + '.segments.wav'
- print 'Creating segments audio file ' + outputFilename + '...'
+ print('Creating segments audio file ' + outputFilename + '...')
audioOnsetsMarker = essentia.AudioOnsetsMarker(filename = outputFilename, sampleRate = sampleRate)
audioOnsetsMarker(audio, onsets)
@@ -68,16 +68,16 @@
if options['verbose']:
if len(segments) > 0:
- print 'Segments : ',
+ print('Segments : ', end=' ')
for segment in segments:
- print '[',
+ print('[', end=' ')
print_onset(segment[0])
- print ",",
+ print(",", end=' ')
print_onset(segment[1])
- print '] ',
+ print('] ', end=' ')
else:
- print 'No segments found!'
- print
+ print('No segments found!')
+ print()
return segments
--- ./essentia/extractor/segmentation_bic.py (original)
+++ ./essentia/extractor/segmentation_bic.py (refactored)
@@ -36,7 +36,7 @@
if namespace not in namespaces:
- print 'ERROR when trying to compute BIC segmentation: you must compute lowlevel descriptors first!'
+ print('ERROR when trying to compute BIC segmentation: you must compute lowlevel descriptors first!')
# options
minimumSegmentsLength = options['segmentation']['minimumSegmentsLength']
--- ./essentia/extractor/thumbnail.py (original)
+++ ./essentia/extractor/thumbnail.py (refactored)
@@ -32,7 +32,7 @@
else:
minutes = int(floor(onset / 60.0))
seconds = int(floor(onset % 60))
- print minutes, "mn", seconds, "s",
+ print(minutes, "mn", seconds, "s", end=' ')
def compute(megalopool, verbose = True):
@@ -40,7 +40,7 @@
cvar.verbose = False
if verbose:
- print "\nDoing thumbnailing..."
+ print("\nDoing thumbnailing...")
# From megalopool to Gaia point
p = megalopool.to_point()
@@ -98,14 +98,14 @@
if verbose:
if len(thumbnail) > 0:
- print 'Thumbnail : ',
- print '[',
+ print('Thumbnail : ', end=' ')
+ print('[', end=' ')
print_onset(thumbnail[0])
- print ",",
+ print(",", end=' ')
print_onset(thumbnail[1])
- print '] ',
+ print('] ', end=' ')
else:
- print 'No thumbnail found!'
- print
+ print('No thumbnail found!')
+ print()
return thumbnail
--- ./essentia/weka/convert_essentia_to_weka.py (original)
+++ ./essentia/weka/convert_essentia_to_weka.py (refactored)
@@ -15,7 +15,7 @@
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
-from wekafile import WekaFile
+from .wekafile import WekaFile
import yaml
# all the labels you will use
@@ -79,7 +79,7 @@
def convert(inputFilenames, outputFilename):
for (inputFilename, inClass) in inputFilenames:
- print "processing", inputFilename
+ print("processing", inputFilename)
descriptors = yaml.load(open(inputFilename, 'r').read())
--- ./essentia/weka/wekafile.py (original)
+++ ./essentia/weka/wekafile.py (refactored)
@@ -97,10 +97,10 @@
- output = "".join(file("weka_test.arff").readlines())
+ output = "".join(open("weka_test.arff").readlines())
if output == expected_output:
- print "test succeeded"
+ print("test succeeded")
else:
- print "test failed"
- print "---------output-------------------------"
- print output
- print "---------expected output----------------"
- print expected_output
+ print("test failed")
+ print("---------output-------------------------")
+ print(output)
+ print("---------expected output----------------")
+ print(expected_output)