diff --git a/bin/diamond b/bin/diamond index a47bd9452..197c75f72 100755 --- a/bin/diamond +++ b/bin/diamond @@ -148,7 +148,7 @@ def main(): # Read existing pid file try: - pf = file(options.pidfile, 'r') + pf = open(options.pidfile, 'r') pid = int(pf.read().strip()) pf.close() except (IOError, ValueError): @@ -187,7 +187,7 @@ def main(): # Write pid file pid = str(os.getpid()) try: - pf = file(options.pidfile, 'w+') + pf = open(options.pidfile, 'w+') except IOError as e: print("Failed to write PID file: %s" % (e), file=sys.stderr) sys.exit(1) @@ -271,7 +271,7 @@ def main(): # Write pid file pid = str(os.getpid()) try: - pf = file(options.pidfile, 'w+') + pf = open(options.pidfile, 'w+') except IOError as e: log.error("Failed to write child PID file: %s" % (e)) sys.exit(1) diff --git a/bin/diamond-setup b/bin/diamond-setup index 7c3ddcbef..50c64c698 100755 --- a/bin/diamond-setup +++ b/bin/diamond-setup @@ -78,7 +78,7 @@ def getCollectors(path): def typeToString(key): - if isinstance(obj.config[key], basestring): + if isinstance(obj.config[key], str): user_val = obj.config[key] elif isinstance(obj.config[key], bool): user_val = str(obj.config[key]) @@ -95,7 +95,7 @@ def typeToString(key): def stringToType(key, val): if type(obj.config[key]) is type(val): config_file[key] = val - elif isinstance(obj.config[key], basestring): + elif isinstance(obj.config[key], str): if val.lower() == 'false': config_file[key] = False elif val.lower() == 'true': @@ -103,7 +103,7 @@ def stringToType(key, val): else: config_file[key] = val elif isinstance(obj.config[key], bool): - if isinstance(val, basestring): + if isinstance(val, str): config_file[key] = str_to_bool(val) else: config_file[key] = bool(val) @@ -117,7 +117,7 @@ def stringToType(key, val): def boolCheck(val): - if isinstance(val, basestring): + if isinstance(val, str): return str_to_bool(val) elif isinstance(val, bool): return val @@ -137,7 +137,7 @@ def configureKey(key): print("\n") if key in default_conf_help: 
print(default_conf_help[key]) - val = raw_input(key + ' [' + user_val + ']: ') + val = input(key + ' [' + user_val + ']: ') # Empty user input? Default to current value if len(val) == 0: @@ -191,7 +191,7 @@ if __name__ == "__main__": print(config['server']['collectors_config_path']) print('Please type yes to continue') - val = raw_input('Are you sure? ') + val = input('Are you sure? ') if val != 'yes': sys.exit() @@ -257,8 +257,8 @@ if __name__ == "__main__": config_file.write() - except IOError as (errno, strerror): - print("I/O error({}): {}".format(errno, strerror)) + except IOError as err: + print("I/O error({}): {}".format(err.errno, err.strerror)) except KeyboardInterrupt: print() sys.exit() diff --git a/setup.py b/setup.py index 433974a98..f5bdb8259 100755 --- a/setup.py +++ b/setup.py @@ -5,6 +5,7 @@ import os from glob import glob import platform +import distro def running_under_virtualenv(): @@ -42,11 +43,11 @@ def running_under_virtualenv(): ('share/diamond/user_scripts', []), ] - distro = platform.dist()[0] - distro_major_version = platform.dist()[1].split('.')[0] - if not distro: + distro_name = distro.id() + distro_major_version = int(distro.version().split('.')[0]) + if not distro_name: if 'amzn' in platform.uname()[2]: - distro = 'centos' + distro_name = 'centos' if running_under_virtualenv(): data_files.append(('etc/diamond', @@ -65,20 +66,20 @@ def running_under_virtualenv(): data_files.append(('/var/log/diamond', ['.keep'])) - if distro == 'Ubuntu': + if distro_name == 'ubuntu': if distro_major_version >= 16: data_files.append(('/usr/lib/systemd/system', ['rpm/systemd/diamond.service'])) else: data_files.append(('/etc/init', ['debian/diamond.upstart'])) - if distro in ['centos', 'redhat', 'debian', 'fedora', 'oracle']: + if distro_name in ['centos', 'redhat', 'rhel', 'debian', 'fedora', 'oracle', 'ol']: data_files.append(('/etc/init.d', ['bin/init.d/diamond'])) - if distro_major_version >= 7 and not distro == 'debian': + if distro_major_version >= 7 and not 
distro_name == 'debian': data_files.append(('/usr/lib/systemd/system', ['rpm/systemd/diamond.service'])) - elif distro_major_version >= 6 and not distro == 'debian': + elif distro_major_version >= 6 and not distro_name == 'debian': data_files.append(('/etc/init', ['rpm/upstart/diamond.conf'])) @@ -88,11 +89,7 @@ def running_under_virtualenv(): if running_under_virtualenv(): install_requires = ['configobj', 'psutil', ] else: - if distro in ['debian', 'Ubuntu']: - install_requires = ['python-configobj', 'python-psutil', ] - # Default back to pip style requires - else: - install_requires = ['configobj', 'psutil', ] + install_requires = ['configobj', 'psutil', ] def get_version(): @@ -150,7 +147,7 @@ def pkgPath(root, path, rpath="/"): packages=['diamond', 'diamond.handler', 'diamond.utils'], scripts=['bin/diamond', 'bin/diamond-setup'], data_files=data_files, - python_requires='~=2.7', + python_requires='~=3.0', install_requires=install_requires, classifiers=[ 'Programming Language :: Python', diff --git a/src/collectors/aerospike/aerospike.py b/src/collectors/aerospike/aerospike.py index 5e7181a81..2d1af5bfe 100644 --- a/src/collectors/aerospike/aerospike.py +++ b/src/collectors/aerospike/aerospike.py @@ -136,7 +136,7 @@ def collect_latency(self, data): # Create metrics dict for the namespace/histogram pair dataset = datasets[i].split(',')[1:] - metrics = dict(zip(fields, dataset)) + metrics = dict(list(zip(fields, dataset))) # Publish a metric for each field in the histogram for field in fields: @@ -156,11 +156,11 @@ def collect_latency(self, data): ) = data.split(';')[1::2] # Collapse each type of data line into a dict of metrics - for op_type in raw_lines.keys(): - metrics = dict(zip(fields, raw_lines[op_type].split(',')[1:])) + for op_type in list(raw_lines.keys()): + metrics = dict(list(zip(fields, raw_lines[op_type].split(',')[1:]))) # publish each metric - for metric in metrics.keys(): + for metric in list(metrics.keys()): self.publish_gauge('latency.%s.%s' % 
(op_type, metric), metrics[metric]) @@ -206,7 +206,7 @@ def collect_throughput(self, data): raw_lines['query'], ) = data.split(';')[1::2] - for op_type in raw_lines.keys(): + for op_type in list(raw_lines.keys()): metric = raw_lines[op_type].split(',')[1] self.publish_gauge('throughput.%s' % op_type, metric) diff --git a/src/collectors/amavis/amavis.py b/src/collectors/amavis/amavis.py index f13db8b4f..3574106a5 100644 --- a/src/collectors/amavis/amavis.py +++ b/src/collectors/amavis/amavis.py @@ -81,7 +81,7 @@ def collect(self): if res: groups = res.groupdict() name = groups['name'] - for metric, value in groups.items(): + for metric, value in list(groups.items()): if metric == 'name': continue mtype = 'GAUGE' diff --git a/src/collectors/aurora/aurora.py b/src/collectors/aurora/aurora.py index 809c6f335..7fd1dc324 100644 --- a/src/collectors/aurora/aurora.py +++ b/src/collectors/aurora/aurora.py @@ -1,6 +1,6 @@ import diamond.collector -import urllib2 +from urllib.request import urlopen class AuroraCollector(diamond.collector.Collector): @@ -32,7 +32,7 @@ def collect(self): self.config['host'], self.config['port']) - response = urllib2.urlopen(url) + response = urlopen(url) for line in response.readlines(): properties = line.split() diff --git a/src/collectors/beanstalkd/beanstalkd.py b/src/collectors/beanstalkd/beanstalkd.py index b1156e71e..c22beb4c0 100644 --- a/src/collectors/beanstalkd/beanstalkd.py +++ b/src/collectors/beanstalkd/beanstalkd.py @@ -71,14 +71,14 @@ def collect(self): info = self._get_stats() - for stat, value in info['instance'].items(): + for stat, value in list(info['instance'].items()): if stat not in self.SKIP_LIST: self.publish(stat, value, metric_type=self.get_metric_type(stat)) for tube_stats in info['tubes']: tube = tube_stats['name'] - for stat, value in tube_stats.items(): + for stat, value in list(tube_stats.items()): if stat != 'name': self.publish('tubes.%s.%s' % (tube, stat), value, metric_type=self.get_metric_type(stat)) diff 
--git a/src/collectors/bind/bind.py b/src/collectors/bind/bind.py index 440667cb4..e158c5a7e 100644 --- a/src/collectors/bind/bind.py +++ b/src/collectors/bind/bind.py @@ -11,7 +11,7 @@ """ import diamond.collector -import urllib2 +from urllib.request import urlopen import xml.etree.cElementTree as ElementTree @@ -69,7 +69,7 @@ def clean_counter(self, name, value): def collect(self): try: - req = urllib2.urlopen('http://%s:%d/' % ( + req = urlopen('http://%s:%d/' % ( self.config['host'], int(self.config['port']))) except Exception as e: self.log.error('Couldnt connect to bind: %s', e) diff --git a/src/collectors/celerymon/celerymon.py b/src/collectors/celerymon/celerymon.py index 05ba0030d..be0af6d3c 100644 --- a/src/collectors/celerymon/celerymon.py +++ b/src/collectors/celerymon/celerymon.py @@ -18,7 +18,7 @@ """ import diamond.collector -import urllib2 +from urllib.request import urlopen import time try: @@ -68,7 +68,7 @@ def collect(self): celerymon_url = "http://%s:%s/api/task/?since=%i" % ( host, port, self.LastCollectTime) - response = urllib2.urlopen(celerymon_url) + response = urlopen(celerymon_url) body = response.read() celery_data = json.loads(body) diff --git a/src/collectors/ceph/ceph.py b/src/collectors/ceph/ceph.py index 51bc9124e..faeb1c145 100644 --- a/src/collectors/ceph/ceph.py +++ b/src/collectors/ceph/ceph.py @@ -38,7 +38,7 @@ def flatten_dictionary(input, sep='.', prefix=None): [('a.b', 10), ('c', 20)] """ for name, value in sorted(input.items()): - fullname = sep.join(filter(None, [prefix, name])) + fullname = sep.join([_f for _f in [prefix, name] if _f]) if isinstance(value, dict): for result in flatten_dictionary(value, sep, fullname): yield result diff --git a/src/collectors/ceph/test/testceph.py b/src/collectors/ceph/test/testceph.py index 8095f7e17..aa42be40a 100644 --- a/src/collectors/ceph/test/testceph.py +++ b/src/collectors/ceph/test/testceph.py @@ -103,13 +103,13 @@ def test_counter_default_prefix(self): expected = 'ceph.osd.325' 
sock = '/var/run/ceph/ceph-osd.325.asok' actual = self.collector._get_counter_prefix_from_socket_name(sock) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) def test_counter_alternate_prefix(self): expected = 'ceph.keep-osd.325' sock = '/var/run/ceph/keep-osd.325.asok' actual = self.collector._get_counter_prefix_from_socket_name(sock) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) @patch('glob.glob') def test_get_socket_paths(self, glob_mock): diff --git a/src/collectors/chronyd/chronyd.py b/src/collectors/chronyd/chronyd.py index cb4e4a086..f86f54ca3 100644 --- a/src/collectors/chronyd/chronyd.py +++ b/src/collectors/chronyd/chronyd.py @@ -65,7 +65,7 @@ def get_output(self): return "" def collect(self): - output = self.get_output() + output = self.get_output().decode('utf-8') for line in output.strip().split("\n"): m = LINE_PATTERN.search(line) diff --git a/src/collectors/conntrack/conntrack.py b/src/collectors/conntrack/conntrack.py index c335545d1..cc139a974 100644 --- a/src/collectors/conntrack/conntrack.py +++ b/src/collectors/conntrack/conntrack.py @@ -50,12 +50,12 @@ def collect(self): collected = {} files = [] - if isinstance(self.config['dir'], basestring): + if isinstance(self.config['dir'], str): dirs = [d.strip() for d in self.config['dir'].split(',')] elif isinstance(self.config['dir'], list): dirs = self.config['dir'] - if isinstance(self.config['files'], basestring): + if isinstance(self.config['files'], str): files = [f.strip() for f in self.config['files'].split(',')] elif isinstance(self.config['files'], list): files = self.config['files'] @@ -85,5 +85,5 @@ def collect(self): 'nf_conntrack/ip_conntrack kernel module was ' 'not loaded') else: - for key in collected.keys(): + for key in list(collected.keys()): self.publish(key, collected[key]) diff --git a/src/collectors/cpu/cpu.py b/src/collectors/cpu/cpu.py index 5bfd1222c..0e2841daa 100644 --- a/src/collectors/cpu/cpu.py +++ 
b/src/collectors/cpu/cpu.py @@ -151,9 +151,9 @@ def cpu_delta_time(interval): metrics['cpu_count'] = ncpus - for cpu in results.keys(): + for cpu in list(results.keys()): stats = results[cpu] - for s in stats.keys(): + for s in list(stats.keys()): # Get Metric Name metric_name = '.'.join([cpu, s]) # Get actual data @@ -163,22 +163,22 @@ def cpu_delta_time(interval): if use_derivative: metrics[metric_name] = self.derivative( metric_name, - long(stats[s]), + int(stats[s]), self.MAX_VALUES[s]) / div else: - metrics[metric_name] = long(stats[s]) / div + metrics[metric_name] = int(stats[s]) / div # Check for a bug in xen where the idle time is doubled for guest # See https://bugzilla.redhat.com/show_bug.cgi?id=624756 if self.config['xenfix'] is None or self.config['xenfix'] is True: if os.path.isdir('/proc/xen'): total = 0 - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): if 'cpu0.' in metric_name: total += int(metrics[metric_name]) if total > 110: self.config['xenfix'] = True - for mname in metrics.keys(): + for mname in list(metrics.keys()): if '.idle' in mname: metrics[mname] = float(metrics[mname]) / 2 elif total > 0: @@ -204,28 +204,28 @@ def cpu_delta_time(interval): if use_derivative: metrics[cpu + '.user'] = self.derivative( cpu + '.user', - long(cpu_time[i].user), + int(cpu_time[i].user), self.MAX_VALUES['user']) metrics[cpu + '.system'] = self.derivative( cpu + '.system', - long(cpu_time[i].system), + int(cpu_time[i].system), self.MAX_VALUES['system']) metrics[cpu + '.idle'] = self.derivative( cpu + '.idle', - long(cpu_time[i].idle), + int(cpu_time[i].idle), self.MAX_VALUES['idle']) if hasattr(cpu_time[i], 'nice'): metrics[cpu + '.nice'] = self.derivative( cpu + '.nice', - long(cpu_time[i].nice), + int(cpu_time[i].nice), self.MAX_VALUES['nice']) else: - metrics[cpu + '.user'] = long(cpu_time[i].user) - metrics[cpu + '.system'] = long(cpu_time[i].system) - metrics[cpu + '.idle'] = long(cpu_time[i].idle) + metrics[cpu + '.user'] = 
int(cpu_time[i].user) + metrics[cpu + '.system'] = int(cpu_time[i].system) + metrics[cpu + '.idle'] = int(cpu_time[i].idle) if hasattr(cpu_time[i], 'nice'): - metrics[cpu + '.nice'] = long(cpu_time[i].nice) + metrics[cpu + '.nice'] = int(cpu_time[i].nice) div = 1 if use_normalization and cpu_count > 0: @@ -234,30 +234,30 @@ def cpu_delta_time(interval): if use_derivative: metrics['total.user'] = self.derivative( 'total.user', - long(total_time.user), + int(total_time.user), self.MAX_VALUES['user']) / div metrics['total.system'] = self.derivative( 'total.system', - long(total_time.system), + int(total_time.system), self.MAX_VALUES['system']) / div metrics['total.idle'] = self.derivative( 'total.idle', - long(total_time.idle), + int(total_time.idle), self.MAX_VALUES['idle']) / div if hasattr(total_time, 'nice'): metrics['total.nice'] = self.derivative( 'total.nice', - long(total_time.nice), + int(total_time.nice), self.MAX_VALUES['nice']) / div else: - metrics['total.user'] = long(total_time.user) / div - metrics['total.system'] = long(total_time.system) / div - metrics['total.idle'] = long(total_time.idle) / div + metrics['total.user'] = int(total_time.user) / div + metrics['total.system'] = int(total_time.system) / div + metrics['total.idle'] = int(total_time.idle) / div if hasattr(total_time, 'nice'): - metrics['total.nice'] = long(total_time.nice) / div + metrics['total.nice'] = int(total_time.nice) / div # Publish Metric - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): self.publish(metric_name, metrics[metric_name], precision=2) diff --git a/src/collectors/cpu/test/testcpu.py b/src/collectors/cpu/test/testcpu.py index 44336e20b..f0ef5a423 100644 --- a/src/collectors/cpu/test/testcpu.py +++ b/src/collectors/cpu/test/testcpu.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector 
import Collector from cpu import CPUCollector diff --git a/src/collectors/cpuacct_cgroup/cpuacct_cgroup.py b/src/collectors/cpuacct_cgroup/cpuacct_cgroup.py index ed3ec2675..f15d02e73 100644 --- a/src/collectors/cpuacct_cgroup/cpuacct_cgroup.py +++ b/src/collectors/cpuacct_cgroup/cpuacct_cgroup.py @@ -62,8 +62,8 @@ def collect(self): stat_file.close() # create metrics from collected utimes and stimes for cgroups - for parent, cpuacct in results.iteritems(): - for key, value in cpuacct.iteritems(): + for parent, cpuacct in results.items(): + for key, value in cpuacct.items(): metric_name = '.'.join([parent, key]) self.publish(metric_name, value, metric_type='GAUGE') return True diff --git a/src/collectors/cpuacct_cgroup/test/testcpuacct_cgroup.py b/src/collectors/cpuacct_cgroup/test/testcpuacct_cgroup.py index 4e2430262..dd93cda20 100644 --- a/src/collectors/cpuacct_cgroup/test/testcpuacct_cgroup.py +++ b/src/collectors/cpuacct_cgroup/test/testcpuacct_cgroup.py @@ -8,9 +8,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from cpuacct_cgroup import CpuAcctCgroupCollector diff --git a/src/collectors/darner/darner.py b/src/collectors/darner/darner.py index 33f69b10f..ea76504ee 100644 --- a/src/collectors/darner/darner.py +++ b/src/collectors/darner/darner.py @@ -117,7 +117,7 @@ def collect(self): hosts = self.config.get('hosts') # Convert a string config value to be an array - if isinstance(hosts, basestring): + if isinstance(hosts, str): hosts = [hosts] for host in hosts: @@ -140,7 +140,7 @@ def collect(self): queues[queue][queue_stat]) # figure out what we're configured to get, defaulting to everything - desired = self.config.get('publish', stats.keys()) + desired = self.config.get('publish', list(stats.keys())) # for everything we want for stat in desired: diff --git a/src/collectors/dirstats/dirstats.py 
b/src/collectors/dirstats/dirstats.py index 22b9a9630..e0f2ed37c 100644 --- a/src/collectors/dirstats/dirstats.py +++ b/src/collectors/dirstats/dirstats.py @@ -11,7 +11,7 @@ from time import time try: - import Queue as queue + import queue as queue except ImportError: import queue diff --git a/src/collectors/diskspace/diskspace.py b/src/collectors/diskspace/diskspace.py index f719e0c57..39181336b 100644 --- a/src/collectors/diskspace/diskspace.py +++ b/src/collectors/diskspace/diskspace.py @@ -78,7 +78,7 @@ def process_config(self): super(DiskSpaceCollector, self).process_config() # Precompile things self.exclude_filters = self.config['exclude_filters'] - if isinstance(self.exclude_filters, basestring): + if isinstance(self.exclude_filters, str): self.exclude_filters = [self.exclude_filters] if not self.exclude_filters: @@ -87,7 +87,7 @@ def process_config(self): self.exclude_reg = re.compile('|'.join(self.exclude_filters)) self.filesystems = [] - if isinstance(self.config['filesystems'], basestring): + if isinstance(self.config['filesystems'], str): for filesystem in self.config['filesystems'].split(','): self.filesystems.append(filesystem.strip()) elif isinstance(self.config['filesystems'], list): @@ -187,7 +187,7 @@ def collect(self): self.log.error('No diskspace metrics retrieved') return None - for info in results.itervalues(): + for info in results.values(): if info['device'] in labels: name = labels[info['device']] else: diff --git a/src/collectors/disktemp/disktemp.py b/src/collectors/disktemp/disktemp.py index ff900bda0..506e942a1 100644 --- a/src/collectors/disktemp/disktemp.py +++ b/src/collectors/disktemp/disktemp.py @@ -67,7 +67,7 @@ def match_device(self, device, path): # If the regex has a capture group for pretty printing, pick # the last matched capture group if self.devices.groups > 0: - key = '.'.join(filter(None, [g for g in m.groups()])) + key = '.'.join([_f for _f in [g for g in m.groups()] if _f]) return {key: 
self.get_temp(os.path.join('/dev', device))} @@ -88,7 +88,7 @@ def collect(self): instances.update(self.match_device(device, '/dev/disk/by-id/')) metrics = {} - for device, p in instances.items(): + for device, p in list(instances.items()): output = p.communicate()[0].strip() try: @@ -96,5 +96,5 @@ def collect(self): except: self.log.warn('Disk temperature retrieval failed on ' + device) - for metric in metrics.keys(): + for metric in list(metrics.keys()): self.publish(metric, metrics[metric]) diff --git a/src/collectors/diskusage/diskusage.py b/src/collectors/diskusage/diskusage.py index 0779e4bf8..6e3b1504a 100644 --- a/src/collectors/diskusage/diskusage.py +++ b/src/collectors/diskusage/diskusage.py @@ -164,13 +164,13 @@ def collect(self): self.log.error('No diskspace metrics retrieved') return None - for key, info in results.iteritems(): + for key, info in results.items(): metrics = {} name = info['device'] if not reg.match(name): continue - for key, value in info.iteritems(): + for key, value in info.items(): if key == 'device': continue oldkey = key diff --git a/src/collectors/docker_collector/docker_collector.py b/src/collectors/docker_collector/docker_collector.py index e7612d8c4..0a22728c3 100644 --- a/src/collectors/docker_collector/docker_collector.py +++ b/src/collectors/docker_collector/docker_collector.py @@ -87,7 +87,7 @@ def collect(self): for container in running_containers: name = "containers." 
+ "".join(container['Names'][0][1:]) s = client.stats(container["Id"]) - stat = json.loads(s.next()) + stat = json.loads(next(s)) for path in self.METRICS: val = self.get_value(path, stat) if val is not None: diff --git a/src/collectors/drbd/drbd.py b/src/collectors/drbd/drbd.py index daa70537f..f993a1adc 100644 --- a/src/collectors/drbd/drbd.py +++ b/src/collectors/drbd/drbd.py @@ -79,8 +79,8 @@ def collect(self): self.log.error("Can't read DRBD status file: {}".format(errormsg)) return - for resource in results.keys(): - for metric_name, metric_value in results[resource].items(): + for resource in list(results.keys()): + for metric_name, metric_value in list(results[resource].items()): if metric_value.isdigit(): self.publish(resource + "." + metric_name, metric_value) else: diff --git a/src/collectors/dropwizard/dropwizard.py b/src/collectors/dropwizard/dropwizard.py index 2af875287..1302e57c5 100644 --- a/src/collectors/dropwizard/dropwizard.py +++ b/src/collectors/dropwizard/dropwizard.py @@ -5,7 +5,8 @@ """ -import urllib2 +from urllib.error import HTTPError +from urllib.request import urlopen try: import json @@ -45,8 +46,8 @@ def collect(self): url = 'http://%s:%i/metrics' % ( self.config['host'], int(self.config['port'])) try: - response = urllib2.urlopen(url) - except urllib2.HTTPError as err: + response = urlopen(url) + except HTTPError as err: self.log.error("%s: %s", url, err) return diff --git a/src/collectors/dseopscenter/dseopscenter.py b/src/collectors/dseopscenter/dseopscenter.py index 1217101f9..4d3fd792f 100644 --- a/src/collectors/dseopscenter/dseopscenter.py +++ b/src/collectors/dseopscenter/dseopscenter.py @@ -4,7 +4,7 @@ Collect the DataStax OpsCenter metrics """ -import urllib2 +from urllib.request import urlopen import datetime try: @@ -119,7 +119,7 @@ def _get_schema(self): int(self.config['port']), self.config['cluster_id']) try: - response = urllib2.urlopen(url) + response = urlopen(url) except Exception as err: self.log.error('%s: %s', 
url, err) return False @@ -157,7 +157,7 @@ def _get(self, start, end, step=60): self.config['default_tail_opts']) try: - response = urllib2.urlopen(url) + response = urlopen(url) except Exception as err: self.log.error('%s: %s', url, err) return False diff --git a/src/collectors/elasticsearch/elasticsearch.py b/src/collectors/elasticsearch/elasticsearch.py index 17ac0d1e2..65b22b5a8 100755 --- a/src/collectors/elasticsearch/elasticsearch.py +++ b/src/collectors/elasticsearch/elasticsearch.py @@ -13,7 +13,7 @@ """ -import urllib2 +from urllib.request import Request, urlopen import base64 import re from diamond.collector import str_to_bool @@ -33,7 +33,7 @@ class ElasticSearchCollector(diamond.collector.Collector): def process_config(self): super(ElasticSearchCollector, self).process_config() instance_list = self.config['instances'] - if isinstance(instance_list, basestring): + if isinstance(instance_list, str): instance_list = [instance_list] if len(instance_list) == 0: @@ -111,12 +111,12 @@ def _get(self, scheme, host, port, path, assert_key=None): """ url = '%s://%s:%i/%s' % (scheme, host, port, path) try: - request = urllib2.Request(url) + request = Request(url) if self.config['user'] and self.config['password']: base64string = base64.standard_b64encode( '%s:%s' % (self.config['user'], self.config['password'])) request.add_header("Authorization", "Basic %s" % base64string) - response = urllib2.urlopen(request) + response = urlopen(request) except Exception as err: self.log.error("%s: %s" % (url, err)) return False @@ -135,13 +135,13 @@ def _get(self, scheme, host, port, path, assert_key=None): return doc def _copy_one_level(self, metrics, prefix, data, filter=lambda key: True): - for key, value in data.iteritems(): - if filter(key): + for key, value in data.items(): + if filter(key): metric_path = '%s.%s' % (prefix, key) self._set_or_sum_metric(metrics, metric_path, value) def _copy_two_level(self, metrics, prefix, data, filter=lambda key: True): - for 
key1, d1 in data.iteritems(): + for key1, d1 in data.items(): self._copy_one_level(metrics, '%s.%s' % (prefix, key1), d1, filter) def _index_metrics(self, metrics, prefix, index): @@ -236,7 +236,7 @@ def collect_instance_index_stats(self, scheme, host, port, metrics): else: return - for name, index in indices.iteritems(): + for name, index in indices.items(): self._index_metrics(metrics, 'indices.%s' % name, index['primaries']) @@ -246,7 +246,7 @@ def collect_instance(self, alias, scheme, host, port): return metrics = {} - node = result['nodes'].keys()[0] + node = list(result['nodes'].keys())[0] data = result['nodes'][node] # @@ -367,7 +367,7 @@ def collect_instance(self, alias, scheme, host, port): if 'heap_used_percent' in mem: metrics['jvm.mem.heap_used_percent'] = mem['heap_used_percent'] - for pool, d in mem['pools'].iteritems(): + for pool, d in mem['pools'].items(): pool = pool.replace(' ', '_') metrics['jvm.mem.pools.%s.used' % pool] = d['used_in_bytes'] metrics['jvm.mem.pools.%s.max' % pool] = d['max_in_bytes'] @@ -377,7 +377,7 @@ def collect_instance(self, alias, scheme, host, port): gc = jvm['gc'] collection_count = 0 collection_time_in_millis = 0 - for collector, d in gc['collectors'].iteritems(): + for collector, d in gc['collectors'].items(): metrics['jvm.gc.collection.%s.count' % collector] = d[ 'collection_count'] collection_count += d['collection_count'] diff --git a/src/collectors/elb/elb.py b/src/collectors/elb/elb.py index 25013de27..21403a290 100644 --- a/src/collectors/elb/elb.py +++ b/src/collectors/elb/elb.py @@ -39,7 +39,7 @@ """ import calendar -import cPickle +import pickle import datetime import functools import re @@ -76,7 +76,7 @@ def __init__(self, func): def __call__(self, *args, **kwargs): # If the function args cannot be used as a cache hash key, fail fast - key = cPickle.dumps((args, kwargs)) + key = pickle.dumps((args, kwargs)) try: return self.cache[key] except KeyError: @@ -269,9 +269,9 @@ def process_metric(self, 
region_cw_conn, zone, start_time, end_time, # instead of wasting space to store/emit a zero. if len(stats) == 0 and metric.default_to_zero: stats.append({ - u'Timestamp': start_time, + 'Timestamp': start_time, metric.aws_type: 0.0, - u'Unit': u'Count' + 'Unit': 'Count' }) for stat in stats: @@ -317,7 +317,7 @@ def collect(self): end_time = now.replace(second=0, microsecond=0) start_time = end_time - datetime.timedelta(seconds=self.interval) - for region in self.config['regions'].keys(): + for region in list(self.config['regions'].keys()): region_cw_conn = cloudwatch.connect_to_region(region, **self.auth_kwargs) self.process_region(region_cw_conn, start_time, end_time) diff --git a/src/collectors/elb/test/testelb.py b/src/collectors/elb/test/testelb.py index ee3b47d20..312d7f579 100644 --- a/src/collectors/elb/test/testelb.py +++ b/src/collectors/elb/test/testelb.py @@ -78,19 +78,19 @@ def test_ignore(self, publish_metric, elb_connect_to_region, ts = datetime.datetime.utcnow().replace(second=0, microsecond=0) cw_conn.get_metric_statistics.side_effect = [ - [{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}], + [{'Timestamp': ts, 'Average': 1.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Average': 2.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 3.0, 'Unit': 'Count'}], + [{'Timestamp': 
ts, 'Average': 4.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 6.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 7.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 8.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 9.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 10.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 11.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 12.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Maximum': 13.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 14.0, 'Unit': 'Count'}], ] cloudwatch.connect_to_region = Mock() @@ -154,19 +154,19 @@ def test_collect(self, publish_metric, connect_to_region, cloudwatch): ts = datetime.datetime.utcnow().replace(second=0, microsecond=0) cw_conn.get_metric_statistics.side_effect = [ - [{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}], - [{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}], + [{'Timestamp': ts, 'Average': 1.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Average': 2.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 3.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Average': 4.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 6.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 7.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 8.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 9.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 10.0, 'Unit': 'Count'}], + 
[{'Timestamp': ts, 'Sum': 11.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 12.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Maximum': 13.0, 'Unit': 'Count'}], + [{'Timestamp': ts, 'Sum': 14.0, 'Unit': 'Count'}], ] cloudwatch.connect_to_region = Mock() diff --git a/src/collectors/endecadgraph/endecadgraph.py b/src/collectors/endecadgraph/endecadgraph.py index 55e9ddd28..3e84656b0 100644 --- a/src/collectors/endecadgraph/endecadgraph.py +++ b/src/collectors/endecadgraph/endecadgraph.py @@ -11,9 +11,9 @@ """ +from urllib.request import urlopen import diamond.collector -import urllib2 -from StringIO import StringIO +from io import StringIO import re import xml.etree.cElementTree as ElementTree @@ -87,7 +87,7 @@ def createKey(element): return key def processElem(elem, keyList): - for k, v in elem.items(): + for k, v in list(elem.items()): prefix = '.'.join(keyList) if k not in self.IGNORE_ELEMENTS and self.NUMVAL_MATCH.match(v): k = makeSane(k) @@ -111,7 +111,7 @@ def walkXML(context, elemList): url = 'http://%s:%d/admin?op=stats' % (self.config['host'], self.config['port']) try: - xml = urllib2.urlopen(url, timeout=self.config['timeout']).read() + xml = urlopen(url, timeout=self.config['timeout']).read() except Exception as e: self.log.error('Could not connect to endeca on %s: %s' % (url, e)) return {} diff --git a/src/collectors/etcdstat/etcdstat.py b/src/collectors/etcdstat/etcdstat.py index 53e4b4d8a..0f9b39856 100644 --- a/src/collectors/etcdstat/etcdstat.py +++ b/src/collectors/etcdstat/etcdstat.py @@ -12,9 +12,10 @@ ``` """ +from urllib.error import HTTPError +from urllib.request import urlopen import diamond.collector import json -import urllib2 METRICS_KEYS = ['sendPkgRate', 'recvPkgRate', @@ -76,7 +77,7 @@ def collect_self_metrics(self): def collect_store_metrics(self): metrics = self.get_store_metrics() - for k, v in metrics.iteritems(): + for k, v in metrics.items(): key = self.clean_up(k) self.publish("store.%s" % key, v) @@ -100,8 +101,8 @@ def 
get_metrics(self, category): url = "%s://%s:%s/v2/stats/%s" % (protocol, self.config['host'], self.config['port'], category) - return json.load(urllib2.urlopen(url, **opts)) - except (urllib2.HTTPError, ValueError) as err: + return json.load(urlopen(url, **opts)) + except (HTTPError, ValueError) as err: self.log.error('Unable to read JSON response: %s' % err) return {} diff --git a/src/collectors/eventstoreprojections/eventstoreprojections.py b/src/collectors/eventstoreprojections/eventstoreprojections.py index 7c77a01e7..59fac4c86 100644 --- a/src/collectors/eventstoreprojections/eventstoreprojections.py +++ b/src/collectors/eventstoreprojections/eventstoreprojections.py @@ -17,7 +17,8 @@ """ -import urllib2 +from urllib.error import URLError +from urllib.request import Request, urlopen import json import diamond.collector @@ -59,12 +60,12 @@ def get_default_config(self): def _json_to_flat_metrics(self, prefix, data): - for key, value in data.items(): + for key, value in list(data.items()): if isinstance(value, dict): for k, v in self._json_to_flat_metrics( "%s.%s" % (prefix, key), value): yield k, v - elif isinstance(value, basestring): + elif isinstance(value, str): if value == "Running": value = 1 yield ("%s.%s" % (prefix, key), value) @@ -91,12 +92,12 @@ def collect(self): self.config['route'] ) - req = urllib2.Request(eventstore_host, headers=self.config['headers']) + req = Request(eventstore_host, headers=self.config['headers']) req.add_header('Content-type', 'application/json') try: - resp = urllib2.urlopen(req) - except urllib2.URLError as e: + resp = urlopen(req) + except URLError as e: self.log.error("Can't open url %s. 
%s", eventstore_host, e) else: content = resp.read() diff --git a/src/collectors/filestat/filestat.py b/src/collectors/filestat/filestat.py index fe3a17557..2457194a6 100644 --- a/src/collectors/filestat/filestat.py +++ b/src/collectors/filestat/filestat.py @@ -116,13 +116,13 @@ def get_userlist(self): based on the variables user_include and user_exclude """ # convert user/group lists to arrays if strings - if isinstance(self.config['user_include'], basestring): + if isinstance(self.config['user_include'], str): self.config['user_include'] = self.config['user_include'].split() - if isinstance(self.config['user_exclude'], basestring): + if isinstance(self.config['user_exclude'], str): self.config['user_exclude'] = self.config['user_exclude'].split() - if isinstance(self.config['group_include'], basestring): + if isinstance(self.config['group_include'], str): self.config['group_include'] = self.config['group_include'].split() - if isinstance(self.config['group_exclude'], basestring): + if isinstance(self.config['group_exclude'], str): self.config['group_exclude'] = self.config['group_exclude'].split() rawusers = os.popen("lsof | awk '{ print $3 }' | sort | uniq -d" @@ -206,9 +206,9 @@ def get_typelist(self): typelist = [] # convert type list into arrays if strings - if isinstance(self.config['type_include'], basestring): + if isinstance(self.config['type_include'], str): self.config['type_include'] = self.config['type_include'].split() - if isinstance(self.config['type_exclude'], basestring): + if isinstance(self.config['type_exclude'], str): self.config['type_exclude'] = self.config['type_exclude'].split() # remove any not in include list @@ -259,8 +259,8 @@ def collect(self): # collect open files per user per type if self.config['collect_user_data']: data = self.process_lsof(self.get_userlist(), self.get_typelist()) - for ukey in data.iterkeys(): - for tkey in data[ukey].iterkeys(): + for ukey in data.keys(): + for tkey in data[ukey].keys(): 
self.log.debug('files.user.%s.%s %s' % ( ukey, tkey, int(data[ukey][tkey]))) self.publish('user.%s.%s' % (ukey, tkey), diff --git a/src/collectors/filestat/test/testfilestat.py b/src/collectors/filestat/test/testfilestat.py index 42f18eadf..84c340103 100644 --- a/src/collectors/filestat/test/testfilestat.py +++ b/src/collectors/filestat/test/testfilestat.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from filestat import FilestatCollector diff --git a/src/collectors/fluentd/fluentd.py b/src/collectors/fluentd/fluentd.py index 567a86888..a70c23912 100644 --- a/src/collectors/fluentd/fluentd.py +++ b/src/collectors/fluentd/fluentd.py @@ -19,8 +19,8 @@ """ +from urllib.request import urlopen import diamond.collector -import urllib2 import json @@ -51,7 +51,7 @@ def collect(self): params = (self.config['host'], self.config['port'], self.API_PATH) url = "http://%s:%s/%s" % params - res = urllib2.urlopen(url) + res = urlopen(url) data = json.load(res) result = self.parse_api_output(data) @@ -61,7 +61,7 @@ def collect(self): def parse_api_output(self, status): result = [] for p in status.get('plugins'): - if p['type'] in self.config['collect'].keys(): + if p['type'] in list(self.config['collect'].keys()): for m in self.config['collect'].get(p['type']): tag = ".".join([p['type'], m]) result.append((tag, p.get(m))) diff --git a/src/collectors/flume/flume.py b/src/collectors/flume/flume.py index 27e39b344..59e4d415c 100644 --- a/src/collectors/flume/flume.py +++ b/src/collectors/flume/flume.py @@ -10,7 +10,8 @@ """ -import urllib2 +from urllib.error import URLError +from urllib.request import urlopen import diamond.collector try: @@ -79,7 +80,7 @@ def collect(self): ) try: - resp = urllib2.urlopen(url) + resp = urlopen(url) try: j = json.loads(resp.read()) resp.close() @@ -87,14 +88,14 @@ def 
collect(self): resp.close() self.log.error('Cannot load json data: %s', e) return None - except urllib2.URLError as e: + except URLError as e: self.log.error('Failed to open url: %s', e) return None except Exception as e: self.log.error('Unknown error opening url: %s', e) return None - for comp in j.iteritems(): + for comp in j.items(): comp_name = comp[0] comp_items = comp[1] comp_type = comp_items['Type'] diff --git a/src/collectors/hadoop/hadoop.py b/src/collectors/hadoop/hadoop.py index 4cda82d13..6ead7948f 100644 --- a/src/collectors/hadoop/hadoop.py +++ b/src/collectors/hadoop/hadoop.py @@ -76,7 +76,7 @@ def collect_from(self, filename): key, value = metric.split('=', 1) metrics[key] = value - for metric in metrics.keys(): + for metric in list(metrics.keys()): try: if data['name'] == 'jvm.metrics': diff --git a/src/collectors/haproxy/haproxy.py b/src/collectors/haproxy/haproxy.py index 5b6304ae4..ebaa2baea 100644 --- a/src/collectors/haproxy/haproxy.py +++ b/src/collectors/haproxy/haproxy.py @@ -11,7 +11,7 @@ """ import re -import urllib2 +from urllib.request import Request, urlopen import base64 import csv import socket @@ -64,9 +64,9 @@ def http_get_csv_data(self, section=None): Request stats from HAProxy Server """ metrics = [] - req = urllib2.Request(self._get_config_value(section, 'url')) + req = Request(self._get_config_value(section, 'url')) try: - handle = urllib2.urlopen(req) + handle = urlopen(req) return handle.readlines() except Exception as e: if not hasattr(e, 'code') or e.code != 401: @@ -101,7 +101,7 @@ def http_get_csv_data(self, section=None): authheader = 'Basic %s' % base64string req.add_header("Authorization", authheader) try: - handle = urllib2.urlopen(req) + handle = urlopen(req) metrics = handle.readlines() return metrics except IOError as e: diff --git a/src/collectors/hbase/hbase.py b/src/collectors/hbase/hbase.py index 999b06ccc..09e90ea80 100644 --- a/src/collectors/hbase/hbase.py +++ b/src/collectors/hbase/hbase.py @@ -62,7 +62,7 
@@ def collect_from(self, filename): key, value = metric.split('=', 1) metrics[key] = value - for metric in metrics.keys(): + for metric in list(metrics.keys()): try: if data['name'] == 'jvm.metrics': diff --git a/src/collectors/http/http.py b/src/collectors/http/_http.py similarity index 95% rename from src/collectors/http/http.py rename to src/collectors/http/_http.py index 1f5c73e4a..753bd074e 100644 --- a/src/collectors/http/http.py +++ b/src/collectors/http/_http.py @@ -27,7 +27,7 @@ http:__www_site_com_admin_page_html """ -import urllib2 +from urllib.request import Request, urlopen import diamond.collector import datetime @@ -63,9 +63,9 @@ def collect(self): for url in self.config['req_url']: self.log.debug("collecting %s", str(url)) req_start = datetime.datetime.now() - req = urllib2.Request(url, headers=self.config['headers']) + req = Request(url, headers=self.config['headers']) try: - handle = urllib2.urlopen(req) + handle = urlopen(req) the_page = handle.read() req_end = datetime.datetime.now() req_time = req_end - req_start diff --git a/src/collectors/httpd/httpd.py b/src/collectors/httpd/httpd.py index 64b9fac57..fd74677ce 100644 --- a/src/collectors/httpd/httpd.py +++ b/src/collectors/httpd/httpd.py @@ -12,8 +12,8 @@ """ import re -import httplib -import urlparse +import http.client as httplib +from urllib.parse import urlparse import diamond.collector @@ -25,7 +25,7 @@ def process_config(self): self.config['urls'].append(self.config['url']) self.urls = {} - if isinstance(self.config['urls'], basestring): + if isinstance(self.config['urls'], str): self.config['urls'] = self.config['urls'].split(',') for url in self.config['urls']: @@ -59,7 +59,7 @@ def get_default_config(self): return config def collect(self): - for nickname in self.urls.keys(): + for nickname in list(self.urls.keys()): url = self.urls[nickname] try: diff --git a/src/collectors/httpd/test/testhttpd.py b/src/collectors/httpd/test/testhttpd.py index 7cd18c54a..61d7e08b0 100644 --- 
a/src/collectors/httpd/test/testhttpd.py +++ b/src/collectors/httpd/test/testhttpd.py @@ -10,12 +10,12 @@ from diamond.collector import Collector from httpd import HttpdCollector -import httplib +import http.client ########################################################################## -class TestHTTPResponse(httplib.HTTPResponse): +class TestHTTPResponse(http.client.HTTPResponse): def __init__(self): pass @@ -39,8 +39,8 @@ def setUp(self, config=None): self.HTTPResponse = TestHTTPResponse() - httplib.HTTPConnection.request = Mock(return_value=True) - httplib.HTTPConnection.getresponse = Mock( + http.client.HTTPConnection.request = Mock(return_value=True) + http.client.HTTPConnection.getresponse = Mock( return_value=self.HTTPResponse) def test_import(self): diff --git a/src/collectors/httpjson/httpjson.py b/src/collectors/httpjson/httpjson.py index 304f0beee..0dad7d31e 100644 --- a/src/collectors/httpjson/httpjson.py +++ b/src/collectors/httpjson/httpjson.py @@ -9,7 +9,8 @@ """ -import urllib2 +from urllib.error import URLError +from urllib.request import Request, urlopen import json import diamond.collector @@ -35,7 +36,7 @@ def get_default_config(self): return default_config def _json_to_flat_metrics(self, prefix, data): - for key, value in data.items(): + for key, value in list(data.items()): if isinstance(value, dict): for k, v in self._json_to_flat_metrics( "%s.%s" % (prefix, key), value): @@ -51,12 +52,12 @@ def _json_to_flat_metrics(self, prefix, data): def collect(self): url = self.config['url'] - req = urllib2.Request(url, headers=self.config['headers']) + req = Request(url, headers=self.config['headers']) req.add_header('Content-type', 'application/json') try: - resp = urllib2.urlopen(req) - except urllib2.URLError as e: + resp = urlopen(req) + except URLError as e: self.log.error("Can't open url %s. 
%s", url, e) else: diff --git a/src/collectors/icinga_stats/icinga_stats.py b/src/collectors/icinga_stats/icinga_stats.py index 0d2555304..7a66702f5 100644 --- a/src/collectors/icinga_stats/icinga_stats.py +++ b/src/collectors/icinga_stats/icinga_stats.py @@ -23,23 +23,23 @@ def collect(self): stats = self.parse_stats_file(self.config["status_path"]) if len(stats) == 0: return {} - elif "info" not in stats.keys(): + elif "info" not in list(stats.keys()): return {} - elif "programstatus" not in stats.keys(): + elif "programstatus" not in list(stats.keys()): return {} metrics = self.get_icinga_stats(stats["programstatus"]) - if "hoststatus" in stats.keys(): + if "hoststatus" in list(stats.keys()): metrics = dict( - metrics.items() + self.get_host_stats( - stats["hoststatus"]).items()) + list(metrics.items()) + list(self.get_host_stats( + stats["hoststatus"]).items())) - if "servicestatus" in stats.keys(): + if "servicestatus" in list(stats.keys()): metrics = dict( - metrics.items() + self.get_svc_stats( - stats["servicestatus"]).items()) + list(metrics.items()) + list(self.get_svc_stats( + stats["servicestatus"]).items())) - for metric in metrics.keys(): + for metric in list(metrics.keys()): self.log.debug("Publishing '%s %s'.", metric, metrics[metric]) self.publish(metric, metrics[metric]) @@ -68,12 +68,12 @@ def get_default_config(self): def get_icinga_stats(self, app_stats): """ Extract metrics from 'programstatus' """ stats = {} - stats = dict(stats.items() + self._get_active_stats(app_stats).items()) - stats = dict(stats.items() + self._get_cached_stats(app_stats).items()) + stats = dict(list(stats.items()) + list(self._get_active_stats(app_stats).items())) + stats = dict(list(stats.items()) + list(self._get_cached_stats(app_stats).items())) stats = dict( - stats.items() + self._get_command_execution(app_stats).items()) + list(stats.items()) + list(self._get_command_execution(app_stats).items())) stats = dict( - stats.items() + 
self._get_externalcmd_stats(app_stats).items())
+            list(stats.items()) + list(self._get_externalcmd_stats(app_stats).items()))
         stats["uptime"] = self._get_uptime(app_stats)
 
         return stats
@@ -106,7 +106,7 @@ def parse_stats_file(self, file_name):
             stats["programstatus"] = tmp_dict
         else:
             entity_type = tmp_dict["_type"]
-            if entity_type not in stats.keys():
+            if entity_type not in list(stats.keys()):
                 stats[entity_type] = []
 
             stats[entity_type].append(tmp_dict)
@@ -221,7 +221,7 @@ def _get_active_stats(self, app_stats):
             "active_ondemand_service_check_stats",
         ]
         for app_key in app_keys:
-            if app_key not in app_stats.keys():
+            if app_key not in list(app_stats.keys()):
                 continue
 
             splitted = app_key.split("_")
@@ -245,7 +245,7 @@ def _get_cached_stats(self, app_stats):
             "cached_service_check_stats",
         ]
         for app_key in app_keys:
-            if app_key not in app_stats.keys():
+            if app_key not in list(app_stats.keys()):
                 continue
 
             (x01, x05, x15) = self._convert_tripplet(app_stats[app_key])
@@ -268,7 +268,7 @@ def _get_command_execution(self, app_stats):
             "parallel_host_check_stats",
         ]
         for app_key in app_keys:
-            if app_key not in app_stats.keys():
+            if app_key not in list(app_stats.keys()):
                 continue
 
             scratch = app_key.split("_")[0]
@@ -300,19 +300,19 @@ def _get_externalcmd_stats(self, app_stats):
             "x15": "external_command.15",
         }
         stats = {}
-        if khigh in app_stats.keys() and str(app_stats[khigh]).isdigit():
+        if khigh in list(app_stats.keys()) and str(app_stats[khigh]).isdigit():
             key = aliases[khigh]
             stats[key] = int(app_stats[khigh])
 
-        if ktotal in app_stats.keys() and str(app_stats[ktotal].isdigit()):
+        if ktotal in list(app_stats.keys()) and str(app_stats[ktotal]).isdigit():
             key = aliases[ktotal]
             stats[key] = int(app_stats[ktotal])
 
-        if kused in app_stats.keys() and str(app_stats[kused].isdigit()):
+        if kused in list(app_stats.keys()) and str(app_stats[kused]).isdigit():
             key = aliases[kused]
             stats[key] = int(app_stats[ktotal])
 
-        if kstats in app_stats.keys():
+        if kstats in
list(app_stats.keys()): (x01, x05, x15) = self._convert_tripplet(app_stats[kstats]) stats[aliases["x01"]] = x01 stats[aliases["x05"]] = x05 @@ -322,7 +322,7 @@ def _get_externalcmd_stats(self, app_stats): def _get_uptime(self, app_stats): """ Return Icinga's uptime """ - if "program_start" not in app_stats.keys(): + if "program_start" not in list(app_stats.keys()): return 0 if not app_stats["program_start"].isdigit(): @@ -370,11 +370,11 @@ def _sanitize_entity(self, entity): "passive_checks_enabled": "passive_checks", } sane = {} - for akey in aliases.keys(): + for akey in list(aliases.keys()): sane[aliases[akey]] = None - aliases_keys = aliases.keys() - for key in entity.keys(): + aliases_keys = list(aliases.keys()) + for key in list(entity.keys()): if key not in aliases_keys: continue diff --git a/src/collectors/interrupt/interrupt.py b/src/collectors/interrupt/interrupt.py index ccf52f8b0..2381266a2 100644 --- a/src/collectors/interrupt/interrupt.py +++ b/src/collectors/interrupt/interrupt.py @@ -68,7 +68,7 @@ def collect(self): metric_value = data[1] self.publish(metric_name, self.derivative(metric_name, - long(metric_value), + int(metric_value), counter)) else: if len(data[0]) == cpuCount + 1: @@ -90,7 +90,7 @@ def collect(self): metric_name_node = metric_name + 'CPU' + str(index - 1) value = int(self.derivative(metric_name_node, - long(value), counter)) + int(value), counter)) total += value self.publish(metric_name_node, value) diff --git a/src/collectors/interrupt/soft.py b/src/collectors/interrupt/soft.py index 31626fc88..6f1cf67e1 100644 --- a/src/collectors/interrupt/soft.py +++ b/src/collectors/interrupt/soft.py @@ -68,7 +68,7 @@ def collect(self): metric_value = int(data[1]) metric_value = int(self.derivative( metric_name, - long(metric_value), counter)) + int(metric_value), counter)) self.publish(metric_name, metric_value) for i in range(2, len(data)): @@ -76,7 +76,7 @@ def collect(self): metric_value = int(data[i]) metric_value = int(self.derivative( 
metric_name, - long(metric_value), counter)) + int(metric_value), counter)) self.publish(metric_name, metric_value) # Close file diff --git a/src/collectors/interrupt/test/testinterrupt.py b/src/collectors/interrupt/test/testinterrupt.py index 3f978c373..4889ff424 100644 --- a/src/collectors/interrupt/test/testinterrupt.py +++ b/src/collectors/interrupt/test/testinterrupt.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from interrupt import InterruptCollector diff --git a/src/collectors/interrupt/test/testsoft.py b/src/collectors/interrupt/test/testsoft.py index 7c9eb2afd..5e8dba968 100644 --- a/src/collectors/interrupt/test/testsoft.py +++ b/src/collectors/interrupt/test/testsoft.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from soft import SoftInterruptCollector diff --git a/src/collectors/iodrivesnmp/iodrivesnmp.py b/src/collectors/iodrivesnmp/iodrivesnmp.py index ab45c6f9c..229c11285 100644 --- a/src/collectors/iodrivesnmp/iodrivesnmp.py +++ b/src/collectors/iodrivesnmp/iodrivesnmp.py @@ -106,7 +106,7 @@ def collect_snmp(self, device, host, port, community): # Set timestamp timestamp = time.time() - for k, v in self.IODRIVE_STATS.items(): + for k, v in list(self.IODRIVE_STATS.items()): # Get Metric Name and Value metricName = '.'.join([k]) metricValue = int(self.get(v, host, port, community)[v]) @@ -120,7 +120,7 @@ def collect_snmp(self, device, host, port, community): # Publish Metric self.publish_metric(metric) - for k, v in self.IODRIVE_BYTE_STATS.items(): + for k, v in list(self.IODRIVE_BYTE_STATS.items()): # Get Metric Name and Value metricName = '.'.join([k]) metricValue = int(self.get(v, host, port, 
community)[v]) diff --git a/src/collectors/ip/ip.py b/src/collectors/ip/ip.py index 2d6adb816..42089efca 100644 --- a/src/collectors/ip/ip.py +++ b/src/collectors/ip/ip.py @@ -103,15 +103,15 @@ def collect(self): data = data.split() # Zip up the keys and values - for i in xrange(1, len(header)): + for i in range(1, len(header)): metrics[header[i]] = data[i] - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): if ((len(self.config['allowed_names']) > 0 and metric_name not in self.config['allowed_names'])): continue - value = long(metrics[metric_name]) + value = int(metrics[metric_name]) # Publish the metric if metric_name in self.GAUGES: diff --git a/src/collectors/ip/test/testip.py b/src/collectors/ip/test/testip.py index 4ac373d92..34b507aa7 100644 --- a/src/collectors/ip/test/testip.py +++ b/src/collectors/ip/test/testip.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from ip import IPCollector @@ -116,7 +116,7 @@ def test_should_work_with_all_data(self, publish_mock): 'FragCreates': 0, } - self.setUp(allowed_names=metrics.keys()) + self.setUp(allowed_names=list(metrics.keys())) IPCollector.PROC = [ self.getFixturePath('proc_net_snmp_1'), diff --git a/src/collectors/ipvs/ipvs.py b/src/collectors/ipvs/ipvs.py index e5a4dd868..0108f3679 100644 --- a/src/collectors/ipvs/ipvs.py +++ b/src/collectors/ipvs/ipvs.py @@ -72,8 +72,7 @@ def collect(self): p.wait() if p.returncode == 255: - self.statcommand = filter( - lambda a: a != '--exact', self.statcommand) + self.statcommand = [a for a in self.statcommand if a != '--exact'] p = subprocess.Popen(self.statcommand, stdout=subprocess.PIPE).communicate()[0][:-1] @@ -101,7 +100,7 @@ def collect(self): else: continue - for metric, column in columns.iteritems(): + for metric, column in columns.items(): metric_name = ".".join([external, backend, metric]) # 
metric_value = int(row[column]) value = row[column] @@ -135,11 +134,11 @@ def collect(self): if row[0] == "TCP" or row[0] == "UDP": if total: - for metric, value in total.iteritems(): + for metric, value in total.items(): self.publish( ".".join([external, "total", metric]), value) - for k in columns.keys(): + for k in list(columns.keys()): total[k] = 0.0 external = row[0] + "_" + string.replace(row[1], ".", "_") @@ -149,7 +148,7 @@ def collect(self): else: continue - for metric, column in columns.iteritems(): + for metric, column in columns.items(): metric_name = ".".join([external, backend, metric]) # metric_value = int(row[column]) value = row[column] @@ -167,5 +166,5 @@ def collect(self): self.publish(metric_name, metric_value) if total: - for metric, value in total.iteritems(): + for metric, value in total.items(): self.publish(".".join([external, "total", metric]), value) diff --git a/src/collectors/jbossapi/jbossapi.py b/src/collectors/jbossapi/jbossapi.py index f3646ae0b..a65e02a9c 100644 --- a/src/collectors/jbossapi/jbossapi.py +++ b/src/collectors/jbossapi/jbossapi.py @@ -363,7 +363,7 @@ def get_data(self, op_type, current_host, current_port, current_proto, return output def is_number(self, value): - return (isinstance(value, (int, long, float)) and + return (isinstance(value, (int, float)) and not isinstance(value, bool)) def string_fix(self, s): diff --git a/src/collectors/jcollectd/collectd_network.py b/src/collectors/jcollectd/collectd_network.py index 0beba967a..6fe8e1213 100644 --- a/src/collectors/jcollectd/collectd_network.py +++ b/src/collectors/jcollectd/collectd_network.py @@ -32,12 +32,12 @@ if sys.version_info.major == 2: # Python 2.7 io.StringIO does not like unicode - from StringIO import StringIO + from io import StringIO else: try: from io import StringIO except ImportError: - from cStringIO import StringIO + from io import StringIO DEFAULT_PORT = 25826 @@ -168,7 +168,7 @@ class Data(object): typeinstance = None def __init__(self, **kw): 
-        [setattr(self, k, v) for k, v in kw.items()]
+        [setattr(self, k, v) for k, v in list(kw.items())]
 
     @property
     def datetime(self):
@@ -367,7 +367,7 @@ def interpret(self, iterable=None, poll_interval=0.2):
         if iterable is None:
             return None
-        if isinstance(iterable, basestring):
+        if isinstance(iterable, str):
             iterable = self.decode(poll_interval, iterable)
 
         return interpret_opcodes(iterable)
diff --git a/src/collectors/jcollectd/jcollectd.py b/src/collectors/jcollectd/jcollectd.py
index 42791d96e..57debd91c 100644
--- a/src/collectors/jcollectd/jcollectd.py
+++ b/src/collectors/jcollectd/jcollectd.py
@@ -25,7 +25,8 @@
 import threading
 import re
-import Queue
+import queue
+
 import diamond.collector
 import diamond.metric
@@ -63,7 +64,7 @@ def collect(self):
             try:
                 dp = q.get(False)
                 metric = self.make_metric(dp)
-            except Queue.Empty:
+            except queue.Empty:
                 break
             self.publish_metric(metric)
@@ -118,7 +119,7 @@ def __init__(self, host, port, log, poll_interval=0.4):
         self.log = log
         self.poll_interval = poll_interval
 
-        self.queue = Queue.Queue()
+        self.queue = queue.Queue()
 
     def run(self):
         self.log.info('ListenerThread started on {}:{}(udp)'.format(
@@ -147,7 +148,7 @@ def send_to_collector(self, items):
             try:
                 metric = self.transform(item)
                 self.queue.put(metric)
-            except Queue.Full:
+            except queue.Full:
                 self.log.error('Queue to collector is FULL')
             except Exception as e:
                 self.log.error('B00M!
type={}, exception={}'.format( diff --git a/src/collectors/jolokia/cassandra_jolokia.py b/src/collectors/jolokia/cassandra_jolokia.py index cdefc3ca2..4075a5945 100644 --- a/src/collectors/jolokia/cassandra_jolokia.py +++ b/src/collectors/jolokia/cassandra_jolokia.py @@ -56,7 +56,7 @@ def __init__(self, *args, **kwargs): def update_config(self, config): if 'percentiles' in config: - self.percentiles = map(int, config['percentiles']) + self.percentiles = list(map(int, config['percentiles'])) if 'histogram_regex' in config: self.histogram_regex = re.compile(config['histogram_regex']) diff --git a/src/collectors/jolokia/jolokia.py b/src/collectors/jolokia/jolokia.py index 37c0adb89..4865cfcd5 100644 --- a/src/collectors/jolokia/jolokia.py +++ b/src/collectors/jolokia/jolokia.py @@ -48,13 +48,14 @@ ``` """ +from urllib.error import HTTPError +from urllib.request import Request, urlopen import diamond.collector import base64 from contextlib import closing import json import re -import urllib -import urllib2 +import urllib.request, urllib.parse, urllib.error class JolokiaCollector(diamond.collector.Collector): @@ -120,7 +121,7 @@ def get_default_config(self): def __init__(self, *args, **kwargs): super(JolokiaCollector, self).__init__(*args, **kwargs) self.mbeans = [] - if isinstance(self.config['mbeans'], basestring): + if isinstance(self.config['mbeans'], str): for mbean in self.config['mbeans'].split('|'): self.mbeans.append(mbean.strip()) elif isinstance(self.config['mbeans'], list): @@ -135,11 +136,11 @@ def __init__(self, *args, **kwargs): ] if isinstance(self.config['rewrite'], dict): self.rewrite.extend([(re.compile(old), new) for old, new in - self.config['rewrite'].items()]) + list(self.config['rewrite'].items())]) self.domains = [] if 'domains' in self.config: - if isinstance(self.config['domains'], basestring): + if isinstance(self.config['domains'], str): for domain in self.config['domains'].split('|'): self.domains.append(domain.strip()) elif 
isinstance(self.config['domains'], list): @@ -169,7 +170,7 @@ def _get_domains(self): listing = self._list_request() try: if listing['status'] == 200: - self.domains = listing['value'].keys() + self.domains = list(listing['value'].keys()) else: self.log.error('Jolokia status %s while retrieving MBean ' 'listing.', listing['status']) @@ -202,7 +203,7 @@ def collect(self): # The reponse was totally empty, or not an expected format self.log.error('Unable to retrieve domain %s.', domain) continue - for k, v in mbeans.iteritems(): + for k, v in mbeans.items(): if self._check_mbean(k): self.collect_bean(k, v) @@ -230,16 +231,16 @@ def _list_request(self): # need some time to process the downloaded metrics, so that's why # timeout is lower than the interval. timeout = max(2, float(self.config['interval']) * 2 / 3) - with closing(urllib2.urlopen(self._create_request(url), + with closing(urlopen(self._create_request(url), timeout=timeout)) as response: return self._read_json(response) - except (urllib2.HTTPError, ValueError) as e: + except (HTTPError, ValueError) as e: self.log.error('Unable to read JSON response: %s', str(e)) return {} def _read_request(self, domain): try: - url_path = '/?%s' % urllib.urlencode({ + url_path = '/?%s' % urllib.parse.urlencode({ 'maxCollectionSize': '0', 'ignoreErrors': 'true', 'canonicalNaming': @@ -253,10 +254,10 @@ def _read_request(self, domain): # need some time to process the downloaded metrics, so that's why # timeout is lower than the interval. 
timeout = max(2, float(self.config['interval']) * 2 / 3) - with closing(urllib2.urlopen(self._create_request(url), + with closing(urlopen(self._create_request(url), timeout=timeout)) as response: return self._read_json(response) - except (urllib2.HTTPError, ValueError): + except (HTTPError, ValueError): self.log.error('Unable to read JSON response.') return {} @@ -268,11 +269,11 @@ def _escape_domain(self, domain): domain = re.sub('!', '!!', domain) domain = re.sub('/', '!/', domain) domain = re.sub('"', '!"', domain) - domain = urllib.quote(domain) + domain = urllib.parse.quote(domain) return domain def _create_request(self, url): - req = urllib2.Request(url) + req = Request(url) username = self.config["username"] password = self.config["password"] if username is not None and password is not None: @@ -287,8 +288,8 @@ def clean_up(self, text): return text def collect_bean(self, prefix, obj): - for k, v in obj.iteritems(): - if type(v) in [int, float, long]: + for k, v in obj.items(): + if type(v) in [int, float]: key = "%s.%s" % (prefix, k) key = self.clean_up(key) if key != "": diff --git a/src/collectors/kafkastat/kafkastat.py b/src/collectors/kafkastat/kafkastat.py index 81f5a1429..afd68aa3d 100644 --- a/src/collectors/kafkastat/kafkastat.py +++ b/src/collectors/kafkastat/kafkastat.py @@ -8,9 +8,10 @@ * urllib2 * xml.etree """ -import urllib2 +from urllib.error import URLError +from urllib.request import urlopen -from urllib import urlencode +from urllib.parse import urlencode try: from xml.etree import ElementTree @@ -31,7 +32,7 @@ class KafkaCollector(diamond.collector.Collector): 'float': float, 'int': int, 'java.lang.Object': float, - 'long': long, + 'long': int, } def get_default_config_help(self): @@ -70,8 +71,8 @@ def _get(self, path, query_args=None): path, urlencode(qargs)) try: - response = urllib2.urlopen(url) - except urllib2.URLError as err: + response = urlopen(url) + except URLError as err: self.log.error("%s: %s", url, err) return None @@ -185,5 
+186,5 @@ def collect(self): metrics.update(stats) # Publish stats - for metric, value in metrics.iteritems(): + for metric, value in metrics.items(): self.publish(metric, value) diff --git a/src/collectors/kafkastat/test/testkafka.py b/src/collectors/kafkastat/test/testkafka.py index ce3fc2f23..b2461ba69 100755 --- a/src/collectors/kafkastat/test/testkafka.py +++ b/src/collectors/kafkastat/test/testkafka.py @@ -1,8 +1,7 @@ #!/usr/bin/python # coding=utf-8 ############################################################################### -import urllib2 -from urlparse import urlparse, parse_qs +from urllib import urlparse, parse_qs try: from xml.etree import ElementTree @@ -62,7 +61,7 @@ def test_get(self, urlopen_mock): @run_only_if_ElementTree_is_available @patch('urllib2.urlopen') def test_get_httperror(self, urlopen_mock): - urlopen_mock.side_effect = urllib2.URLError('BOOM') + urlopen_mock.side_effect = urllib.error.URLError('BOOM') result = self.collector._get('/path') @@ -109,10 +108,10 @@ def test_query_mbean(self, get_mock): get_mock.return_value = self._get_xml_fixture('mbean.xml') expected_metrics = { - 'kafka.logs.mytopic-1.CurrentOffset': long('213500615'), - 'kafka.logs.mytopic-1.NumAppendedMessages': long('224634137'), + 'kafka.logs.mytopic-1.CurrentOffset': int('213500615'), + 'kafka.logs.mytopic-1.NumAppendedMessages': int('224634137'), 'kafka.logs.mytopic-1.NumberOfSegments': int('94'), - 'kafka.logs.mytopic-1.Size': long('50143615339'), + 'kafka.logs.mytopic-1.Size': int('50143615339'), } metrics = self.collector.query_mbean('kafka:type=kafka.logs.mytopic-1') @@ -125,10 +124,10 @@ def test_query_mbean_with_prefix(self, get_mock): get_mock.return_value = self._get_xml_fixture('mbean.xml') expected_metrics = { - 'some.prefix.CurrentOffset': long('213500615'), - 'some.prefix.NumAppendedMessages': long('224634137'), + 'some.prefix.CurrentOffset': int('213500615'), + 'some.prefix.NumAppendedMessages': int('224634137'), 'some.prefix.NumberOfSegments': 
int('94'), - 'some.prefix.Size': long('50143615339'), + 'some.prefix.Size': int('50143615339'), } metrics = self.collector.query_mbean('kafka:type=kafka.logs.mytopic-0', diff --git a/src/collectors/kvm/test/testkvm.py b/src/collectors/kvm/test/testkvm.py index f1f8cd951..63c71ba25 100644 --- a/src/collectors/kvm/test/testkvm.py +++ b/src/collectors/kvm/test/testkvm.py @@ -11,9 +11,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from kvm import KVMCollector diff --git a/src/collectors/libvirtkvm/libvirtkvm.py b/src/collectors/libvirtkvm/libvirtkvm.py index 9b95fd7c5..f6841931d 100644 --- a/src/collectors/libvirtkvm/libvirtkvm.py +++ b/src/collectors/libvirtkvm/libvirtkvm.py @@ -125,36 +125,36 @@ def collect(self): # Disk stats disks = self.get_disk_devices(dom) accum = {} - for stat in self.blockStats.keys(): + for stat in list(self.blockStats.keys()): accum[stat] = 0 for disk in disks: stats = dom.blockStats(disk) - for stat in self.blockStats.keys(): + for stat in list(self.blockStats.keys()): idx = self.blockStats[stat] val = stats[idx] accum[stat] += val self.publish('block.%s.%s' % (disk, stat), val, instance=name) - for stat in self.blockStats.keys(): + for stat in list(self.blockStats.keys()): self.publish('block.total.%s' % stat, accum[stat], instance=name) # Network stats vifs = self.get_network_devices(dom) accum = {} - for stat in self.vifStats.keys(): + for stat in list(self.vifStats.keys()): accum[stat] = 0 for vif in vifs: stats = dom.interfaceStats(vif) - for stat in self.vifStats.keys(): + for stat in list(self.vifStats.keys()): idx = self.vifStats[stat] val = stats[idx] accum[stat] += val self.publish('net.%s.%s' % (vif, stat), val, instance=name) - for stat in self.vifStats.keys(): + for stat in list(self.vifStats.keys()): self.publish('net.total.%s' % stat, accum[stat], instance=name) 
diff --git a/src/collectors/loadavg/test/testloadavg.py b/src/collectors/loadavg/test/testloadavg.py index 6f237019e..a028a7f84 100644 --- a/src/collectors/loadavg/test/testloadavg.py +++ b/src/collectors/loadavg/test/testloadavg.py @@ -10,9 +10,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from loadavg import LoadAverageCollector diff --git a/src/collectors/mdstat/mdstat.py b/src/collectors/mdstat/mdstat.py index 697f48fd0..b30ab3a7b 100644 --- a/src/collectors/mdstat/mdstat.py +++ b/src/collectors/mdstat/mdstat.py @@ -77,7 +77,7 @@ def traverse(d, metric_name=''): consisting of the hierarchically concatenated keys of its branch. """ - for key, value in d.iteritems(): + for key, value in d.items(): if isinstance(value, dict): if metric_name == '': metric_name_next = key @@ -252,7 +252,7 @@ def _parse_array_status(self, block): array_status_dict_sanitizied = {} # convert all non None values to float - for key, value in array_status_dict.iteritems(): + for key, value in array_status_dict.items(): if not value: continue if key == 'superblock_version': @@ -306,7 +306,7 @@ def _parse_array_bitmap(self, block): array_bitmap_dict_sanitizied = {} # convert all values to int - for key, value in array_bitmap_dict.iteritems(): + for key, value in array_bitmap_dict.items(): if not value: continue array_bitmap_dict_sanitizied[key] = int(value) diff --git a/src/collectors/memcached/memcached.py b/src/collectors/memcached/memcached.py index a36817908..e54c5b644 100644 --- a/src/collectors/memcached/memcached.py +++ b/src/collectors/memcached/memcached.py @@ -155,7 +155,7 @@ def collect(self): hosts = self.config.get('hosts') # Convert a string config value to be an array - if isinstance(hosts, basestring): + if isinstance(hosts, str): hosts = [hosts] for host in hosts: @@ -170,7 +170,7 @@ def collect(self): stats = 
self.get_stats(hostname, port) # figure out what we're configured to get, defaulting to everything - desired = self.config.get('publish', stats.keys()) + desired = self.config.get('publish', list(stats.keys())) # for everything we want for stat in desired: diff --git a/src/collectors/memcached_slab/memcached_slab.py b/src/collectors/memcached_slab/memcached_slab.py index 7128ceb2b..895b52166 100644 --- a/src/collectors/memcached_slab/memcached_slab.py +++ b/src/collectors/memcached_slab/memcached_slab.py @@ -66,10 +66,10 @@ def dict_to_paths(dict_): } """ metrics = {} - for k, v in dict_.iteritems(): + for k, v in dict_.items(): if isinstance(v, dict): submetrics = dict_to_paths(v) - for subk, subv in submetrics.iteritems(): + for subk, subv in submetrics.items(): metrics['.'.join([str(k), str(subk)])] = subv else: metrics[k] = v @@ -115,7 +115,7 @@ def collect(self): unparsed_slab_stats = self.get_slab_stats() slab_stats = parse_slab_stats(unparsed_slab_stats) paths = dict_to_paths(slab_stats) - for path, value in paths.iteritems(): + for path, value in paths.items(): # Add path and prefix to metric (e.g. 
# 'servers.cache-main-01.memchached_slab') full_path = self.get_metric_path(path) diff --git a/src/collectors/memory/test/testmemory.py b/src/collectors/memory/test/testmemory.py index 4ea45e808..bb8e72c9f 100644 --- a/src/collectors/memory/test/testmemory.py +++ b/src/collectors/memory/test/testmemory.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from memory import MemoryCollector diff --git a/src/collectors/memory_cgroup/memory_cgroup.py b/src/collectors/memory_cgroup/memory_cgroup.py index abbc3f653..e07be6bd4 100644 --- a/src/collectors/memory_cgroup/memory_cgroup.py +++ b/src/collectors/memory_cgroup/memory_cgroup.py @@ -112,8 +112,8 @@ def collect(self): break # create metrics from collected utimes and stimes for cgroups - for parent, cpuacct in results.iteritems(): - for key, value in cpuacct.iteritems(): + for parent, cpuacct in results.items(): + for key, value in cpuacct.items(): metric_name = '.'.join([parent, key]) self.publish(metric_name, value, metric_type='GAUGE') return True diff --git a/src/collectors/memory_cgroup/test/testmemory_cgroup.py b/src/collectors/memory_cgroup/test/testmemory_cgroup.py index 3981e1595..1b981a4b8 100644 --- a/src/collectors/memory_cgroup/test/testmemory_cgroup.py +++ b/src/collectors/memory_cgroup/test/testmemory_cgroup.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from memory_cgroup import MemoryCgroupCollector @@ -112,9 +112,9 @@ def test_should_not_include_filtered_metrics(self, publish_mock): 'lxc.testcontainer.total_swap': 1, } [self.assertPublished(publish_mock, k, v) - for k, v in should_be_published.iteritems()] + for k, v in should_be_published.items()] 
[self.assertUnpublished(publish_mock, k, v) - for k, v in should_not_be_published.iteritems()] + for k, v in should_not_be_published.items()] if __name__ == "__main__": unittest.main() diff --git a/src/collectors/memory_docker/memory_docker.py b/src/collectors/memory_docker/memory_docker.py index 3d526ed14..09b48925c 100644 --- a/src/collectors/memory_docker/memory_docker.py +++ b/src/collectors/memory_docker/memory_docker.py @@ -36,7 +36,7 @@ def collect(self): return super(MemoryDockerCollector, self).collect() def publish(self, metric_name, value, metric_type): - for container_id, container_name in self.containers.items(): + for container_id, container_name in list(self.containers.items()): metric_name = metric_name.replace( 'docker.' + container_id + '.', 'docker.' + container_name + '.') diff --git a/src/collectors/memory_docker/test/testmemory_docker.py b/src/collectors/memory_docker/test/testmemory_docker.py index 00cdfa5b4..f51c131d2 100644 --- a/src/collectors/memory_docker/test/testmemory_docker.py +++ b/src/collectors/memory_docker/test/testmemory_docker.py @@ -10,9 +10,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO try: from docker import Client @@ -29,10 +29,10 @@ fixtures.append([root, dirnames, filenames]) docker_fixture = [ - {u'Id': u'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5', - u'Names': [u'/testcontainer']}, - {u'Id': u'9c151939e20682b924d7299875e94a4aabbe946b30b407f89e276507432c625b', - u'Names': None}] + {'Id': 'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5', + 'Names': ['/testcontainer']}, + {'Id': '9c151939e20682b924d7299875e94a4aabbe946b30b407f89e276507432c625b', + 'Names': None}] def run_only_if_docker_client_is_available(func): diff --git a/src/collectors/memory_lxc/memory_lxc.py b/src/collectors/memory_lxc/memory_lxc.py index 84b980920..902bc52b0 100644 --- 
a/src/collectors/memory_lxc/memory_lxc.py +++ b/src/collectors/memory_lxc/memory_lxc.py @@ -60,7 +60,7 @@ def collect(self): self.log.debug("Trying to collect from %s", filename) collected[metric_name] = self._read_file(filename) - for key in collected.keys(): + for key in list(collected.keys()): if collected[key] is None: continue diff --git a/src/collectors/mesos/mesos.py b/src/collectors/mesos/mesos.py index 16e6d3f6e..b78dceb00 100644 --- a/src/collectors/mesos/mesos.py +++ b/src/collectors/mesos/mesos.py @@ -18,10 +18,10 @@ """ import copy +from urllib.request import urlopen import diamond.collector import json -import urllib2 -from urlparse import urlparse +from urllib.parse import urlparse import diamond.collector @@ -102,7 +102,7 @@ def _add_cpu_usage(self, cur_read): """Compute cpu usage based on cpu time spent compared to elapsed time """ - for executor_id, cur_data in cur_read.items(): + for executor_id, cur_data in list(cur_read.items()): if executor_id in self.executors_prev_read: prev_data = self.executors_prev_read[executor_id] prev_stats = prev_data['statistics'] @@ -121,7 +121,7 @@ def _add_cpu_usage(self, cur_read): def _add_cpu_percent(self, cur_read): """Compute cpu percent basing on the provided utilisation """ - for executor_id, cur_data in cur_read.items(): + for executor_id, cur_data in list(cur_read.items()): stats = cur_data['statistics'] cpus_limit = stats.get('cpus_limit') cpus_utilisation = stats.get('cpus_utilisation') @@ -132,7 +132,7 @@ def _add_mem_percent(self, cur_read): """Compute memory percent utilisation based on the mem_rss_bytes and mem_limit_bytes """ - for executor_id, cur_data in cur_read.items(): + for executor_id, cur_data in list(cur_read.items()): stats = cur_data['statistics'] mem_rss_bytes = stats.get('mem_rss_bytes') mem_limit_bytes = stats.get('mem_limit_bytes') @@ -197,7 +197,7 @@ def _get(self, path): """ url = self._get_url(path) try: - response = urllib2.urlopen(url) + response = urlopen(url) except Exception 
as err: self.log.error("%s: %s", url, err) return False @@ -239,7 +239,7 @@ def _publish_tasks_statistics(self, result): self._publish(metrics, False) def _publish(self, result, sanitize_executor_id=True): - for executor_id, executor in result.iteritems(): + for executor_id, executor in result.items(): executor_statistics = executor['statistics'] for key in executor_statistics: value = executor_statistics[key] diff --git a/src/collectors/mesos_cgroup/mesos_cgroup.py b/src/collectors/mesos_cgroup/mesos_cgroup.py index d73b64287..c15be20c9 100644 --- a/src/collectors/mesos_cgroup/mesos_cgroup.py +++ b/src/collectors/mesos_cgroup/mesos_cgroup.py @@ -22,9 +22,10 @@ ``` """ +from urllib.error import HTTPError +from urllib.request import urlopen import diamond.collector import json -import urllib2 import os @@ -125,8 +126,8 @@ def get_mesos_state(self): self.config['port'], self.config['mesos_state_path']) - return json.load(urllib2.urlopen(url)) - except (urllib2.HTTPError, ValueError) as err: + return json.load(urlopen(url)) + except (HTTPError, ValueError) as err: self.log.error('Unable to read JSON response: %s' % err) return {} diff --git a/src/collectors/mesos_cgroup/test/testmesos_cgroup.py b/src/collectors/mesos_cgroup/test/testmesos_cgroup.py index 734655f66..b4ca3d9b8 100644 --- a/src/collectors/mesos_cgroup/test/testmesos_cgroup.py +++ b/src/collectors/mesos_cgroup/test/testmesos_cgroup.py @@ -2,7 +2,7 @@ # coding=utf-8 ########################################################################## -from __future__ import print_function + from test import CollectorTestCase from test import get_collector_config from test import unittest diff --git a/src/collectors/mogilefs/mogilefs.py b/src/collectors/mogilefs/mogilefs.py index d43560b04..6afcd3136 100644 --- a/src/collectors/mogilefs/mogilefs.py +++ b/src/collectors/mogilefs/mogilefs.py @@ -41,9 +41,9 @@ def collect(self): for line in out.splitlines()[:-1]: name, var = line.partition(" ")[::2] - 
myvars[name.strip()] = long(var) + myvars[name.strip()] = int(var) - for key, value in myvars.iteritems(): + for key, value in myvars.items(): # Set Metric Name metric_name = key # Set Metric Value diff --git a/src/collectors/mongodb/mongodb.py b/src/collectors/mongodb/mongodb.py index 0a1bc00e0..f30fad51c 100644 --- a/src/collectors/mongodb/mongodb.py +++ b/src/collectors/mongodb/mongodb.py @@ -27,6 +27,7 @@ from diamond.collector import str_to_bool import re import zlib +from functools import reduce try: import pymongo @@ -118,7 +119,7 @@ def collect(self): hosts = self.config.get('hosts') # Convert a string config value to be an array - if isinstance(hosts, basestring): + if isinstance(hosts, str): hosts = [hosts] # we need this for backwards compatibility @@ -353,10 +354,10 @@ def _publish_metrics(self, prev_keys, key, data, publishfn=None): self._publish_metrics(keys, new_key, value) elif isinstance(value, int) or isinstance(value, float): publishfn('.'.join(keys), value) - elif isinstance(value, long): + elif isinstance(value, int): publishfn('.'.join(keys), float(value)) elif isinstance(value, datetime.datetime): - publishfn('.'.join(keys), long(value.strftime('%s'))) + publishfn('.'.join(keys), int(value.strftime('%s'))) def _extract_simple_data(self, data): return { diff --git a/src/collectors/mongodb/test/testmongodb.py b/src/collectors/mongodb/test/testmongodb.py index b0e936099..217680ee1 100644 --- a/src/collectors/mongodb/test/testmongodb.py +++ b/src/collectors/mongodb/test/testmongodb.py @@ -90,7 +90,7 @@ def test_should_publish_nested_keys_for_db_stats(self, def test_should_publish_stats_with_long_type(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.collector.collect() @@ -119,7 +119,7 @@ def test_should_ignore_unneeded_databases(self, def test_should_ignore_unneeded_collections(self, 
publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.connection['db1'].collection_names.return_value = ['collection1', @@ -147,7 +147,7 @@ def test_should_ignore_unneeded_collections(self, def test_should_ignore_replset_status_if_disabled(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.collector.collect() @@ -177,7 +177,7 @@ def test_should_publish_keys_from_real_server_stats(self, for c in publish_mock.call_args_list: m = c[0][0] datapoints_per_metric[m] += 1 - dupes = [m for m, n in datapoints_per_metric.iteritems() if n > 1] + dupes = [m for m, n in datapoints_per_metric.items() if n > 1] self.assertEqual(len(dupes), 0, 'BUG: 1+ point for same metric received: %s' % ', '.join(dupes)) @@ -260,7 +260,7 @@ def test_should_publish_nested_keys_for_db_stats(self, def test_should_publish_stats_with_long_type(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.collector.collect() @@ -291,7 +291,7 @@ def test_should_ignore_unneeded_databases(self, def test_should_ignore_unneeded_collections(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.connection['db1'].collection_names.return_value = ['collection1', @@ -340,7 +340,7 @@ def test_import(self): def test_should_publish_replset_status_if_enabled(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} 
self._annotate_connection(connector_mock, data) self.collector.collect() diff --git a/src/collectors/monit/monit.py b/src/collectors/monit/monit.py index abd3e6d92..2934ca0ff 100644 --- a/src/collectors/monit/monit.py +++ b/src/collectors/monit/monit.py @@ -9,7 +9,8 @@ """ -import urllib2 +from urllib.error import HTTPError +from urllib.request import Request, urlopen import base64 from xml.dom.minidom import parseString @@ -47,15 +48,15 @@ def collect(self): url = 'http://%s:%i/_status?format=xml' % (self.config['host'], int(self.config['port'])) try: - request = urllib2.Request(url) + request = Request(url) # # shouldn't need to check this base64string = base64.encodestring('%s:%s' % ( self.config['user'], self.config['passwd'])).replace('\n', '') request.add_header("Authorization", "Basic %s" % base64string) - response = urllib2.urlopen(request) - except urllib2.HTTPError as err: + response = urlopen(request) + except HTTPError as err: self.log.error("%s: %s", err, url) return diff --git a/src/collectors/mountstats/mountstats.py b/src/collectors/mountstats/mountstats.py index f8d699657..f66503c60 100755 --- a/src/collectors/mountstats/mountstats.py +++ b/src/collectors/mountstats/mountstats.py @@ -60,7 +60,7 @@ class MountStatsCollector(diamond.collector.Collector): def process_config(self): super(MountStatsCollector, self).process_config() self.exclude_filters = self.config['exclude_filters'] - if isinstance(self.exclude_filters, basestring): + if isinstance(self.exclude_filters, str): self.exclude_filters = [self.exclude_filters] if len(self.exclude_filters) > 0: @@ -69,7 +69,7 @@ def process_config(self): self.exclude_reg = None self.include_filters = self.config['include_filters'] - if isinstance(self.include_filters, basestring): + if isinstance(self.include_filters, str): self.include_filters = [self.include_filters] if len(self.include_filters) > 0: @@ -161,12 +161,12 @@ def collect(self): elif tokens[0] == 'events:': for i in range(0, 
len(self.EVENTS_MAP)): metric_name = "%s.events.%s" % (path, self.EVENTS_MAP[i]) - metric_value = long(tokens[i + 1]) + metric_value = int(tokens[i + 1]) self.publish_counter(metric_name, metric_value) elif tokens[0] == 'bytes:': for i in range(0, len(self.BYTES_MAP)): metric_name = "%s.bytes.%s" % (path, self.BYTES_MAP[i]) - metric_value = long(tokens[i + 1]) + metric_value = int(tokens[i + 1]) self.publish_counter(metric_name, metric_value) elif tokens[0] == 'xprt:': proto = tokens[1] @@ -177,13 +177,13 @@ def collect(self): for i in range(0, len(self.XPRT_MAP[proto])): metric_name = "%s.xprt.%s.%s" % (path, proto, self.XPRT_MAP[proto][i]) - metric_value = long(tokens[i + 2]) + metric_value = int(tokens[i + 2]) self.publish_counter(metric_name, metric_value) elif tokens[0][:-1] in self.RPCS_MAP: rpc = tokens[0][:-1] - ops = long(tokens[1]) - rtt = long(tokens[7]) - exe = long(tokens[8]) + ops = int(tokens[1]) + rtt = int(tokens[7]) + exe = int(tokens[8]) metric_fmt = "%s.rpc.%s.%s" ops_name = metric_fmt % (path, rpc.lower(), 'ops') diff --git a/src/collectors/mysqlstat/mysql55.py b/src/collectors/mysqlstat/mysql55.py index 1b9938e45..ac80cd981 100644 --- a/src/collectors/mysqlstat/mysql55.py +++ b/src/collectors/mysqlstat/mysql55.py @@ -16,7 +16,7 @@ """ -from __future__ import division + try: import MySQLdb diff --git a/src/collectors/mysqlstat/mysqlstat.py b/src/collectors/mysqlstat/mysqlstat.py index 1f800d049..7f15f9e52 100644 --- a/src/collectors/mysqlstat/mysqlstat.py +++ b/src/collectors/mysqlstat/mysqlstat.py @@ -342,7 +342,7 @@ def get_stats(self, params): try: rows = self.get_db_master_status() for row_master in rows: - for key, value in row_master.items(): + for key, value in list(row_master.items()): if key in self._IGNORE_KEYS: continue try: @@ -358,7 +358,7 @@ def get_stats(self, params): try: rows = self.get_db_slave_status() for row_slave in rows: - for key, value in row_slave.items(): + for key, value in list(row_slave.items()): if key in 
self._IGNORE_KEYS: continue try: @@ -377,7 +377,7 @@ def get_stats(self, params): innodb_status_output = rows[0] - todo = self.innodb_status_keys.keys() + todo = list(self.innodb_status_keys.keys()) for line in innodb_status_output['Status'].split('\n'): for key in todo: match = self.innodb_status_keys[key].match(line) diff --git a/src/collectors/netapp/netapp.py b/src/collectors/netapp/netapp.py index 9850f5144..2da477005 100644 --- a/src/collectors/netapp/netapp.py +++ b/src/collectors/netapp/netapp.py @@ -33,7 +33,7 @@ """ -from __future__ import print_function + import sys import time import re @@ -225,11 +225,11 @@ def _gen_delta_depend(self, path, derivative, multiplier, prettyname, shortpath = ".".join(path.split(".")[:-1]) basename = path.split(".")[-1] secondary_delta = None - if basename in self.DIVIDERS.keys(): + if basename in list(self.DIVIDERS.keys()): mateKey = ".".join([shortpath, self.DIVIDERS[basename]]) else: return - if mateKey in derivative.keys(): + if mateKey in list(derivative.keys()): secondary_delta = derivative[mateKey] else: return @@ -272,7 +272,7 @@ def collect(self, device, ip, user, password): # We're only able to query a single object at a time, # so we'll loop over the objects. 
- for na_object in self.METRICS.keys(): + for na_object in list(self.METRICS.keys()): # For easy reference later, generate a new dict for this object LOCALMETRICS = {} @@ -285,7 +285,7 @@ def collect(self, device, ip, user, password): # Keep track of how long has passed since we checked last CollectTime = time.time() time_delta = None - if na_object in self.LastCollectTime.keys(): + if na_object in list(self.LastCollectTime.keys()): time_delta = CollectTime - self.LastCollectTime[na_object] self.LastCollectTime[na_object] = CollectTime @@ -293,7 +293,7 @@ def collect(self, device, ip, user, password): query = NaServer.NaElement("perf-object-get-instances-iter-start") query.child_add_string("objectname", na_object) counters = NaServer.NaElement("counters") - for metric in LOCALMETRICS.keys(): + for metric in list(LOCALMETRICS.keys()): counters.child_add_string("counter", metric) query.child_add(counters) @@ -369,17 +369,17 @@ def collect(self, device, ip, user, password): # and saves a new point, we'll need to store all derivatives # for local reference. 
derivative = {} - for key in raw.keys(): + for key in list(raw.keys()): derivative[key] = self.derivative(key, raw[key]) - for key in raw.keys(): + for key in list(raw.keys()): metricname = key.split(".")[-1] prettyname = LOCALMETRICS[metricname]["prettyname"] multiplier = LOCALMETRICS[metricname]["multiplier"] if metricname in self.DROPMETRICS: continue - elif metricname in self.DIVIDERS.keys(): + elif metricname in list(self.DIVIDERS.keys()): self._gen_delta_depend(key, derivative, multiplier, prettyname, device) else: diff --git a/src/collectors/netapp/netappDisk.py b/src/collectors/netapp/netappDisk.py index 537d2ec2a..19430b567 100644 --- a/src/collectors/netapp/netappDisk.py +++ b/src/collectors/netapp/netappDisk.py @@ -18,7 +18,7 @@ """ -from __future__ import print_function + import diamond.collector import time from diamond.metric import Metric diff --git a/src/collectors/netscalersnmp/netscalersnmp.py b/src/collectors/netscalersnmp/netscalersnmp.py index 8fe942b3d..5b05a74dc 100644 --- a/src/collectors/netscalersnmp/netscalersnmp.py +++ b/src/collectors/netscalersnmp/netscalersnmp.py @@ -173,7 +173,7 @@ def collect_snmp(self, device, host, port, community): timestamp = time.time() # Collect Netscaler System OIDs - for k, v in self.NETSCALER_SYSTEM_GUAGES.items(): + for k, v in list(self.NETSCALER_SYSTEM_GUAGES.items()): # Get Metric Name and Value metricName = '.'.join([k]) metricValue = int(self.get(v, host, port, community)[v]) @@ -185,13 +185,13 @@ def collect_snmp(self, device, host, port, community): self.publish_metric(metric) # Collect Netscaler System Counter OIDs - for k, v in self.NETSCALER_SYSTEM_COUNTERS.items(): + for k, v in list(self.NETSCALER_SYSTEM_COUNTERS.items()): # Get Metric Name and Value metricName = '.'.join([k]) # Get Metric Path metricPath = '.'.join(['devices', device, 'system', metricName]) # Get Metric Value - metricValue = self.derivative(metricPath, long( + metricValue = self.derivative(metricPath, int( self.get(v, host, 
port, community)[v]), self.MAX_VALUE) # Create Metric metric = Metric(metricPath, metricValue, timestamp, 0) @@ -199,8 +199,8 @@ def collect_snmp(self, device, host, port, community): self.publish_metric(metric) # Collect Netscaler Services - serviceNames = [v.strip("\'") for v in self.walk( - self.NETSCALER_SERVICE_NAMES, host, port, community).values()] + serviceNames = [v.strip("\'") for v in list(self.walk( + self.NETSCALER_SERVICE_NAMES, host, port, community).values())] for serviceName in serviceNames: # Get Service Name in OID form @@ -215,8 +215,7 @@ def collect_snmp(self, device, host, port, community): community)[serviceTypeOid].strip("\'")) # Filter excluded service types - if serviceType in map(lambda v: int(v), - self.config.get('exclude_service_type')): + if serviceType in [int(v) for v in self.config.get('exclude_service_type')]: continue # Get Service State @@ -228,11 +227,10 @@ def collect_snmp(self, device, host, port, community): community)[serviceStateOid].strip("\'")) # Filter excluded service states - if serviceState in map(lambda v: int(v), - self.config.get('exclude_service_state')): + if serviceState in [int(v) for v in self.config.get('exclude_service_state')]: continue - for k, v in self.NETSCALER_SERVICE_GUAGES.items(): + for k, v in list(self.NETSCALER_SERVICE_GUAGES.items()): serviceGuageOid = ".".join( [v, self._convert_from_oid(serviceNameOid)]) # Get Metric Name @@ -254,8 +252,8 @@ def collect_snmp(self, device, host, port, community): self.publish_metric(metric) # Collect Netscaler Vservers - vserverNames = [v.strip("\'") for v in self.walk( - self.NETSCALER_VSERVER_NAMES, host, port, community).values()] + vserverNames = [v.strip("\'") for v in list(self.walk( + self.NETSCALER_VSERVER_NAMES, host, port, community).values())] for vserverName in vserverNames: # Get Vserver Name in OID form @@ -270,8 +268,7 @@ def collect_snmp(self, device, host, port, community): community)[vserverTypeOid].strip("\'")) # filter excluded vserver 
types - if vserverType in map(lambda v: int(v), - self.config.get('exclude_vserver_type')): + if vserverType in [int(v) for v in self.config.get('exclude_vserver_type')]: continue # Get Service State @@ -283,11 +280,10 @@ def collect_snmp(self, device, host, port, community): community)[vserverStateOid].strip("\'")) # Filter excluded vserver state - if vserverState in map(lambda v: int(v), - self.config.get('exclude_vserver_state')): + if vserverState in [int(v) for v in self.config.get('exclude_vserver_state')]: continue - for k, v in self.NETSCALER_VSERVER_GUAGES.items(): + for k, v in list(self.NETSCALER_VSERVER_GUAGES.items()): vserverGuageOid = ".".join( [v, self._convert_from_oid(vserverNameOid)]) # Get Metric Name diff --git a/src/collectors/netstat/test/testnetstat.py b/src/collectors/netstat/test/testnetstat.py index c786b41b3..650430e6d 100644 --- a/src/collectors/netstat/test/testnetstat.py +++ b/src/collectors/netstat/test/testnetstat.py @@ -2,7 +2,7 @@ # coding=utf-8 ################################################################################ -from __future__ import print_function + from test import CollectorTestCase from test import get_collector_config from test import unittest diff --git a/src/collectors/network/network.py b/src/collectors/network/network.py index 366f5e070..a3289090a 100644 --- a/src/collectors/network/network.py +++ b/src/collectors/network/network.py @@ -99,7 +99,7 @@ def collect(self): return None network_stats = psutil.network_io_counters(True) - for device in network_stats.keys(): + for device in list(network_stats.keys()): network_stat = network_stats[device] results[device] = {} results[device]['rx_bytes'] = network_stat.bytes_recv @@ -109,12 +109,12 @@ def collect(self): for device in results: stats = results[device] - for s, v in stats.items(): + for s, v in list(stats.items()): # Get Metric Name metric_name = '.'.join([device, s]) # Get Metric Value metric_value = self.derivative(metric_name, - long(v), + int(v), 
diamond.collector.MAX_COUNTER) # Convert rx_bytes and tx_bytes diff --git a/src/collectors/network/test/testnetwork.py b/src/collectors/network/test/testnetwork.py index 24d5873f5..478c11969 100644 --- a/src/collectors/network/test/testnetwork.py +++ b/src/collectors/network/test/testnetwork.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from network import NetworkCollector diff --git a/src/collectors/nfs/nfs.py b/src/collectors/nfs/nfs.py index ac8f12ad2..018af57f0 100644 --- a/src/collectors/nfs/nfs.py +++ b/src/collectors/nfs/nfs.py @@ -215,9 +215,9 @@ def collect(self): # Close File file.close() - for stat in results.keys(): + for stat in list(results.keys()): metric_name = stat - metric_value = long(float(results[stat])) + metric_value = int(float(results[stat])) metric_value = self.derivative(metric_name, metric_value) self.publish(metric_name, metric_value, precision=3) return True diff --git a/src/collectors/nfs/test/testnfs.py b/src/collectors/nfs/test/testnfs.py index 55b9c0078..26a3d7c2a 100644 --- a/src/collectors/nfs/test/testnfs.py +++ b/src/collectors/nfs/test/testnfs.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from nfs import NfsCollector diff --git a/src/collectors/nfsd/nfsd.py b/src/collectors/nfsd/nfsd.py index 63d4b26f3..f9a90d5f9 100644 --- a/src/collectors/nfsd/nfsd.py +++ b/src/collectors/nfsd/nfsd.py @@ -189,9 +189,9 @@ def collect(self): # Close File file.close() - for stat in results.keys(): + for stat in list(results.keys()): metric_name = '.' 
+ stat - metric_value = long(float(results[stat])) + metric_value = int(float(results[stat])) metric_value = self.derivative(metric_name, metric_value) self.publish(metric_name, metric_value, precision=3) return True diff --git a/src/collectors/nfsd/test/testnfsd.py b/src/collectors/nfsd/test/testnfsd.py index f4de04989..23cb0e7d1 100644 --- a/src/collectors/nfsd/test/testnfsd.py +++ b/src/collectors/nfsd/test/testnfsd.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from nfsd import NfsdCollector diff --git a/src/collectors/nginx/nginx.py b/src/collectors/nginx/nginx.py index 78faa2b98..032a1d6dc 100644 --- a/src/collectors/nginx/nginx.py +++ b/src/collectors/nginx/nginx.py @@ -56,7 +56,7 @@ """ -import urllib2 +import urllib.request import re import diamond.collector import json @@ -217,9 +217,9 @@ def collect(self): int(self.config['req_port']), self.config['req_path']) - req = urllib2.Request(url=url, headers=headers) + req = urllib.request.Request(url=url, headers=headers) try: - handle = urllib2.urlopen(req) + handle = urllib.request.urlopen(req) # Test for json payload; indicates nginx+ if handle.info().gettype() == 'application/json': diff --git a/src/collectors/ntp/ntp.py b/src/collectors/ntp/ntp.py index ce2221a78..a2b684f1d 100644 --- a/src/collectors/ntp/ntp.py +++ b/src/collectors/ntp/ntp.py @@ -88,7 +88,7 @@ def get_ntpdate_stats(self): data[metric_name] = {'val': offset, 'precision': self.config['precision']} - return data.items() + return list(data.items()) def collect(self): for stat, v in self.get_ntpdate_stats(): diff --git a/src/collectors/ntpd/ntpd.py b/src/collectors/ntpd/ntpd.py index bed84ffcf..cda3231f1 100644 --- a/src/collectors/ntpd/ntpd.py +++ b/src/collectors/ntpd/ntpd.py @@ -96,7 +96,7 @@ def convert_to_second(when_ntpd_ouput): if data['when']['val'].endswith(('m', 'h', 'd')): 
data['when']['val'] = convert_to_second(data['when']['val']) - return data.items() + return list(data.items()) def get_ntpdc_kerninfo_output(self): return self.run_command([self.config['ntpdc_bin'], '-c', 'kerninfo']) @@ -121,7 +121,7 @@ def get_ntpdc_kerninfo_stats(self): elif key == 'status': data['status'] = {'val': val, 'precision': 0} - return data.items() + return list(data.items()) def get_ntpdc_sysinfo_output(self): return self.run_command([self.config['ntpdc_bin'], '-c', 'sysinfo']) @@ -143,7 +143,7 @@ def get_ntpdc_sysinfo_stats(self): except Exception: pass - return data.items() + return list(data.items()) def collect(self): for stat, v in self.get_ntpq_stats(): diff --git a/src/collectors/numa/numa.py b/src/collectors/numa/numa.py index 0ffeeadba..c6c44dd43 100644 --- a/src/collectors/numa/numa.py +++ b/src/collectors/numa/numa.py @@ -13,7 +13,7 @@ from re import compile as re_compile import logging -node_re = re_compile('(?P<node>^node \d+ (free|size)): (?P<size>\d+) \MB') +node_re = re_compile('(?P<node>^node \d+ (free|size)): (?P<size>\d+) MB') class NumaCollector(diamond.collector.Collector): diff --git a/src/collectors/nvidia_gpu/nvidia_gpu.py b/src/collectors/nvidia_gpu/nvidia_gpu.py index 2f1cfb96b..89524c72d 100644 --- a/src/collectors/nvidia_gpu/nvidia_gpu.py +++ b/src/collectors/nvidia_gpu/nvidia_gpu.py @@ -11,7 +11,6 @@ * nvidia-ml-py (Optional) """ -from itertools import izip try: import pynvml USE_PYTHON_BINDING = True @@ -71,7 +70,7 @@ def collect_via_nvidia_smi(self, stats_config): stats = result.strip().split(',') assert len(stats) == len(stats_config) index = stats[0] - for stat_name, metric in izip(stats_config[1:], stats[1:]): + for stat_name, metric in zip(stats_config[1:], stats[1:]): metric_name = 'gpu_{index}.{stat_name}'.format( index=str(index), stat_name=stat_name @@ -89,7 +88,7 @@ def collect_via_pynvml(self, stats_config): pynvml.nvmlInit() device_count = pynvml.nvmlDeviceGetCount() - for device_index in xrange(device_count): + for device_index in 
range(device_count): handle = pynvml.nvmlDeviceGetHandleByIndex(device_index) memoryInfo = pynvml.nvmlDeviceGetMemoryInfo(handle) utilizationRates = pynvml.nvmlDeviceGetUtilizationRates(handle) diff --git a/src/collectors/onewire/onewire.py b/src/collectors/onewire/onewire.py index d3c40d6fd..a0b3553f7 100644 --- a/src/collectors/onewire/onewire.py +++ b/src/collectors/onewire/onewire.py @@ -58,11 +58,11 @@ def collect(self): if '.' in ld: self.read_values(ld, self.config['scan'], metrics) - for oid, files in self.config.iteritems(): + for oid, files in self.config.items(): if oid[:3] == 'id:': self.read_values(oid[3:], files, metrics) - for fn, fv in metrics.iteritems(): + for fn, fv in metrics.items(): self.publish(fn, fv, 2) def read_values(self, oid, files, metrics): @@ -74,7 +74,7 @@ def read_values(self, oid, files, metrics): oid_path = os.path.join(self.config['owfs'], oid) oid = oid.replace('.', '_') - for fn, alias in files.iteritems(): + for fn, alias in files.items(): fv = os.path.join(oid_path, fn) if os.path.isfile(fv): try: diff --git a/src/collectors/openldap/openldap.py b/src/collectors/openldap/openldap.py index 0a24b18e1..12086e557 100644 --- a/src/collectors/openldap/openldap.py +++ b/src/collectors/openldap/openldap.py @@ -135,7 +135,7 @@ def get_datapoints(self, ldap_url, username, password): conn.simple_bind_s(username, password) try: - for key in self.STATS.keys(): + for key in list(self.STATS.keys()): base = self.STATS[key]['base'] attr = self.STATS[key]['attr'] num = conn.search(base, ldap.SCOPE_BASE, @@ -164,5 +164,5 @@ def collect(self): self.log.error('Unable to query %s: %s' % (ldap_url, e)) return {} - for name, value in datapoints.items(): + for name, value in list(datapoints.items()): self.publish(name, value) diff --git a/src/collectors/openstackswift/openstackswift.py b/src/collectors/openstackswift/openstackswift.py index a413bffb6..f6d3970fb 100644 --- a/src/collectors/openstackswift/openstackswift.py +++ 
b/src/collectors/openstackswift/openstackswift.py @@ -69,7 +69,7 @@ def collect(self): self.publish('dispersion.errors', len(stderr.split('\n')) - 1) data = json.loads(stdout) for t in ('object', 'container'): - for (k, v) in data[t].items(): + for (k, v) in list(data[t].items()): self.publish('dispersion.%s.%s' % (t, k), v) # container metrics returned by stat diff --git a/src/collectors/openstackswiftrecon/openstackswiftrecon.py b/src/collectors/openstackswiftrecon/openstackswiftrecon.py index 51ad82a60..278ecf943 100644 --- a/src/collectors/openstackswiftrecon/openstackswiftrecon.py +++ b/src/collectors/openstackswiftrecon/openstackswiftrecon.py @@ -50,7 +50,7 @@ def get_default_config(self): def _process_cache(self, d, path=()): """Recusively walk a nested recon cache dict to obtain path/values""" - for k, v in d.iteritems(): + for k, v in d.items(): if not isinstance(v, dict): self.metrics.append((path + (k,), v)) else: diff --git a/src/collectors/openvpn/openvpn.py b/src/collectors/openvpn/openvpn.py index dcc31c9c8..c3fd184e0 100644 --- a/src/collectors/openvpn/openvpn.py +++ b/src/collectors/openvpn/openvpn.py @@ -37,7 +37,7 @@ import socket import diamond.collector import os.path -import urlparse +from urllib.parse import urlparse import time @@ -86,7 +86,7 @@ class Object(object): return parsed def collect(self): - if isinstance(self.config['instances'], basestring): + if isinstance(self.config['instances'], str): instances = [self.config['instances']] else: instances = self.config['instances'] @@ -237,7 +237,7 @@ def parse(self, name, lines): def publish_number(self, key, value): key = key.replace('/', '-').replace(' ', '_').lower() try: - value = long(value) + value = int(value) except ValueError: self.log.error('OpenVPN expected a number for "%s", got "%s"', key, value) diff --git a/src/collectors/ossec/ossec.py b/src/collectors/ossec/ossec.py index 9c9da1cef..2baa5b110 100644 --- a/src/collectors/ossec/ossec.py +++ b/src/collectors/ossec/ossec.py @@ 
-77,6 +77,6 @@ def collect(self): else: states[state] += 1 - for state, count in states.items(): + for state, count in list(states.items()): name = 'agents.' + re.sub('[^a-z]', '_', state.lower()) self.publish(name, count) diff --git a/src/collectors/passenger_stats/passenger_stats.py b/src/collectors/passenger_stats/passenger_stats.py index 82ba557ed..28515be26 100644 --- a/src/collectors/passenger_stats/passenger_stats.py +++ b/src/collectors/passenger_stats/passenger_stats.py @@ -218,11 +218,11 @@ def collect(self): return {} dict_stats = self.get_passenger_memory_stats() - if len(dict_stats.keys()) == 0: + if len(list(dict_stats.keys())) == 0: return {} queue_stats = self.get_passenger_queue_stats() - if len(queue_stats.keys()) == 0: + if len(list(queue_stats.keys())) == 0: return {} overall_cpu = self.get_passenger_cpu_usage(dict_stats) diff --git a/src/collectors/pgbouncer/pgbouncer.py b/src/collectors/pgbouncer/pgbouncer.py index 2c174c0a8..05ee80940 100644 --- a/src/collectors/pgbouncer/pgbouncer.py +++ b/src/collectors/pgbouncer/pgbouncer.py @@ -83,15 +83,15 @@ def collect(self): } } - for name, instance in instances.iteritems(): + for name, instance in instances.items(): host = instance['host'] port = instance['port'] user = instance.get('user') or self.config['user'] password = instance.get('password') or self.config['password'] for database, stats in self._get_stats_by_database( - host, port, user, password).iteritems(): - for stat_name, stat_value in stats.iteritems(): + host, port, user, password).items(): + for stat_name, stat_value in stats.items(): self.publish( self._get_metric_name(name, database, stat_name), stat_value) diff --git a/src/collectors/pgq/pgq.py b/src/collectors/pgq/pgq.py index 8697c57ec..8210c6146 100644 --- a/src/collectors/pgq/pgq.py +++ b/src/collectors/pgq/pgq.py @@ -52,7 +52,7 @@ def collect(self): self.log.error('Unable to import module psycopg2') return None - for instance, configuration in 
self.config['instances'].iteritems(): + for instance, configuration in self.config['instances'].items(): connection = psycopg2.connect(configuration['dsn']) connection.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT, @@ -63,13 +63,13 @@ def _collect_for_instance(self, instance, connection): """Collects metrics for a named connection.""" with connection.cursor() as cursor: for queue, metrics in self.get_queue_info(instance, cursor): - for name, metric in metrics.items(): + for name, metric in list(metrics.items()): self.publish('.'.join((instance, queue, name)), metric) with connection.cursor() as cursor: consumers = self.get_consumer_info(instance, cursor) for queue, consumer, metrics in consumers: - for name, metric in metrics.items(): + for name, metric in list(metrics.items()): key_parts = (instance, queue, 'consumers', consumer, name) self.publish('.'.join(key_parts), metric) diff --git a/src/collectors/phpfpm/phpfpm.py b/src/collectors/phpfpm/phpfpm.py index 4c2763c8e..7a4af0f59 100644 --- a/src/collectors/phpfpm/phpfpm.py +++ b/src/collectors/phpfpm/phpfpm.py @@ -36,7 +36,7 @@ except ImportError: import simplejson as json -import urllib2 +from urllib.request import urlopen import diamond.collector @@ -72,7 +72,7 @@ def collect(self): self.config['uri'] = self.config['uri'][1:] try: - response = urllib2.urlopen("http://%s:%s/%s?json" % ( + response = urlopen("http://%s:%s/%s?json" % ( self.config['host'], int(self.config['port']), self.config['uri'])) except Exception as e: @@ -97,7 +97,7 @@ def collect(self): 'max_children_reached', 'slow_requests' ] - for k, v in j.items(): + for k, v in list(j.items()): # # php-fpm has spaces in the keys so lets replace all spaces with _ k = k.replace(" ", "_") diff --git a/src/collectors/ping/ping.py b/src/collectors/ping/ping.py index 8d28816db..6c882256f 100644 --- a/src/collectors/ping/ping.py +++ b/src/collectors/ping/ping.py @@ -58,7 +58,7 @@ def get_default_config(self): return config def 
collect(self): - for key in self.config.keys(): + for key in list(self.config.keys()): if key[:7] == "target_": host = self.config[key] metric_name = host.replace('.', '_') diff --git a/src/collectors/portstat/portstat.py b/src/collectors/portstat/portstat.py index 634a0ed60..80aeeb49f 100644 --- a/src/collectors/portstat/portstat.py +++ b/src/collectors/portstat/portstat.py @@ -37,7 +37,7 @@ class PortStatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(PortStatCollector, self).__init__(*args, **kwargs) self.ports = {} - for port_name, cfg in self.config['port'].items(): + for port_name, cfg in list(self.config['port'].items()): port_cfg = {} for key in ('number',): port_cfg[key] = cfg.get(key, []) @@ -66,10 +66,10 @@ def collect(self): self.log.error('Unable to import module psutil') return {} - for port_name, port_cfg in self.ports.iteritems(): + for port_name, port_cfg in self.ports.items(): port = int(port_cfg['number']) stats = get_port_stats(port) - for stat_name, stat_value in stats.iteritems(): + for stat_name, stat_value in stats.items(): metric_name = '%s.%s' % (port_name, stat_name) self.publish(metric_name, stat_value) diff --git a/src/collectors/postfix/postfix.py b/src/collectors/postfix/postfix.py index aeda631c2..db7995b65 100644 --- a/src/collectors/postfix/postfix.py +++ b/src/collectors/postfix/postfix.py @@ -24,7 +24,7 @@ from diamond.collector import str_to_bool -DOTS_TO_UNDERS = {ord(u'.'): u'_'} +DOTS_TO_UNDERS = {ord('.'): '_'} class PostfixCollector(diamond.collector.Collector): @@ -95,22 +95,22 @@ def collect(self): if not data: return - if str_to_bool(self.config['include_clients']) and u'clients' in data: - for client, value in data['clients'].iteritems(): + if str_to_bool(self.config['include_clients']) and 'clients' in data: + for client, value in data['clients'].items(): # translate dots to underscores in client names - metric = u'.'.join(['clients', + metric = '.'.join(['clients', 
client.translate(DOTS_TO_UNDERS)]) dvalue = self.derivative(metric, value) self.publish(metric, dvalue, precision=4) - for action in (u'in', u'recv', u'send'): + for action in ('in', 'recv', 'send'): if action not in data: continue - for sect, stats in data[action].iteritems(): - for status, value in stats.iteritems(): + for sect, stats in data[action].items(): + for status, value in stats.items(): metric = '.'.join([action, sect, status.translate(DOTS_TO_UNDERS)]) @@ -119,8 +119,8 @@ def collect(self): self.publish(metric, dvalue, precision=4) - if u'local' in data: - for key, value in data[u'local'].iteritems(): + if 'local' in data: + for key, value in data['local'].items(): metric = '.'.join(['local', key]) dvalue = self.derivative(metric, value) diff --git a/src/collectors/postgres/postgres.py b/src/collectors/postgres/postgres.py index 7732f2343..eebe2281e 100644 --- a/src/collectors/postgres/postgres.py +++ b/src/collectors/postgres/postgres.py @@ -224,7 +224,7 @@ def fetch(self, pg_version): # If row > length 2, assume each column name maps to # key => value else: - for key, value in row.iteritems(): + for key, value in row.items(): if key in ('datname', 'schemaname', 'relname', 'indexrelname', 'funcname',): continue diff --git a/src/collectors/proc/proc.py b/src/collectors/proc/proc.py index 4ee9444ba..4b2b106e6 100644 --- a/src/collectors/proc/proc.py +++ b/src/collectors/proc/proc.py @@ -64,7 +64,7 @@ def collect(self): metric_name = data[0] metric_value = int(data[1]) metric_value = int(self.derivative(metric_name, - long(metric_value), + int(metric_value), counter)) self.publish(metric_name, metric_value) diff --git a/src/collectors/proc/test/testproc.py b/src/collectors/proc/test/testproc.py index 997d9ef6f..047b5a7af 100644 --- a/src/collectors/proc/test/testproc.py +++ b/src/collectors/proc/test/testproc.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO 
import StringIO + from io import StringIO from diamond.collector import Collector from proc import ProcessStatCollector diff --git a/src/collectors/processresources/processresources.py b/src/collectors/processresources/processresources.py index 05d7636d9..a467b9c9c 100644 --- a/src/collectors/processresources/processresources.py +++ b/src/collectors/processresources/processresources.py @@ -81,7 +81,7 @@ def process_info(process, info_keys): if type(value) in [float, int]: results.update({key: value}) elif hasattr(value, '_asdict'): - for subkey, subvalue in value._asdict().iteritems(): + for subkey, subvalue in value._asdict().items(): results.update({"%s.%s" % (key, subkey): subvalue}) return results @@ -111,7 +111,7 @@ def process_config(self): """ self.processes = {} self.processes_info = {} - for pg_name, cfg in self.config['process'].items(): + for pg_name, cfg in list(self.config['process'].items()): pg_cfg = {} for key in ('exe', 'name', 'cmdline'): pg_cfg[key] = cfg.get(key, []) @@ -158,7 +158,7 @@ def get_default_config(self): return config def save_process_info(self, pg_name, process_info): - for key, value in process_info.iteritems(): + for key, value in process_info.items(): if key in self.processes_info[pg_name]: self.processes_info[pg_name][key] += value else: @@ -173,7 +173,7 @@ def collect_process_info(self, process): exe = get_value(process, 'exe') except psutil.AccessDenied: exe = "" - for pg_name, cfg in self.processes.items(): + for pg_name, cfg in list(self.processes.items()): if match_process(pid, name, cmdline, exe, cfg): pi = process_info(process, self.config['info_keys']) if cfg['count_workers']: @@ -198,11 +198,11 @@ def collect(self): self.collect_process_info(process) # publish results - for pg_name, counters in self.processes_info.iteritems(): + for pg_name, counters in self.processes_info.items(): if counters: metrics = ( ("%s.%s" % (pg_name, key), value) - for key, value in counters.iteritems()) + for key, value in counters.items()) 
else: if self.processes[pg_name]['count_workers']: metrics = (('%s.workers_count' % pg_name, 0), ) diff --git a/src/collectors/puppetagent/puppetagent.py b/src/collectors/puppetagent/puppetagent.py index 579cc0ece..d4c117495 100644 --- a/src/collectors/puppetagent/puppetagent.py +++ b/src/collectors/puppetagent/puppetagent.py @@ -55,9 +55,9 @@ def collect(self): summary = self._get_summary() - for sect, data in summary.iteritems(): - for stat, value in data.iteritems(): - if value is None or isinstance(value, basestring): + for sect, data in summary.items(): + for stat, value in data.items(): + if value is None or isinstance(value, str): continue metric = '.'.join([sect, stat]) diff --git a/src/collectors/puppetdashboard/puppetdashboard.py b/src/collectors/puppetdashboard/puppetdashboard.py index b07e38159..1e1eb3571 100644 --- a/src/collectors/puppetdashboard/puppetdashboard.py +++ b/src/collectors/puppetdashboard/puppetdashboard.py @@ -9,7 +9,7 @@ """ -import urllib2 +from urllib.request import urlopen import re import diamond.collector @@ -40,7 +40,7 @@ def get_default_config(self): def collect(self): try: - response = urllib2.urlopen("http://%s:%s/" % ( + response = urlopen("http://%s:%s/" % ( self.config['host'], int(self.config['port']))) except Exception as e: self.log.error('Couldnt connect to puppet-dashboard: %s', e) diff --git a/src/collectors/puppetdb/puppetdb.py b/src/collectors/puppetdb/puppetdb.py index 34320bc1b..f4b91fb34 100644 --- a/src/collectors/puppetdb/puppetdb.py +++ b/src/collectors/puppetdb/puppetdb.py @@ -10,7 +10,7 @@ """ -import urllib2 +from urllib.request import urlopen import diamond.collector from diamond.convertor import time as time_convertor @@ -90,7 +90,7 @@ def fetch_metrics(self, url): try: url = "http://%s:%s/%s" % ( self.config['host'], int(self.config['port']), url) - response = urllib2.urlopen(url) + response = urlopen(url) except Exception as e: self.log.error('Couldn\'t connect to puppetdb: %s -> %s', url, e) return {} 
diff --git a/src/collectors/rabbitmq/rabbitmq.py b/src/collectors/rabbitmq/rabbitmq.py index 5f712e09d..8f0f1d2d0 100644 --- a/src/collectors/rabbitmq/rabbitmq.py +++ b/src/collectors/rabbitmq/rabbitmq.py @@ -21,11 +21,11 @@ ** """ +from urllib.request import Request, urlopen import diamond.collector import re -from urlparse import urljoin -from urllib import quote -import urllib2 +from urllib.parse import urljoin +from urllib.parse import quote from base64 import b64encode try: @@ -47,9 +47,9 @@ def __init__(self, log, host, user, password, timeout=5, scheme="http"): def do_call(self, path): url = urljoin(self.base_url, path) - req = urllib2.Request(url) + req = Request(url) req.add_header('Authorization', self._authorization) - return json.load(urllib2.urlopen(req, timeout=self.timeout)) + return json.load(urlopen(req, timeout=self.timeout)) def get_all_vhosts(self): return self.do_call('vhosts') @@ -243,7 +243,7 @@ def collect(self): vhost_conf, legacy = self.get_vhost_conf(vhost_names) # Iterate all vhosts in our vhosts configurations - for vhost, queues in vhost_conf.iteritems(): + for vhost, queues in vhost_conf.items(): vhost_name = vhost if self.config['replace_dot']: vhost_name = vhost_name.replace( @@ -288,7 +288,7 @@ def _publish_metrics(self, name, prev_keys, key, data): if isinstance(value, dict): for new_key in value: self._publish_metrics(name, keys, new_key, value) - elif isinstance(value, (float, int, long)): + elif isinstance(value, (float, int)): joined_keys = '.'.join(keys) if name: publish_key = '{}.{}'.format(name, joined_keys) diff --git a/src/collectors/redisstat/redisstat.py b/src/collectors/redisstat/redisstat.py index e2e28c614..81e9e9e57 100644 --- a/src/collectors/redisstat/redisstat.py +++ b/src/collectors/redisstat/redisstat.py @@ -111,7 +111,7 @@ def process_config(self): super(RedisCollector, self).process_config() instance_list = self.config['instances'] # configobj make str of single-element list, let's convert - if 
isinstance(instance_list, basestring): + if isinstance(instance_list, str): instance_list = [instance_list] # process original single redis instance @@ -170,7 +170,7 @@ def process_config(self): self.instances[nickname] = (host, port, None, auth) - self.log.debug("Configured instances: %s" % self.instances.items()) + self.log.debug("Configured instances: %s" % list(self.instances.items())) def get_default_config_help(self): config_help = super(RedisCollector, self).get_default_config_help() @@ -330,7 +330,7 @@ def collect_instance(self, nick, host, port, unix_socket, auth): # Then calculate the % maxmemory of memory used maxmemory_config = self._get_config(host, port, unix_socket, auth, 'maxmemory') - if maxmemory_config and 'maxmemory' in maxmemory_config.keys(): + if maxmemory_config and 'maxmemory' in list(maxmemory_config.keys()): maxmemory = float(maxmemory_config['maxmemory']) # Only report % used if maxmemory is a non zero value @@ -379,6 +379,6 @@ def collect(self): self.log.error('Unable to import module redis') return {} - for nick in self.instances.keys(): + for nick in list(self.instances.keys()): (host, port, unix_socket, auth) = self.instances[nick] self.collect_instance(nick, host, int(port), unix_socket, auth) diff --git a/src/collectors/redisstat/test/testredisstat.py b/src/collectors/redisstat/test/testredisstat.py index b4e95df32..2dc64ee63 100644 --- a/src/collectors/redisstat/test/testredisstat.py +++ b/src/collectors/redisstat/test/testredisstat.py @@ -287,7 +287,7 @@ def test_hostport_or_instance_config(self, publish_mock): }, } - for testname, data in testcases.items(): + for testname, data in list(testcases.items()): config = get_collector_config('RedisCollector', data['config']) collector = RedisCollector(config, None) diff --git a/src/collectors/resqueweb/resqueweb.py b/src/collectors/resqueweb/resqueweb.py index 1b7606e20..ab30ab2b5 100644 --- a/src/collectors/resqueweb/resqueweb.py +++ b/src/collectors/resqueweb/resqueweb.py @@ -9,7 
+9,7 @@ """ -import urllib2 +from urllib.request import urlopen import diamond.collector @@ -35,7 +35,7 @@ def get_default_config(self): def collect(self): try: - response = urllib2.urlopen("http://%s:%s/stats.txt" % ( + response = urlopen("http://%s:%s/stats.txt" % ( self.config['host'], int(self.config['port']))) except Exception as e: self.log.error('Couldnt connect to resque-web: %s', e) diff --git a/src/collectors/scribe/scribe.py b/src/collectors/scribe/scribe.py index d31cef02e..785999946 100644 --- a/src/collectors/scribe/scribe.py +++ b/src/collectors/scribe/scribe.py @@ -71,7 +71,7 @@ def get_scribe_stats(self): metric = self.key_to_metric(key) data[metric] = int(val) - return data.items() + return list(data.items()) def collect(self): for stat, val in self.get_scribe_stats(): diff --git a/src/collectors/servertechpdu/servertechpdu.py b/src/collectors/servertechpdu/servertechpdu.py index a17271333..cd4fb7c02 100644 --- a/src/collectors/servertechpdu/servertechpdu.py +++ b/src/collectors/servertechpdu/servertechpdu.py @@ -76,9 +76,9 @@ def collect_snmp(self, device, host, port, community): inputFeeds = {} # Collect PDU input gauge values - for gaugeName, gaugeOid in self.PDU_SYSTEM_GAUGES.items(): + for gaugeName, gaugeOid in list(self.PDU_SYSTEM_GAUGES.items()): systemGauges = self.walk(gaugeOid, host, port, community) - for o, gaugeValue in systemGauges.items(): + for o, gaugeValue in list(systemGauges.items()): # Get Metric Name metricName = gaugeName # Get Metric Value @@ -94,15 +94,15 @@ def collect_snmp(self, device, host, port, community): # Collect PDU input feed names inputFeedNames = self.walk( self.PDU_INFEED_NAMES, host, port, community) - for o, inputFeedName in inputFeedNames.items(): + for o, inputFeedName in list(inputFeedNames.items()): # Extract input feed name inputFeed = ".".join(o.split(".")[-2:]) inputFeeds[inputFeed] = inputFeedName # Collect PDU input gauge values - for gaugeName, gaugeOid in self.PDU_INFEED_GAUGES.items(): + for 
gaugeName, gaugeOid in list(self.PDU_INFEED_GAUGES.items()): inputFeedGauges = self.walk(gaugeOid, host, port, community) - for o, gaugeValue in inputFeedGauges.items(): + for o, gaugeValue in list(inputFeedGauges.items()): # Extract input feed name inputFeed = ".".join(o.split(".")[-2:]) diff --git a/src/collectors/sidekiq/sidekiq.py b/src/collectors/sidekiq/sidekiq.py index dcaa1e7df..759a35238 100644 --- a/src/collectors/sidekiq/sidekiq.py +++ b/src/collectors/sidekiq/sidekiq.py @@ -8,7 +8,6 @@ * redis """ -from itertools import izip try: import redis @@ -86,10 +85,10 @@ def get_redis_client(self): if sentinel_ports: assert len(sentinel_ports) == len(ports) else: - sentinel_ports = [None for _ in xrange(len(ports))] + sentinel_ports = [None for _ in range(len(ports))] - for port, sentinel_port in izip(ports, sentinel_ports): - for db in xrange(0, int(databases)): + for port, sentinel_port in zip(ports, sentinel_ports): + for db in range(0, int(databases)): master = self.get_master( host, port, sentinel_port, sentinel_name ) diff --git a/src/collectors/sidekiqweb/sidekiqweb.py b/src/collectors/sidekiqweb/sidekiqweb.py index 52ef9be1b..7ad504784 100644 --- a/src/collectors/sidekiqweb/sidekiqweb.py +++ b/src/collectors/sidekiqweb/sidekiqweb.py @@ -15,7 +15,7 @@ except ImportError: import simplejson as json -import urllib2 +from urllib.request import urlopen import diamond.collector @@ -42,7 +42,7 @@ def get_default_config(self): def collect(self): try: - response = urllib2.urlopen("http://%s:%s/dashboard/stats" % ( + response = urlopen("http://%s:%s/dashboard/stats" % ( self.config['host'], int(self.config['port']))) except Exception as e: self.log.error('Couldnt connect to sidekiq-web: %s', e) @@ -55,9 +55,9 @@ def collect(self): return {} for k in j: - for item, value in j[k].items(): + for item, value in list(j[k].items()): - if isinstance(value, (str, unicode)) and 'M' in value: + if isinstance(value, str) and 'M' in value: value = float(value.replace('M', '')) 
for unit in self.config['byte_unit']: unit_value = diamond.convertor.binary.convert( diff --git a/src/collectors/slabinfo/test/testslabinfo.py b/src/collectors/slabinfo/test/testslabinfo.py index 106514db4..afccdf4be 100644 --- a/src/collectors/slabinfo/test/testslabinfo.py +++ b/src/collectors/slabinfo/test/testslabinfo.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from slabinfo import SlabInfoCollector diff --git a/src/collectors/slony/slony.py b/src/collectors/slony/slony.py index 1a2ef9dc5..ae0c91eee 100644 --- a/src/collectors/slony/slony.py +++ b/src/collectors/slony/slony.py @@ -94,7 +94,7 @@ def collect(self): } } - for name, instance in instances.iteritems(): + for name, instance in instances.items(): host = self.config['host'] port = self.config['port'] user = instance.get('user') or self.config['user'] diff --git a/src/collectors/snmpinterface/snmpinterface.py b/src/collectors/snmpinterface/snmpinterface.py index 4c547522f..66c69ecbf 100644 --- a/src/collectors/snmpinterface/snmpinterface.py +++ b/src/collectors/snmpinterface/snmpinterface.py @@ -128,7 +128,7 @@ def collect_snmp(self, device, host, port, community): # Get Interface Indexes ifIndexOid = '.'.join([self.IF_MIB_INDEX_OID]) ifIndexData = self.walk(ifIndexOid, host, port, community) - ifIndexes = [v for v in ifIndexData.values()] + ifIndexes = [v for v in list(ifIndexData.values())] for ifIndex in ifIndexes: # Get Interface Type @@ -145,7 +145,7 @@ def collect_snmp(self, device, host, port, community): ifName = re.sub(r'(\"|\')', '', ifName) # Get Gauges - for gaugeName, gaugeOid in self.IF_MIB_GAUGE_OID_TABLE.items(): + for gaugeName, gaugeOid in list(self.IF_MIB_GAUGE_OID_TABLE.items()): ifGaugeOid = '.'.join([self.IF_MIB_GAUGE_OID_TABLE[gaugeName], ifIndex]) ifGaugeData = self.get(ifGaugeOid, host, port, community) 
@@ -166,7 +166,7 @@ def collect_snmp(self, device, host, port, community): self.publish_gauge(metricPath, metricValue) # Get counters (64bit) - counterItems = self.IF_MIB_COUNTER_OID_TABLE.items() + counterItems = list(self.IF_MIB_COUNTER_OID_TABLE.items()) for counterName, counterOid in counterItems: ifCounterOid = '.'.join( [self.IF_MIB_COUNTER_OID_TABLE[counterName], ifIndex]) diff --git a/src/collectors/snmpraw/snmpraw.py b/src/collectors/snmpraw/snmpraw.py index e3688c554..908f84e58 100644 --- a/src/collectors/snmpraw/snmpraw.py +++ b/src/collectors/snmpraw/snmpraw.py @@ -120,7 +120,7 @@ def _get_value_walk(self, device, oid, host, port, community): return # because we only allow 1-key dicts, we can pick with absolute index - value = data.items()[0][1] + value = list(data.items())[0][1] return value def _get_value(self, device, oid, host, port, community): @@ -162,7 +162,7 @@ def collect_snmp(self, device, host, port, community): dev_config = self.config['devices'][device] if 'oids' in dev_config: - for oid, metricName in dev_config['oids'].items(): + for oid, metricName in list(dev_config['oids'].items()): if (device, oid) in self.skip_list: self.log.debug( diff --git a/src/collectors/sockstat/sockstat.py b/src/collectors/sockstat/sockstat.py index 10492f6c3..044ddb617 100644 --- a/src/collectors/sockstat/sockstat.py +++ b/src/collectors/sockstat/sockstat.py @@ -57,7 +57,7 @@ def collect(self): self.collect_stat(result, f) f.close() - for key, value in result.items(): + for key, value in list(result.items()): self.publish(key, value, metric_type='GAUGE') def collect_stat(self, data, f): @@ -65,6 +65,6 @@ def collect_stat(self, data, f): for line in f: match = _RE.match(line) if match: - for key, value in match.groupdict().items(): + for key, value in list(match.groupdict().items()): if value: data[key] += int(value) diff --git a/src/collectors/sockstat/test/testsockstat.py b/src/collectors/sockstat/test/testsockstat.py index bd8db36b5..e2a408d24 100644 --- 
a/src/collectors/sockstat/test/testsockstat.py +++ b/src/collectors/sockstat/test/testsockstat.py @@ -37,7 +37,7 @@ class Klass(Iterator): def close(self): pass - def next(self): + def __next__(self): raise StopIteration open_mock.return_value = Klass() diff --git a/src/collectors/solr/solr.py b/src/collectors/solr/solr.py index b77ba4868..6ba1b8cdf 100644 --- a/src/collectors/solr/solr.py +++ b/src/collectors/solr/solr.py @@ -12,7 +12,7 @@ """ import posixpath -import urllib2 +from urllib.request import urlopen try: import json @@ -75,7 +75,7 @@ def _get(self, path): url = 'http://%s:%i/%s' % ( self.config['host'], int(self.config['port']), path) try: - response = urllib2.urlopen(url) + response = urlopen(url) except Exception as err: self.log.error("%s: %s", url, err) return False @@ -99,7 +99,7 @@ def collect(self): # If no core is specified, provide statistics for all cores result = self._get('/solr/admin/cores?action=STATUS&wt=json') if result: - cores = result['status'].keys() + cores = list(result['status'].keys()) metrics = {} for core in cores: @@ -131,7 +131,7 @@ def collect(self): continue s = result['solr-mbeans'] - stats = dict((s[i], s[i + 1]) for i in xrange(0, len(s), 2)) + stats = dict((s[i], s[i + 1]) for i in range(0, len(s), 2)) if 'core' in self.config['stats']: core_searcher = stats["CORE"]["searcher"]["stats"] diff --git a/src/collectors/sqs/sqs.py b/src/collectors/sqs/sqs.py index 43fffa964..aae1420b8 100644 --- a/src/collectors/sqs/sqs.py +++ b/src/collectors/sqs/sqs.py @@ -63,7 +63,7 @@ def collect(self): if not sqs: self.log.error("boto module not found!") return - for (region, region_cfg) in self.config['regions'].items(): + for (region, region_cfg) in list(self.config['regions'].items()): assert 'queues' in region_cfg auth_kwargs = _get_auth_kwargs(config=region_cfg) queues = region_cfg['queues'].split(',') diff --git a/src/collectors/squid/squid.py b/src/collectors/squid/squid.py index d5afe3119..54c68765b 100644 --- 
a/src/collectors/squid/squid.py +++ b/src/collectors/squid/squid.py @@ -67,15 +67,15 @@ def _getData(self, host, port): squid_sock.connect((host, int(port))) squid_sock.settimeout(0.25) squid_sock.sendall( - "GET cache_object://localhost/counters HTTP/1.0\r\n" + + ("GET cache_object://localhost/counters HTTP/1.0\r\n" + "Host: localhost\r\n" + "Accept: */*\r\n" + - "Connection: close\r\n\r\n") + "Connection: close\r\n\r\n").encode()) fulldata = '' while True: - data = squid_sock.recv(1024) + data = squid_sock.recv(1024).decode('utf-8') if not data: break fulldata = fulldata + data @@ -87,7 +87,7 @@ def _getData(self, host, port): return fulldata def collect(self): - for nickname in self.squid_hosts.keys(): + for nickname in list(self.squid_hosts.keys()): squid_host = self.squid_hosts[nickname] fulldata = self._getData(squid_host['host'], diff --git a/src/collectors/supervisord/supervisord.py b/src/collectors/supervisord/supervisord.py index cb15dfa3f..78a692635 100644 --- a/src/collectors/supervisord/supervisord.py +++ b/src/collectors/supervisord/supervisord.py @@ -27,7 +27,7 @@ """ -import xmlrpclib +import xmlrpc.client as xmlrpclib try: import supervisor.xmlrpc diff --git a/src/collectors/tcp/tcp.py b/src/collectors/tcp/tcp.py index a357dd65b..309e60496 100644 --- a/src/collectors/tcp/tcp.py +++ b/src/collectors/tcp/tcp.py @@ -260,15 +260,15 @@ def collect(self): header = header.split() data = data.split() - for i in xrange(1, len(header)): + for i in range(1, len(header)): metrics[header[i]] = data[i] - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): if ((len(self.config['allowed_names']) > 0 and metric_name not in self.config['allowed_names'])): continue - value = long(metrics[metric_name]) + value = int(metrics[metric_name]) # Publish the metric if metric_name in self.config['gauges']: diff --git a/src/collectors/tcp/test/testtcp.py b/src/collectors/tcp/test/testtcp.py index addabf76b..8bf01448b 100644 --- 
a/src/collectors/tcp/test/testtcp.py +++ b/src/collectors/tcp/test/testtcp.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from tcp import TCPCollector diff --git a/src/collectors/tokumx/test/testtokumx.py b/src/collectors/tokumx/test/testtokumx.py index e43ef159a..2b859dce5 100644 --- a/src/collectors/tokumx/test/testtokumx.py +++ b/src/collectors/tokumx/test/testtokumx.py @@ -84,7 +84,7 @@ def test_should_publish_nested_keys_for_db_stats(self, def test_should_publish_stats_with_long_type(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.collector.collect() @@ -114,7 +114,7 @@ def test_should_ignore_unneeded_databases(self, def test_should_ignore_unneeded_collections(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.connection['db1'].collection_names.return_value = ['collection1', @@ -205,7 +205,7 @@ def test_should_publish_nested_keys_for_db_stats(self, def test_should_publish_stats_with_long_type(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.collector.collect() @@ -236,7 +236,7 @@ def test_should_ignore_unneeded_databases(self, def test_should_ignore_unneeded_collections(self, publish_mock, connector_mock): - data = {'more_keys': long(1), 'key': 2, 'string': 'str'} + data = {'more_keys': int(1), 'key': 2, 'string': 'str'} self._annotate_connection(connector_mock, data) self.connection['db1'].collection_names.return_value = ['collection1', diff --git 
a/src/collectors/tokumx/tokumx.py b/src/collectors/tokumx/tokumx.py index 736a2d3e8..26e466b86 100644 --- a/src/collectors/tokumx/tokumx.py +++ b/src/collectors/tokumx/tokumx.py @@ -141,7 +141,7 @@ def collect(self): serverStatus = conn.db.command('serverStatus') engineStatus = conn.db.command('engineStatus') - data = dict(serverStatus.items() + engineStatus.items()) + data = dict(list(serverStatus.items()) + list(engineStatus.items())) self._publish_transformed(data, base_prefix) if str_to_bool(self.config['simple']): @@ -261,7 +261,7 @@ def _publish_metrics(self, prev_keys, key, data, publishfn=None): self._publish_metrics(keys, new_key, value) elif isinstance(value, int) or isinstance(value, float): publishfn('.'.join(keys), value) - elif isinstance(value, long): + elif isinstance(value, int): publishfn('.'.join(keys), float(value)) def _extract_simple_data(self, data): diff --git a/src/collectors/twemproxy/twemproxy.py b/src/collectors/twemproxy/twemproxy.py index 0df1b2b28..58549de94 100644 --- a/src/collectors/twemproxy/twemproxy.py +++ b/src/collectors/twemproxy/twemproxy.py @@ -113,17 +113,17 @@ def get_stats(self, host, port): stats = {} pools = {} - for stat, value in data.iteritems(): + for stat, value in data.items(): # Test if this is a pool if isinstance(value, dict): pool_name = stat.replace('.', '_') pools[pool_name] = {} - for pool_stat, pool_value in value.iteritems(): + for pool_stat, pool_value in value.items(): # Test if this is a pool server if isinstance(pool_value, dict): server_name = pool_stat.replace('.', '_') pools[pool_name][server_name] = {} - for server_stat, server_value in pool_value.iteritems(): + for server_stat, server_value in pool_value.items(): pools[pool_name][server_name][server_stat] = \ int(server_value) else: @@ -140,7 +140,7 @@ def collect(self): hosts = self.config.get('hosts') # Convert a string config value to be an array - if isinstance(hosts, basestring): + if isinstance(hosts, str): hosts = [hosts] for host in 
hosts: @@ -161,11 +161,11 @@ def collect(self): self.publish_counter(alias + "." + stat, stats[stat]) # Pool stats - for pool, pool_stats in pools.iteritems(): - for stat, stat_value in pool_stats.iteritems(): + for pool, pool_stats in pools.items(): + for stat, stat_value in pool_stats.items(): # Test if this is a pool server if isinstance(stat_value, dict): - for server_stat, server_value in stat_value.iteritems(): + for server_stat, server_value in stat_value.items(): if server_stat in self.GAUGES: self.publish_gauge( alias + ".pools." + pool + ".servers." + diff --git a/src/collectors/udp/test/testudp.py b/src/collectors/udp/test/testudp.py index 7edcff02f..84d0753db 100644 --- a/src/collectors/udp/test/testudp.py +++ b/src/collectors/udp/test/testudp.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from udp import UDPCollector diff --git a/src/collectors/udp/udp.py b/src/collectors/udp/udp.py index 8e4f04acc..d9ea5f222 100644 --- a/src/collectors/udp/udp.py +++ b/src/collectors/udp/udp.py @@ -83,16 +83,16 @@ def collect(self): header = header.split() data = data.split() - for i in xrange(1, len(header)): + for i in range(1, len(header)): metrics[header[i]] = data[i] - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): if ((len(self.config['allowed_names']) > 0 and metric_name not in self.config['allowed_names'])): continue value = metrics[metric_name] - value = self.derivative(metric_name, long(value)) + value = self.derivative(metric_name, int(value)) # Publish the metric self.publish(metric_name, value, 0) diff --git a/src/collectors/unbound/unbound.py b/src/collectors/unbound/unbound.py index e5e1fa9db..94db9be44 100644 --- a/src/collectors/unbound/unbound.py +++ b/src/collectors/unbound/unbound.py @@ -86,5 +86,5 @@ def collect(self): if include_hist: 
histogram = self.get_massaged_histogram(raw_histogram) - for intv, value in histogram.iteritems(): + for intv, value in histogram.items(): self.publish('histogram.' + intv, value) diff --git a/src/collectors/uptime/test/test_uptime.py b/src/collectors/uptime/test/test_uptime.py index 9dc2bec29..732fc39dc 100644 --- a/src/collectors/uptime/test/test_uptime.py +++ b/src/collectors/uptime/test/test_uptime.py @@ -8,9 +8,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from uptime import UptimeCollector diff --git a/src/collectors/users/users.py b/src/collectors/users/users.py index 825410de4..38f8df8cf 100644 --- a/src/collectors/users/users.py +++ b/src/collectors/users/users.py @@ -65,7 +65,7 @@ def collect(self): metrics[utmp.ut_user] = metrics.get(utmp.ut_user, 0) + 1 metrics['total'] = metrics['total'] + 1 - for metric_name in metrics.keys(): + for metric_name in list(metrics.keys()): self.publish(metric_name, metrics[metric_name]) return True diff --git a/src/collectors/userscripts/userscripts.py b/src/collectors/userscripts/userscripts.py index 4e2174d02..ad6641e57 100644 --- a/src/collectors/userscripts/userscripts.py +++ b/src/collectors/userscripts/userscripts.py @@ -85,7 +85,7 @@ def collect(self): self.log.error("%s returned error output (stderr): %s" % (absolutescriptpath, err)) # Use filter to remove empty lines of output - for line in filter(None, out.split('\n')): + for line in [_f for _f in out.split('\n') if _f]: # Ignore invalid lines try: name, value = line.split() diff --git a/src/collectors/vmsdoms/vmsdoms.py b/src/collectors/vmsdoms/vmsdoms.py index 723689ab5..bffa84907 100644 --- a/src/collectors/vmsdoms/vmsdoms.py +++ b/src/collectors/vmsdoms/vmsdoms.py @@ -103,7 +103,7 @@ def collect(self): for dom, ctrl in vms_domains: try: # Get value and scale. 
- value = long(ctrl.get(key)) * scale + value = int(ctrl.get(key)) * scale except vms.control.ControlException: continue diff --git a/src/collectors/vmsfs/vmsfs.py b/src/collectors/vmsfs/vmsfs.py index e4c30ae13..681e62b5d 100644 --- a/src/collectors/vmsfs/vmsfs.py +++ b/src/collectors/vmsfs/vmsfs.py @@ -32,7 +32,7 @@ def vmsfs_stats_read(self, filename): for line in stats_fd: tokens = line.split() - stats[tokens[0][0:-1]] = long(tokens[1]) + stats[tokens[0][0:-1]] = int(tokens[1]) except: if stats_fd: stats_fd.close() diff --git a/src/collectors/vmstat/test/testvmstat.py b/src/collectors/vmstat/test/testvmstat.py index 81adbcdcb..30d897f00 100644 --- a/src/collectors/vmstat/test/testvmstat.py +++ b/src/collectors/vmstat/test/testvmstat.py @@ -9,9 +9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from vmstat import VMStatCollector diff --git a/src/collectors/websitemonitor/websitemonitor.py b/src/collectors/websitemonitor/websitemonitor.py index af0723469..da021abf6 100644 --- a/src/collectors/websitemonitor/websitemonitor.py +++ b/src/collectors/websitemonitor/websitemonitor.py @@ -7,7 +7,7 @@ """ -import urllib2 +from werkzeug import Request import time from datetime import datetime import diamond.collector @@ -38,7 +38,7 @@ def get_default_config(self): return default_config def collect(self): - req = urllib2.Request('%s' % (self.config['URL'])) + req = Request('%s' % (self.config['URL'])) try: # time in seconds since epoch as a floating number @@ -48,7 +48,7 @@ def collect(self): ).strftime('%B %d, %Y %H:%M:%S') self.log.debug('Start time: %s' % (st)) - resp = urllib2.urlopen(req) + resp = urlopen(req) # time in seconds since epoch as a floating number end_time = time.time() # human-readable end time e.eg. 
November 25, 2013 18:15:56 @@ -60,7 +60,7 @@ def collect(self): self.publish('response_time.%s' % (resp.code), rt, metric_type='COUNTER') # urllib2 will puke on non HTTP 200/OK URLs - except urllib2.URLError as e: + except URLError as e: if e.code != 200: # time in seconds since epoch as a floating number end_time = time.time() diff --git a/src/collectors/xen_collector/test/testxen.py b/src/collectors/xen_collector/test/testxen.py index aca58c007..08096c67f 100644 --- a/src/collectors/xen_collector/test/testxen.py +++ b/src/collectors/xen_collector/test/testxen.py @@ -48,15 +48,15 @@ def __init__(self, id): def info(self): if self.id == 0: - return [1, 49420888L, 49420888L, 8, 911232000000000L] + return [1, 49420888, 49420888, 8, 911232000000000] if self.id == 1: - return [1, 2097152L, 2097152L, 2, 310676150000000L] + return [1, 2097152, 2097152, 2, 310676150000000] if self.id == 2: - return [1, 2097152L, 2097152L, 2, 100375300000000L] + return [1, 2097152, 2097152, 2, 100375300000000] if self.id == 3: - return [1, 10485760L, 10485760L, 2, 335312040000000L] + return [1, 10485760, 10485760, 2, 335312040000000] if self.id == 4: - return [1, 10485760L, 10485760L, 2, 351313480000000L] + return [1, 10485760, 10485760, 2, 351313480000000] libvirt_m = Mock() libvirt_m.getInfo.return_value = ['x86_64', 48262, 8, 1200, 2, 1, 4, 1] diff --git a/src/collectors/xen_collector/xen_collector.py b/src/collectors/xen_collector/xen_collector.py index a55a2fe41..bbe030e4d 100644 --- a/src/collectors/xen_collector/xen_collector.py +++ b/src/collectors/xen_collector/xen_collector.py @@ -78,5 +78,5 @@ def collect(self): 'TotalCores': totalcores, 'FreeCores': (totalcores - coresallocated) } - for k in results.keys(): + for k in list(results.keys()): self.publish(k, results[k], 0) diff --git a/src/collectors/xfs/test/testxfs.py b/src/collectors/xfs/test/testxfs.py index 924a4a332..5f514e082 100644 --- a/src/collectors/xfs/test/testxfs.py +++ b/src/collectors/xfs/test/testxfs.py @@ -9,9 
+9,9 @@ from mock import patch try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO from diamond.collector import Collector from xfs import XFSCollector diff --git a/src/collectors/xfs/xfs.py b/src/collectors/xfs/xfs.py index 52a109123..a65f7d6ba 100644 --- a/src/collectors/xfs/xfs.py +++ b/src/collectors/xfs/xfs.py @@ -256,7 +256,7 @@ def collect(self): items = line.rstrip().split() stats[items[0]] = [int(a) for a in items[1:]] - for key in stats.keys(): + for key in list(stats.keys()): for item in enumerate(data_structure[key]): metric_name = '.'.join([key, item[1]]) value = stats[key][item[0]] diff --git a/src/collectors/zookeeper/zookeeper.py b/src/collectors/zookeeper/zookeeper.py index b132ba25c..3f839426b 100644 --- a/src/collectors/zookeeper/zookeeper.py +++ b/src/collectors/zookeeper/zookeeper.py @@ -118,7 +118,7 @@ def collect(self): hosts = self.config.get('hosts') # Convert a string config value to be an array - if isinstance(hosts, basestring): + if isinstance(hosts, str): hosts = [hosts] for host in hosts: @@ -130,7 +130,7 @@ def collect(self): stats = self.get_stats(hostname, port) # figure out what we're configured to get, defaulting to everything - desired = self.config.get('publish', stats.keys()) + desired = self.config.get('publish', list(stats.keys())) # for everything we want for stat in desired: diff --git a/src/diamond/collector.py b/src/diamond/collector.py index 40daad298..052a4cce7 100644 --- a/src/diamond/collector.py +++ b/src/diamond/collector.py @@ -16,7 +16,7 @@ from diamond.metric import Metric from diamond.utils.config import load_config -from error import DiamondException +from diamond.error import DiamondException # Detect the architecture of the system and set the counters for MAX_VALUES # appropriately. 
Otherwise, rolling over counters will cause incorrect or @@ -144,7 +144,7 @@ def str_to_bool(value): Converts string truthy/falsey strings to a bool Empty strings are false """ - if isinstance(value, basestring): + if isinstance(value, str): value = value.strip().lower() if value in ['true', 't', 'yes', 'y']: return True @@ -196,16 +196,16 @@ def load_config(self, configfile=None, override_config=None): config = load_config(self.configfile) if 'collectors' in config: - if 'default' in config['collectors']: - self.config.merge(config['collectors']['default']) + if 'collectors.default' in config: + self.config.merge(config['collectors.default']) if self.name in config['collectors']: self.config.merge(config['collectors'][self.name]) if override_config is not None: if 'collectors' in override_config: - if 'default' in override_config['collectors']: - self.config.merge(override_config['collectors']['default']) + if 'collectors.default' in override_config: + self.config.merge(override_config['collectors.default']) if self.name in override_config['collectors']: self.config.merge(override_config['collectors'][self.name]) @@ -218,7 +218,7 @@ def process_config(self): event """ if 'byte_unit' in self.config: - if isinstance(self.config['byte_unit'], basestring): + if isinstance(self.config['byte_unit'], str): self.config['byte_unit'] = self.config['byte_unit'].split() if 'enabled' in self.config: diff --git a/src/diamond/gmetric.py b/src/diamond/gmetric.py index 5b06afd2a..5768e2586 100644 --- a/src/diamond/gmetric.py +++ b/src/diamond/gmetric.py @@ -37,7 +37,7 @@ # Made it work with the Ganglia 3.1 data format -from __future__ import print_function + from xdrlib import Packer, Unpacker import socket @@ -80,7 +80,7 @@ def __init__(self, host, port, protocol): def send(self, NAME, VAL, TYPE='', UNITS='', SLOPE='both', TMAX=60, DMAX=0, GROUP=""): if SLOPE not in slope_str2int: - raise ValueError("Slope must be one of: " + str(self.slope.keys())) + raise ValueError("Slope must 
be one of: " + str(list(self.slope.keys()))) if TYPE not in self.type: raise ValueError("Type must be one of: " + str(self.type)) if len(NAME) == 0: diff --git a/src/diamond/handler/archive.py b/src/diamond/handler/archive.py index 0157cb6d3..9e8aad4fb 100644 --- a/src/diamond/handler/archive.py +++ b/src/diamond/handler/archive.py @@ -5,7 +5,7 @@ every night and remove after 7 days. """ -from Handler import Handler +from diamond.handler.Handler import Handler import logging import logging.handlers diff --git a/src/diamond/handler/cloudwatch.py b/src/diamond/handler/cloudwatch.py index d1ab0ec4f..d40543151 100644 --- a/src/diamond/handler/cloudwatch.py +++ b/src/diamond/handler/cloudwatch.py @@ -44,7 +44,7 @@ import sys import datetime -from Handler import Handler +from diamond.handler.Handler import Handler from configobj import Section try: @@ -93,9 +93,9 @@ def __init__(self, config=None): 'collect_without_dimension', 'dimensions') self.rules = [] - for key_name, section in self.config.items(): + for key_name, section in list(self.config.items()): if section.__class__ is Section: - keys = section.keys() + keys = list(section.keys()) rules = self.get_default_rule_config() for key in keys: if key not in self.valid_config: diff --git a/src/diamond/handler/datadog.py b/src/diamond/handler/datadog.py index d25be756c..ba1d73b51 100644 --- a/src/diamond/handler/datadog.py +++ b/src/diamond/handler/datadog.py @@ -10,7 +10,7 @@ """ -from Handler import Handler +from diamond.handler.Handler import Handler import logging from collections import deque diff --git a/src/diamond/handler/g_metric.py b/src/diamond/handler/g_metric.py index 9d41804af..75182a359 100644 --- a/src/diamond/handler/g_metric.py +++ b/src/diamond/handler/g_metric.py @@ -5,7 +5,7 @@ [Ganglia Monitoring System](http://ganglia.sourceforge.net/) """ -from Handler import Handler +from diamond.handler.Handler import Handler import logging try: import gmetric diff --git a/src/diamond/handler/graphite.py 
b/src/diamond/handler/graphite.py index 87d0cae1e..eb5098103 100644 --- a/src/diamond/handler/graphite.py +++ b/src/diamond/handler/graphite.py @@ -16,7 +16,7 @@ """ -from Handler import Handler +from diamond.handler.Handler import Handler import socket import time @@ -128,7 +128,7 @@ def _send_data(self, data): Try to send all data in buffer. """ try: - self.socket.sendall(data) + self.socket.sendall(data.encode()) self._reset_errors() except: self._close() @@ -136,7 +136,7 @@ def _send_data(self, data): "trying reconnect.") self._connect() try: - self.socket.sendall(data) + self.socket.sendall(data.encode()) except: return self._reset_errors() diff --git a/src/diamond/handler/graphitepickle.py b/src/diamond/handler/graphitepickle.py index edda9c5e8..227fda55a 100644 --- a/src/diamond/handler/graphitepickle.py +++ b/src/diamond/handler/graphitepickle.py @@ -18,10 +18,10 @@ import struct -from graphite import GraphiteHandler +from diamond.handler.graphite import GraphiteHandler try: - import cPickle as pickle + import pickle as pickle except ImportError: import pickle as pickle diff --git a/src/diamond/handler/hostedgraphite.py b/src/diamond/handler/hostedgraphite.py index 01289d6b1..f6297b38d 100644 --- a/src/diamond/handler/hostedgraphite.py +++ b/src/diamond/handler/hostedgraphite.py @@ -16,8 +16,8 @@ """ -from Handler import Handler -from graphite import GraphiteHandler +from diamond.handler.Handler import Handler +from diamond.handler.graphite import GraphiteHandler class HostedGraphiteHandler(Handler): diff --git a/src/diamond/handler/httpHandler.py b/src/diamond/handler/httpHandler.py index f3bfefbe5..db87be3f2 100755 --- a/src/diamond/handler/httpHandler.py +++ b/src/diamond/handler/httpHandler.py @@ -5,8 +5,8 @@ Send metrics to a http endpoint via POST """ -from Handler import Handler -import urllib2 +from urllib.request import Request, urlopen +from diamond.handler.Handler import Handler class HttpPostHandler(Handler): @@ -56,6 +56,6 @@ def flush(self): 
self.post() def post(self): - req = urllib2.Request(self.url, "\n".join(self.metrics)) - urllib2.urlopen(req) + req = Request(self.url, "\n".join(self.metrics)) + urlopen(req) self.metrics = [] diff --git a/src/diamond/handler/influxdbHandler.py b/src/diamond/handler/influxdbHandler.py index 7f3af4b3c..632749958 100644 --- a/src/diamond/handler/influxdbHandler.py +++ b/src/diamond/handler/influxdbHandler.py @@ -29,7 +29,7 @@ """ import time -from Handler import Handler +from diamond.handler.Handler import Handler try: from influxdb.client import InfluxDBClient diff --git a/src/diamond/handler/libratohandler.py b/src/diamond/handler/libratohandler.py index c080ded31..6f3f99ce9 100644 --- a/src/diamond/handler/libratohandler.py +++ b/src/diamond/handler/libratohandler.py @@ -12,7 +12,7 @@ """ -from Handler import Handler +from diamond.handler.Handler import Handler import logging import time import re @@ -48,7 +48,7 @@ def __init__(self, config=None): # If a user leaves off the ending comma, cast to a array for them include_filters = self.config['include_filters'] - if isinstance(include_filters, basestring): + if isinstance(include_filters, str): include_filters = [include_filters] self.include_reg = re.compile(r'(?:%s)' % '|'.join(include_filters)) diff --git a/src/diamond/handler/logentries_diamond.py b/src/diamond/handler/logentries_diamond.py index 59cd81593..721467b80 100644 --- a/src/diamond/handler/logentries_diamond.py +++ b/src/diamond/handler/logentries_diamond.py @@ -5,9 +5,10 @@ based on data in real time. 
""" -from Handler import Handler +from urllib.error import URLError +from urllib.request import Request, urlopen +from diamond.handler.Handler import Handler import logging -import urllib2 import json from collections import deque @@ -75,9 +76,9 @@ def _send(self): metric = self.queue.popleft() topic, value, timestamp = str(metric).split() msg = json.dumps({"event": {topic: value}}) - req = urllib2.Request("https://js.logentries.com/v1/logs/" + + req = Request("https://js.logentries.com/v1/logs/" + self.log_token, msg) try: - urllib2.urlopen(req) - except urllib2.URLError as e: + urlopen(req) + except URLError as e: logging.error("Can't send log message to Logentries %s", e) diff --git a/src/diamond/handler/mqtt.py b/src/diamond/handler/mqtt.py index fc60ffa7a..b55ddfebf 100644 --- a/src/diamond/handler/mqtt.py +++ b/src/diamond/handler/mqtt.py @@ -59,7 +59,7 @@ """ -from Handler import Handler +from diamond.handler.Handler import Handler from diamond.collector import get_hostname import os HAVE_SSL = True diff --git a/src/diamond/handler/multigraphite.py b/src/diamond/handler/multigraphite.py index 4d8f8e55f..e7b7c3f73 100644 --- a/src/diamond/handler/multigraphite.py +++ b/src/diamond/handler/multigraphite.py @@ -6,8 +6,8 @@ Specify them as a list of hosts divided by comma. """ -from Handler import Handler -from graphite import GraphiteHandler +from diamond.handler.Handler import Handler +from diamond.handler.graphite import GraphiteHandler from copy import deepcopy diff --git a/src/diamond/handler/multigraphitepickle.py b/src/diamond/handler/multigraphitepickle.py index 3c49d4320..3d52f17a6 100644 --- a/src/diamond/handler/multigraphitepickle.py +++ b/src/diamond/handler/multigraphitepickle.py @@ -6,8 +6,8 @@ servers. Specify them as a list of hosts divided by comma. 
""" -from Handler import Handler -from graphitepickle import GraphitePickleHandler +from diamond.handler.Handler import Handler +from diamond.handler.graphitepickle import GraphitePickleHandler from copy import deepcopy diff --git a/src/diamond/handler/mysql.py b/src/diamond/handler/mysql.py index ee4e188ab..81ad77300 100644 --- a/src/diamond/handler/mysql.py +++ b/src/diamond/handler/mysql.py @@ -4,7 +4,7 @@ Insert the collected values into a mysql table """ -from Handler import Handler +from diamond.handler.Handler import Handler import MySQLdb diff --git a/src/diamond/handler/null.py b/src/diamond/handler/null.py index c593dd852..d17b109c2 100644 --- a/src/diamond/handler/null.py +++ b/src/diamond/handler/null.py @@ -4,7 +4,7 @@ Output the collected values to the debug log channel. """ -from Handler import Handler +from diamond.handler.Handler import Handler class NullHandler(Handler): diff --git a/src/diamond/handler/queue.py b/src/diamond/handler/queue.py index 8ebf44107..f14947249 100644 --- a/src/diamond/handler/queue.py +++ b/src/diamond/handler/queue.py @@ -5,8 +5,12 @@ do not try to use it as a normal handler """ -from Handler import Handler -import Queue +from diamond.handler.Handler import Handler +try: + import queue +except ImportError: + from . 
import queue as Queue + class QueueHandler(Handler): @@ -34,7 +38,7 @@ def _process(self, metric): """ try: self.queue.put(metric, block=False) - except Queue.Full: + except queue.Full: self._throttle_error('Queue full, check handlers for delays') def flush(self): @@ -48,5 +52,5 @@ def _flush(self): # Send a None down the queue to indicate a flush try: self.queue.put(None, block=False) - except Queue.Full: + except queue.Full: self._throttle_error('Queue full, check handlers for delays') diff --git a/src/diamond/handler/rabbitmq_pubsub.py b/src/diamond/handler/rabbitmq_pubsub.py index 974824f59..397899a21 100644 --- a/src/diamond/handler/rabbitmq_pubsub.py +++ b/src/diamond/handler/rabbitmq_pubsub.py @@ -4,7 +4,7 @@ Output the collected values to RabitMQ pub/sub channel """ -from Handler import Handler +from diamond.handler.Handler import Handler import time try: @@ -120,7 +120,7 @@ def _bind(self, rmq_server): """ Create PUB socket and bind """ - if ((rmq_server in self.connections.keys() and + if ((rmq_server in list(self.connections.keys()) and self.connections[rmq_server] is not None and self.connections[rmq_server].is_open)): # It seems we already have this server, so let's try _unbind just @@ -158,7 +158,7 @@ def _bind(self, rmq_server): self.reconnect_interval = 1 except Exception as exception: self.log.debug("Caught exception in _bind: %s", exception) - if rmq_server in self.connections.keys(): + if rmq_server in list(self.connections.keys()): self._unbind(rmq_server) if self.reconnect_interval >= 16: @@ -184,14 +184,14 @@ def __del__(self): Destroy instance of the rmqHandler class """ if hasattr(self, 'connections'): - for rmq_server in self.connections.keys(): + for rmq_server in list(self.connections.keys()): self._unbind(rmq_server) def process(self, metric): """ Process a metric and send it to RMQ pub socket """ - for rmq_server in self.connections.keys(): + for rmq_server in list(self.connections.keys()): try: if ((self.connections[rmq_server] is None 
or self.connections[rmq_server].is_open is False)): diff --git a/src/diamond/handler/rabbitmq_topic.py b/src/diamond/handler/rabbitmq_topic.py index 664a2a442..9dea2a994 100644 --- a/src/diamond/handler/rabbitmq_topic.py +++ b/src/diamond/handler/rabbitmq_topic.py @@ -6,7 +6,7 @@ the metric path """ -from Handler import Handler +from diamond.handler.Handler import Handler try: import pika diff --git a/src/diamond/handler/riemann.py b/src/diamond/handler/riemann.py index 02943dc7a..29ddab320 100644 --- a/src/diamond/handler/riemann.py +++ b/src/diamond/handler/riemann.py @@ -18,7 +18,7 @@ """ -from Handler import Handler +from diamond.handler.Handler import Handler import logging try: diff --git a/src/diamond/handler/rrdtool.py b/src/diamond/handler/rrdtool.py index 032b18166..d10df00ee 100644 --- a/src/diamond/handler/rrdtool.py +++ b/src/diamond/handler/rrdtool.py @@ -7,9 +7,13 @@ import os import re import subprocess -import Queue +try: + import queue as queue_mod +except ImportError: + import Queue as queue_mod -from Handler import Handler + +from diamond.handler.Handler import Handler # # Constants for RRD file creation. @@ -160,7 +164,7 @@ def process(self, metric): def _queue(self, filename, timestamp, value): if filename not in self._queues: - queue = Queue.Queue() + queue = queue_mod.Queue() self._queues[filename] = queue else: queue = self._queues[filename] @@ -169,7 +173,7 @@ def flush(self): # Grab all current queues. - for filename in self._queues.keys(): + for filename in list(self._queues.keys()): self._flush_queue(filename) def _flush_queue(self, filename): @@ -199,7 +203,7 @@ def _flush_queue(self, filename): if timestamp not in updates: updates[timestamp] = [] updates[timestamp].append(value) - except Queue.Empty: + except queue_mod.Empty: break # Save the last update time. @@ -210,10 +214,8 @@ # This will look like