
Commit 6baaf48

Rishabh Moudgil authored and masci committed
Port hdfs_namenode tests to pytest (#1615)
* Port over hdfs_namenode to pytest
* port over test to pytest
* assert coverage on the metrics received
* fix style issues with check
* minor cosmetics
1 parent e22e48e commit 6baaf48

18 files changed: +261 / -308 lines changed

hdfs_namenode/MANIFEST.in

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+include README.md
+include requirements.in
+include requirements.txt
+include requirements-dev.txt
+graft datadog_checks
+graft tests
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+# (C) Datadog, Inc. 2018
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+# (C) Datadog, Inc. 2018
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+__version__ = "1.2.1"
Lines changed: 9 additions & 6 deletions
@@ -1,7 +1,10 @@
-from . import hdfs_namenode
+# (C) Datadog, Inc. 2018
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+from .hdfs_namenode import HDFSNameNode
+from .__about__ import __version__
 
-HDFSNameNode = hdfs_namenode.HDFSNameNode
-
-__version__ = "1.2.1"
-
-__all__ = ['hdfs_namenode']
+__all__ = [
+    '__version__',
+    'HDFSNameNode'
+]
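The reworked `__init__.py` re-exports the check class and the version directly, so downstream code can pull both from the package namespace instead of going through the submodule. A minimal usage sketch, assuming the wheel is installed and using only the two names the diff exports:

# Minimal sketch: consuming the names the new __init__.py exposes.
from datadog_checks.hdfs_namenode import HDFSNameNode, __version__

print(__version__)            # "1.2.1", single-sourced from __about__.py
print(HDFSNameNode.__name__)  # the check class is importable without the submodule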

hdfs_namenode/datadog_checks/hdfs_namenode/hdfs_namenode.py

Lines changed: 44 additions & 56 deletions
@@ -1,6 +1,6 @@
-# (C) Datadog, Inc. 2010-2017
+# (C) Datadog, Inc. 2018
 # All rights reserved
-# Licensed under Simplified BSD License (see LICENSE)
+# Licensed under a 3-clause BSD style license (see LICENSE)
 
 
 '''
@@ -40,7 +40,7 @@
 from simplejson import JSONDecodeError
 
 # Project
-from checks import AgentCheck
+from datadog_checks.checks import AgentCheck
 
 # Service check names
 JMX_SERVICE_CHECK = 'hdfs.namenode.jmx.can_connect'
@@ -59,34 +59,35 @@
 
 # HDFS metrics
 HDFS_NAME_SYSTEM_STATE_METRICS = {
-    'CapacityTotal' : ('hdfs.namenode.capacity_total', GAUGE),
-    'CapacityUsed' : ('hdfs.namenode.capacity_used', GAUGE),
-    'CapacityRemaining' : ('hdfs.namenode.capacity_remaining', GAUGE),
-    'TotalLoad' : ('hdfs.namenode.total_load', GAUGE),
-    'FsLockQueueLength' : ('hdfs.namenode.fs_lock_queue_length', GAUGE),
-    'BlocksTotal' : ('hdfs.namenode.blocks_total', GAUGE),
-    'MaxObjects' : ('hdfs.namenode.max_objects', GAUGE),
-    'FilesTotal' : ('hdfs.namenode.files_total', GAUGE),
-    'PendingReplicationBlocks' : ('hdfs.namenode.pending_replication_blocks', GAUGE),
-    'UnderReplicatedBlocks' : ('hdfs.namenode.under_replicated_blocks', GAUGE),
-    'ScheduledReplicationBlocks' : ('hdfs.namenode.scheduled_replication_blocks', GAUGE),
-    'PendingDeletionBlocks' : ('hdfs.namenode.pending_deletion_blocks', GAUGE),
-    'NumLiveDataNodes' : ('hdfs.namenode.num_live_data_nodes', GAUGE),
-    'NumDeadDataNodes' : ('hdfs.namenode.num_dead_data_nodes', GAUGE),
-    'NumDecomLiveDataNodes' : ('hdfs.namenode.num_decom_live_data_nodes', GAUGE),
-    'NumDecomDeadDataNodes' : ('hdfs.namenode.num_decom_dead_data_nodes', GAUGE),
-    'VolumeFailuresTotal' : ('hdfs.namenode.volume_failures_total', GAUGE),
-    'EstimatedCapacityLostTotal' : ('hdfs.namenode.estimated_capacity_lost_total', GAUGE),
-    'NumDecommissioningDataNodes' : ('hdfs.namenode.num_decommissioning_data_nodes', GAUGE),
-    'NumStaleDataNodes' : ('hdfs.namenode.num_stale_data_nodes', GAUGE),
-    'NumStaleStorages' : ('hdfs.namenode.num_stale_storages', GAUGE),
+    'CapacityTotal': ('hdfs.namenode.capacity_total', GAUGE),
+    'CapacityUsed': ('hdfs.namenode.capacity_used', GAUGE),
+    'CapacityRemaining': ('hdfs.namenode.capacity_remaining', GAUGE),
+    'TotalLoad': ('hdfs.namenode.total_load', GAUGE),
+    'FsLockQueueLength': ('hdfs.namenode.fs_lock_queue_length', GAUGE),
+    'BlocksTotal': ('hdfs.namenode.blocks_total', GAUGE),
+    'MaxObjects': ('hdfs.namenode.max_objects', GAUGE),
+    'FilesTotal': ('hdfs.namenode.files_total', GAUGE),
+    'PendingReplicationBlocks': ('hdfs.namenode.pending_replication_blocks', GAUGE),
+    'UnderReplicatedBlocks': ('hdfs.namenode.under_replicated_blocks', GAUGE),
+    'ScheduledReplicationBlocks': ('hdfs.namenode.scheduled_replication_blocks', GAUGE),
+    'PendingDeletionBlocks': ('hdfs.namenode.pending_deletion_blocks', GAUGE),
+    'NumLiveDataNodes': ('hdfs.namenode.num_live_data_nodes', GAUGE),
+    'NumDeadDataNodes': ('hdfs.namenode.num_dead_data_nodes', GAUGE),
+    'NumDecomLiveDataNodes': ('hdfs.namenode.num_decom_live_data_nodes', GAUGE),
+    'NumDecomDeadDataNodes': ('hdfs.namenode.num_decom_dead_data_nodes', GAUGE),
+    'VolumeFailuresTotal': ('hdfs.namenode.volume_failures_total', GAUGE),
+    'EstimatedCapacityLostTotal': ('hdfs.namenode.estimated_capacity_lost_total', GAUGE),
+    'NumDecommissioningDataNodes': ('hdfs.namenode.num_decommissioning_data_nodes', GAUGE),
+    'NumStaleDataNodes': ('hdfs.namenode.num_stale_data_nodes', GAUGE),
+    'NumStaleStorages': ('hdfs.namenode.num_stale_storages', GAUGE),
 }
 
 HDFS_NAME_SYSTEM_METRICS = {
-    'MissingBlocks' : ('hdfs.namenode.missing_blocks', GAUGE),
-    'CorruptBlocks' : ('hdfs.namenode.corrupt_blocks', GAUGE)
+    'MissingBlocks': ('hdfs.namenode.missing_blocks', GAUGE),
+    'CorruptBlocks': ('hdfs.namenode.corrupt_blocks', GAUGE)
 }
 
+
 class HDFSNameNode(AgentCheck):
 
     def check(self, instance):
@@ -100,26 +101,21 @@ def check(self, instance):
         tags = list(set(tags))
 
         # Get metrics from JMX
-        self._hdfs_namenode_metrics(jmx_address, disable_ssl_validation,
-                                    HDFS_NAME_SYSTEM_STATE_BEAN,
-                                    HDFS_NAME_SYSTEM_STATE_METRICS, tags)
+        self._hdfs_namenode_metrics(jmx_address, disable_ssl_validation, HDFS_NAME_SYSTEM_STATE_BEAN,
+                                    HDFS_NAME_SYSTEM_STATE_METRICS, tags)
 
-        self._hdfs_namenode_metrics(jmx_address, disable_ssl_validation,
-                                    HDFS_NAME_SYSTEM_BEAN,
-                                    HDFS_NAME_SYSTEM_METRICS, tags)
+        self._hdfs_namenode_metrics(jmx_address, disable_ssl_validation, HDFS_NAME_SYSTEM_BEAN,
+                                    HDFS_NAME_SYSTEM_METRICS, tags)
 
-        self.service_check(JMX_SERVICE_CHECK,
-                           AgentCheck.OK,
-                           tags=tags,
-                           message='Connection to %s was successful' % jmx_address)
+        self.service_check(JMX_SERVICE_CHECK, AgentCheck.OK, tags=tags,
+                           message='Connection to %s was successful' % jmx_address)
 
     def _hdfs_namenode_metrics(self, jmx_uri, disable_ssl_validation, bean_name, metrics, tags):
         '''
         Get HDFS namenode metrics from JMX
         '''
-        response = self._rest_request_to_json(jmx_uri, disable_ssl_validation,
-                                              JMX_PATH,
-                                              query_params={'qry':bean_name}, tags=tags)
+        response = self._rest_request_to_json(jmx_uri, disable_ssl_validation, JMX_PATH,
+                                              query_params={'qry': bean_name}, tags=tags)
 
         beans = response.get('beans', [])
 
@@ -169,38 +165,30 @@ def _rest_request_to_json(self, address, disable_ssl_validation, object_path, qu
         self.log.debug('Attempting to connect to "%s"' % url)
 
         try:
-            response = requests.get(url, timeout=self.default_integration_http_timeout, verify=not disable_ssl_validation)
+            response = requests.get(url, timeout=self.default_integration_http_timeout,
+                                    verify=not disable_ssl_validation)
             response.raise_for_status()
             response_json = response.json()
 
         except Timeout as e:
-            self.service_check(JMX_SERVICE_CHECK,
-                               AgentCheck.CRITICAL,
-                               tags=tags,
-                               message="Request timeout: {0}, {1}".format(url, e))
+            self.service_check(JMX_SERVICE_CHECK, AgentCheck.CRITICAL, tags=tags,
+                               message="Request timeout: {}, {}".format(url, e))
             raise
 
         except (HTTPError,
                 InvalidURL,
                 ConnectionError) as e:
-            self.service_check(JMX_SERVICE_CHECK,
-                               AgentCheck.CRITICAL,
-                               tags=tags,
-                               message="Request failed: {0}, {1}".format(url, e))
+            self.service_check(JMX_SERVICE_CHECK, AgentCheck.CRITICAL, tags=tags,
+                               message="Request failed: {}, {}".format(url, e))
             raise
 
         except JSONDecodeError as e:
-            self.service_check(JMX_SERVICE_CHECK,
-                               AgentCheck.CRITICAL,
-                               tags=tags,
-                               message='JSON Parse failed: {0}, {1}'.format(url, e))
+            self.service_check(JMX_SERVICE_CHECK, AgentCheck.CRITICAL, tags=tags,
+                               message='JSON Parse failed: {}, {}'.format(url, e))
            raise
 
         except ValueError as e:
-            self.service_check(JMX_SERVICE_CHECK,
-                               AgentCheck.CRITICAL,
-                               tags=tags,
-                               message=str(e))
+            self.service_check(JMX_SERVICE_CHECK, AgentCheck.CRITICAL, tags=tags, message=str(e))
             raise
 
         return response_json
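For context on what `_hdfs_namenode_metrics` is parsing: the check hits the NameNode's HTTP JMX servlet with a `qry` filter and walks `response['beans']`, reading each key of the metric dictionaries above as a bean attribute. A rough sketch of that request and of the payload shape being consumed; the `qry` parameter comes from the diff, while the `/jmx` path, port 50070, and the bean name are typical NameNode defaults assumed for illustration:

# Sketch of the JMX query the check issues and the payload shape it consumes.
# The /jmx path, port, and bean name are common NameNode defaults, not values from this diff.
import requests

resp = requests.get(
    'http://localhost:50070/jmx',
    params={'qry': 'Hadoop:service=NameNode,name=FSNamesystemState'},
    timeout=10,
)
beans = resp.json().get('beans', [])
if beans:
    # Each key of HDFS_NAME_SYSTEM_STATE_METRICS is looked up as an attribute of the bean.
    print(beans[0].get('CapacityTotal'), beans[0].get('NumLiveDataNodes'))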

hdfs_namenode/manifest.json

Lines changed: 1 addition & 4 deletions
@@ -1,16 +1,13 @@
 {
   "maintainer": "[email protected]",
-  "manifest_version": "0.1.1",
-  "max_agent_version": "6.0.0",
-  "min_agent_version": "5.6.3",
+  "manifest_version": "1.0.0",
   "name": "hdfs_namenode",
   "short_description": "Track cluster disk usage, volume failures, dead DataNodes, and more.",
   "support": "core",
   "supported_os": [
     "linux",
     "mac_os"
   ],
-  "version": "1.2.1",
   "guid": "41454590-0a25-4146-9c74-9d377db42764",
   "public_title": "Datadog-HDFS Namenode Integration",
   "categories":["processing", "os & system"],

hdfs_namenode/requirements-dev.txt

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+mock==2.0.0
+pytest
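These are the new test-only dependencies; the ported pytest tests themselves are part of the commit but not shown in this excerpt. A hypothetical sketch of the style such a test can take, matching the commit message's "assert coverage on the metrics received" — the aggregator fixture, instance key, and stubbed payload are assumptions, not the repo's actual test code:

# Hypothetical pytest-style sketch; the aggregator fixture, instance key, and
# stub payload are illustrative assumptions, not the actual tests/ contents.
import mock

from datadog_checks.hdfs_namenode import HDFSNameNode

FAKE_BEANS = {'beans': [{'CapacityTotal': 1024, 'NumLiveDataNodes': 3}]}
INSTANCE = {'hdfs_namenode_jmx_uri': 'http://localhost:50070'}


def test_namenode_metrics(aggregator):
    check = HDFSNameNode('hdfs_namenode', {}, {})
    with mock.patch('requests.get') as get:
        get.return_value.json.return_value = FAKE_BEANS
        get.return_value.raise_for_status.return_value = None
        check.check(INSTANCE)

    # Every stubbed bean attribute should have produced a gauge.
    aggregator.assert_metric('hdfs.namenode.capacity_total', value=1024)
    aggregator.assert_metric('hdfs.namenode.num_live_data_nodes', value=3)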

hdfs_namenode/setup.py

Lines changed: 21 additions & 72 deletions
@@ -1,71 +1,33 @@
+# (C) Datadog, Inc. 2018
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
 # Always prefer setuptools over distutils
 from setuptools import setup
 # To use a consistent encoding
 from codecs import open
 from os import path
 
-import json
-import re
-
-here = path.abspath(path.dirname(__file__))
-
-def parse_req_line(line):
-    line = line.strip()
-    if not line or line.startswith('--hash') or line[0] == '#':
-        return None
-    req = line.rpartition('#')
-    if len(req[1]) == 0:
-        line = req[2].strip()
-    else:
-        line = req[1].strip()
-
-    if '--hash=' in line:
-        line = line[:line.find('--hash=')].strip()
-    if ';' in line:
-        line = line[:line.find(';')].strip()
-    if '\\' in line:
-        line = line[:line.find('\\')].strip()
-
-    return line
+HERE = path.abspath(path.dirname(__file__))
 
 # Get the long description from the README file
-with open(path.join(here, 'README.md'), encoding='utf-8') as f:
+with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
     long_description = f.read()
 
-# Parse requirements
-runtime_reqs = ['datadog_checks_base']
-with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
-    for line in f.readlines():
-        req = parse_req_line(line)
-        if req:
-            runtime_reqs.append(req)
+# Get version info
+ABOUT = {}
+with open(path.join(HERE, "datadog_checks", "hdfs_namenode", "__about__.py")) as f:
+    exec(f.read(), ABOUT)
 
-def read(*parts):
-    with open(path.join(here, *parts), 'r') as fp:
-        return fp.read()
 
-def find_version(*file_paths):
-    version_file = read(*file_paths)
-    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
-                              version_file, re.M)
-    if version_match:
-        return version_match.group(1)
-    raise RuntimeError("Unable to find version string.")
+def get_requirements(fpath):
+    with open(path.join(HERE, fpath), encoding='utf-8') as f:
+        return f.readlines()
 
-# https://packaging.python.org/guides/single-sourcing-package-version/
-version = find_version("datadog_checks", "hdfs_namenode", "__init__.py")
-
-manifest_version = None
-with open(path.join(here, 'manifest.json'), encoding='utf-8') as f:
-    manifest = json.load(f)
-    manifest_version = manifest.get('version')
-
-if version != manifest_version:
-    raise Exception("Inconsistent versioning in module and manifest - aborting wheel build")
 
 setup(
     name='datadog-hdfs_namenode',
-    version=version,
+    version=ABOUT['__version__'],
     description='The HDFS Namenode check',
     long_description=long_description,
     keywords='datadog agent hdfs_namenode check',
@@ -78,15 +40,15 @@ def find_version(*file_paths):
     author_email='[email protected]',
 
     # License
-    license='MIT',
+    license='BSD',
 
     # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
     classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: System :: Monitoring',
-        'License :: OSI Approved :: MIT License',
+        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
@@ -95,26 +57,13 @@ def find_version(*file_paths):
     packages=['datadog_checks.hdfs_namenode'],
 
     # Run-time dependencies
-    install_requires=list(set(runtime_reqs)),
-
-    # Development dependencies, run with:
-    # $ pip install -e .[dev]
-    extras_require={
-        'dev': [
-            'check-manifest',
-            'datadog_agent_tk>=5.15',
-        ],
-    },
-
-    # Testing setup and dependencies
-    tests_require=[
-        'nose',
-        'coverage',
-        'datadog_agent_tk>=5.15',
+    install_requires=get_requirements('requirements.in') + [
+        'datadog_checks_base',
     ],
-    test_suite='nose.collector',
+    setup_requires=['pytest-runner', ],
+    tests_require=get_requirements('requirements-dev.txt'),
 
     # Extra files to ship with the wheel package
-    package_data={b'datadog_checks.hdfs_namenode': ['conf.yaml.example']},
+    package_data={b'datadog_checks.hdfs_namenode': ['conf.yaml.example', 'autoconf.yaml']},
     include_package_data=True,
 )
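Version single-sourcing now works by exec-ing `__about__.py` into a dict at build time, replacing the regex-based `find_version` helper and the manifest.json consistency check. A minimal standalone illustration of the same pattern, assuming it is run from the integration's root directory:

# Minimal illustration of the single-sourcing pattern setup.py now uses.
ABOUT = {}
with open('datadog_checks/hdfs_namenode/__about__.py') as f:
    exec(f.read(), ABOUT)

print(ABOUT['__version__'])  # "1.2.1"

Reading the file rather than importing the package keeps the wheel build from pulling in the check's runtime dependencies. With `setup_requires=['pytest-runner']` and `tests_require` pointing at requirements-dev.txt, the ported suite can be driven through setuptools (`python setup.py test`) as well as by invoking pytest directly.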
