Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion crawler/crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from emitters_manager import EmittersManager
from host_crawler import HostCrawler
from vms_crawler import VirtualMachinesCrawler
from k8s_pods_crawler import PodsCrawler

logger = None

Expand Down Expand Up @@ -94,10 +95,11 @@ def main():
Modes.MOUNTPOINT,
Modes.OUTCONTAINER,
Modes.MESOS,
Modes.K8S
],
default=Modes.INVM,
help='The crawler mode: '
'{INVM,OUTVM,MOUNTPOINT,OUTCONTAINER}. '
'{INVM,OUTVM,MOUNTPOINT,OUTCONTAINER,K8S}. '
'Defaults to INVM',
)
parser.add_argument(
Expand Down Expand Up @@ -222,6 +224,14 @@ def main():
host_namespace=args.namespace,
plugin_places=args.plugin_places,
options=options)
elif args.crawlmode == 'K8S':
crawler = PodsCrawler(
features=args.features,
environment=args.environment,
user_list=args.crawlContainers,
host_namespace=args.namespace,
plugin_places=args.plugin_places,
options=options)
else:
raise NotImplementedError('Invalid crawlmode')

Expand Down
3 changes: 2 additions & 1 deletion crawler/crawlmodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@
OUTVM='OUTVM',
MOUNTPOINT='MOUNTPOINT',
OUTCONTAINER='OUTCONTAINER',
MESOS='MESOS')
MESOS='MESOS',
K8S='K8S')
19 changes: 19 additions & 0 deletions crawler/icrawl_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,3 +71,22 @@ def get_feature(self):
Returns the feature type as a string.
"""
raise NotImplementedError()


class IPodCrawler(IPlugin):

    """
    Crawler plugin interface for Kubernetes pods.

    Subclasses implement crawling functions for the pods managed on the
    host where the crawler is running. Concrete plugins must override
    both ``crawl`` and ``get_feature``.
    """

    def crawl(self, pod):
        """
        Crawl one pod and produce its features.

        :param pod: the pod to crawl — a v1.Pod object as produced by
            the Kubernetes client (see KubernetesClient.list_all_pods).
        :return: an iterable of
            (feature_key, feature_attributes, feature_type) tuples,
            as returned by concrete plugins such as RedisPodCrawler.
        """
        raise NotImplementedError()

    def get_feature(self):
        """
        Return the feature type handled by this plugin as a string.
        """
        raise NotImplementedError()
42 changes: 42 additions & 0 deletions crawler/k8s_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import logging
import socket
from kubernetes import client, config

logger = logging.getLogger('crawlutils')


class KubernetesClient(object):
    '''
    Thin wrapper around the Kubernetes API server connection.

    Generates v1.Pod objects and hands them to each crawler plugin.

    Relies on the Kubernetes Python client library, developed in the
    kube-incubator project. The yielded v1.Pod objects follow the
    Kubernetes object model; see the Kubernetes REST API reference:
    https://kubernetes.io/docs/api-reference/v1/definitions
    '''

    def __init__(self, namespace=None):
        '''
        :param namespace: optional namespace restriction (stored only;
            filtering is done per-host in list_all_pods).
        '''
        self.namespace = namespace
        # Resolve this host's address so pods can be filtered down to
        # the ones scheduled here.
        self.hostname = socket.gethostname()
        self.ipaddr = socket.gethostbyname(self.hostname)

        # TODO: multiple KUBE_CONFIG support
        # Only the default kube config path (~/.kube/config) is used for
        # authentication; it must be in place before crawling starts.
        config.load_kube_config()
        self.v1client = client.CoreV1Api()

    def list_all_pods(self):
        '''
        Yield every live v1.Pod scheduled on this host.

        Pods from all namespaces are listed (V1PodList) and filtered to
        those whose host IP matches this machine's address.
        '''
        local_ip = self.ipaddr
        for pod in self.v1client.list_pod_for_all_namespaces().items:
            if pod.status.host_ip == local_ip:
                yield pod
55 changes: 55 additions & 0 deletions crawler/k8s_pods_crawler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
from k8s_client import KubernetesClient
import plugins_manager
from base_crawler import BaseCrawler, BaseFrame


class PodFrame(BaseFrame):

    """
    A crawl frame holding the features gathered from one Kubernetes pod.

    Tags the frame metadata with ``system_type='kubernetes'`` so the
    frame can be distinguished from other system types downstream.
    """

    def __init__(self, feature_types, pod):
        """
        :param feature_types: feature types this frame will carry
            (passed through to BaseFrame).
        :param pod: the pod being crawled; currently unused here — kept
            for the metadata extension noted in the TODO below.
        """
        BaseFrame.__init__(self, feature_types)
        # TODO: add additional k8s special metadata if needed
        # self.metadata.update(pod.get_metadata_dict())
        self.metadata['system_type'] = 'kubernetes'


class PodsCrawler(BaseCrawler):

    """
    Crawler for Kubernetes pods running on this host.

    Discovers pods through the Kubernetes API server (KubernetesClient)
    and runs every registered IPodCrawler plugin against each pod,
    collecting the results into PodFrame objects.
    """

    def __init__(self,
                 features=None,
                 environment='kubernetes',
                 user_list='ALL',
                 host_namespace='',
                 plugin_places=None,
                 options=None):
        """
        :param features: feature names to crawl; defaults to
            ['os', 'cpu'].
        :param environment: runtime environment plugin name.
        :param user_list: which pods/containers to crawl ('ALL' or a
            list of identifiers).
        :param host_namespace: namespace prefix used when emitting
            frames.
        :param plugin_places: directories searched for plugins;
            defaults to ['plugins'].
        :param options: per-plugin options dict; defaults to {}.
        """
        # Resolve defaults here instead of using mutable default
        # arguments, so state cannot leak between calls.
        if features is None:
            features = ['os', 'cpu']
        if plugin_places is None:
            plugin_places = ['plugins']
        if options is None:
            options = {}

        BaseCrawler.__init__(
            self,
            features=features,
            plugin_places=plugin_places,
            options=options)
        plugins_manager.reload_env_plugin(environment, plugin_places)
        plugins_manager.reload_pod_crawl_plugins(
            features, plugin_places, options)
        self.plugins = plugins_manager.get_pod_crawl_plugins(features)
        self.environment = environment
        self.host_namespace = host_namespace
        self.user_list = user_list
        self.k8s_client = KubernetesClient()

    def _crawl_pod(self, pod, ignore_plugin_exception=True):
        """
        Run every pod crawl plugin against one pod.

        :param pod: v1.Pod object to crawl.
        :param ignore_plugin_exception: when True (default), a failing
            plugin is skipped instead of aborting the whole frame.
        :return: a PodFrame with the features gathered from the pod.
        """
        frame = PodFrame(self.features, pod)
        for (plugin_obj, plugin_args) in self.plugins:
            try:
                frame.add_features(
                    plugin_obj.crawl(pod, **plugin_args))
            except Exception:
                if not ignore_plugin_exception:
                    # Bare raise keeps the plugin's original traceback.
                    raise
        return frame

    def crawl(self, ignore_plugin_exception=True):
        """
        Crawl every pod on this host, yielding one PodFrame per pod.
        """
        for pod in self.k8s_client.list_all_pods():
            yield self._crawl_pod(pod, ignore_plugin_exception)

    def polling_pod_crawl(self, timeout, ignore_plugin_exception=True):
        # Polling mode is not supported for pods yet.
        raise NotImplementedError()
8 changes: 8 additions & 0 deletions crawler/plugins/applications/redis/redis_pod_crawler.plugin
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
[Core]
Name = application_redis_pod
Module = redis_pod_crawler

[Documentation]
Author = Tatsuhiro Chiba ([email protected])
Version = 0.1
Description = Redis crawling function for k8s pod on the host
46 changes: 46 additions & 0 deletions crawler/plugins/applications/redis/redis_pod_crawler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
from icrawl_plugin import IPodCrawler
from plugins.applications.redis import feature
from requests.exceptions import ConnectionError
import redis
import logging

logger = logging.getLogger('crawlutils')


class RedisPodCrawler(IPodCrawler):
    '''
    Crawl application-provided metrics for a redis container running in
    a docker/k8s pod. Redis usually listens on port 6379.
    '''

    def __init__(self):
        self.feature_type = "application"  # feature type tag in results
        self.feature_key = "redis"         # feature key / 'app' label match
        self.default_port = 6379           # standard redis port

    def get_feature(self):
        '''Return the feature key ("redis") handled by this plugin.'''
        return self.feature_key

    def crawl(self, pod, **kwargs):
        '''
        Crawl one pod and return its redis INFO metrics.

        pod equals to V1Pod objects
        see REST definition in the following link
        https://kubernetes.io/docs/api-reference/v1/definitions/#_v1_pod

        :raises NameError: when the pod is not a redis pod.
        :raises ConnectionError: when redis is not reachable.
        '''
        # Pods with no labels at all, or without an 'app' label, are
        # treated as non-redis pods (previously this raised
        # KeyError/TypeError instead of the intended NameError).
        labels = pod.metadata.labels or {}
        if self.feature_key not in labels.get('app', ''):
            raise NameError("not %s container" % self.feature_key)

        # retrieve ip & port info
        ip = pod.status.pod_ip
        port = pod.spec.containers[0].ports[0].container_port

        # access redis endpoint
        client = redis.Redis(host=ip, port=port)
        try:
            metrics = client.info()
        except ConnectionError as ce:
            logger.info("redis does not listen on port:%d", port)
            raise ce

        feature_attributes = feature.create_feature(metrics)
        return [(self.feature_key, feature_attributes, self.feature_type)]
30 changes: 29 additions & 1 deletion crawler/plugins_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
from yapsy.PluginManager import PluginManager
import urlparse
import config_parser
from icrawl_plugin import IContainerCrawler, IVMCrawler, IHostCrawler
from icrawl_plugin import IContainerCrawler, IVMCrawler, \
IHostCrawler, IPodCrawler
from iemit_plugin import IEmitter
from runtime_environment import IRuntimeEnvironment
from utils import misc
Expand All @@ -14,6 +15,7 @@
# default runtime environment: cloudsigth and plugins in 'plugins/'
runtime_env = None

pod_crawl_plugins = []
container_crawl_plugins = []
vm_crawl_plugins = []
host_crawl_plugins = []
Expand Down Expand Up @@ -135,6 +137,21 @@ def load_crawl_plugins(
yield (plugin.plugin_object, plugin_args)


def reload_pod_crawl_plugins(
        features=['os', 'cpu'],
        plugin_places=['plugins'],
        options={}):
    """
    Reload the pod crawl plugins into the module-level cache.

    Fix: the default was ``['os,' 'cpu']`` — implicit string
    concatenation made that a single ``'os,cpu'`` entry instead of the
    two features used by the sibling reload_* functions.

    :param features: feature names to load plugins for.
    :param plugin_places: directories searched for plugins.
    :param options: per-plugin options dict.
    """
    global pod_crawl_plugins

    pod_crawl_plugins = list(
        load_crawl_plugins(
            category_filter={
                "crawler": IPodCrawler},
            features=features,
            plugin_places=plugin_places,
            options=options))


def reload_container_crawl_plugins(
features=['os', 'cpu'],
plugin_places=['plugins'],
Expand Down Expand Up @@ -180,6 +197,17 @@ def reload_host_crawl_plugins(
options=options))


def get_pod_crawl_plugins(features=['os', 'cpu']):
    """
    Return the cached pod crawl plugins, loading them on first use.

    :param features: feature names, used only when the plugin cache is
        still empty and has to be populated.
    """
    global pod_crawl_plugins
    if pod_crawl_plugins:
        return pod_crawl_plugins
    reload_pod_crawl_plugins(features=features)
    return pod_crawl_plugins


def get_container_crawl_plugins(
features=[
'package',
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,4 @@ Yapsy==1.11.223
configobj==4.7.0
redis>=2.10.5
morph==0.1.2
kubernetes>=1.0.0b1