[rhq] Branch 'feature/hadoop-plugin' - modules/plugins

lkrejci lkrejci at fedoraproject.org
Thu Aug 2 09:25:42 UTC 2012


 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java          |    6 
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java             |  209 ++++++++++
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerConfigurationDelegate.java |    2 
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerDiscovery.java             |  153 +++++++
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java            |  209 ----------
 modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java            |  158 -------
 modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml                                  |   30 -
 7 files changed, 381 insertions(+), 386 deletions(-)

New commits:
commit 1f6662cc739ca16343f388d5ed8422571ad03656
Author: Lukas Krejci <lkrejci at redhat.com>
Date:   Thu Aug 2 11:25:18 2012 +0200

    Renaming stuff, making config props not required
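
The "config props not required" part of this change means a plugin or resource configuration property may legitimately be absent at runtime, so component code should read such properties with a fallback. A minimal sketch using the RHQ Configuration API already used in this plugin (the class and method names below are illustrative, not part of this commit):

    import org.rhq.core.domain.configuration.Configuration;

    public class OptionalPropertyRead {
        // getSimpleValue(name, default) returns the default when the
        // optional property is missing or has no value.
        static int readLogPollingInterval(Configuration pluginConfig) {
            return Integer.parseInt(pluginConfig.getSimpleValue("logPollingInterval", "60"));
        }
    }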

diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java
index fdffe35..933eabd 100644
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopOperationsDelegate.java
@@ -41,9 +41,9 @@ public class HadoopOperationsDelegate {
     private static final long MAX_WAIT = 1000 * 60 * 5;
     private static final int MAX_OUTPUT = 2048;
 
-    private ResourceContext<HadoopServiceComponent> resourceContext;
+    private ResourceContext<HadoopServerComponent> resourceContext;
 
-    public HadoopOperationsDelegate(ResourceContext<HadoopServiceComponent> resourceContext) {
+    public HadoopOperationsDelegate(ResourceContext<HadoopServerComponent> resourceContext) {
         this.resourceContext = resourceContext;
     }
 
@@ -147,7 +147,7 @@ public class HadoopOperationsDelegate {
 
     private ProcessExecutionResults invokeGeneralOperation(HadoopSupportedOperations operation) {
         String hadoopHome = resourceContext.getPluginConfiguration()
-            .getSimple(HadoopServiceDiscovery.HOME_DIR_PROPERTY).getStringValue();
+            .getSimple(HadoopServerDiscovery.HOME_DIR_PROPERTY).getStringValue();
         String executable = hadoopHome + operation.getRelativePathToExecutable();
 
         ProcessExecutionResults results = executeExecutable(resourceContext.getSystemInformation(), executable,
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
new file mode 100644
index 0000000..9acc4ff
--- /dev/null
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerComponent.java
@@ -0,0 +1,209 @@
+/*
+ * RHQ Management Platform
+ * Copyright (C) 2005-2012 Red Hat, Inc.
+ * All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ */
+
+package org.rhq.plugins.hadoop;
+
+import java.io.File;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.mc4j.ems.connection.EmsConnection;
+import org.mc4j.ems.connection.bean.EmsBean;
+import org.mc4j.ems.connection.bean.attribute.EmsAttribute;
+
+import org.rhq.core.domain.configuration.Configuration;
+import org.rhq.core.domain.configuration.ConfigurationUpdateStatus;
+import org.rhq.core.domain.measurement.AvailabilityType;
+import org.rhq.core.domain.measurement.MeasurementDataNumeric;
+import org.rhq.core.domain.measurement.MeasurementDataTrait;
+import org.rhq.core.domain.measurement.MeasurementReport;
+import org.rhq.core.domain.measurement.MeasurementScheduleRequest;
+import org.rhq.core.pluginapi.configuration.ConfigurationFacet;
+import org.rhq.core.pluginapi.configuration.ConfigurationUpdateReport;
+import org.rhq.core.pluginapi.event.EventContext;
+import org.rhq.core.pluginapi.event.log.Log4JLogEntryProcessor;
+import org.rhq.core.pluginapi.event.log.LogFileEventPoller;
+import org.rhq.core.pluginapi.inventory.ResourceComponent;
+import org.rhq.core.pluginapi.inventory.ResourceContext;
+import org.rhq.core.pluginapi.measurement.MeasurementFacet;
+import org.rhq.core.pluginapi.operation.OperationFacet;
+import org.rhq.core.pluginapi.operation.OperationResult;
+import org.rhq.plugins.jmx.JMXComponent;
+import org.rhq.plugins.jmx.JMXServerComponent;
+
+public class HadoopServerComponent extends JMXServerComponent<ResourceComponent<?>> implements
+    JMXComponent<ResourceComponent<?>>, MeasurementFacet, OperationFacet, ConfigurationFacet {
+
+    private static final Log LOG = LogFactory.getLog(HadoopServerComponent.class);
+    
+    private static final String LOG_EVENT_TYPE = "logEntry";
+    private static final String LOG_POLLING_INTERVAL_PROPERTY = "logPollingInterval";
+    
+    private HadoopServerConfigurationDelegate configurationDelegate;
+    
+    private HadoopOperationsDelegate operationsDelegate;
+
+    @Override
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public void start(ResourceContext context) throws Exception {
+        super.start(context);
+        configurationDelegate = new HadoopServerConfigurationDelegate(context);
+        this.operationsDelegate = new HadoopOperationsDelegate(context);
+
+        EventContext events = context.getEventContext();
+        if (events != null) {
+            File logFile = determineLogFile();
+            int interval = Integer.parseInt(context.getPluginConfiguration().getSimpleValue(LOG_POLLING_INTERVAL_PROPERTY, "60"));                        
+            events.registerEventPoller(new LogFileEventPoller(events, LOG_EVENT_TYPE, logFile, new Log4JLogEntryProcessor(LOG_EVENT_TYPE, logFile)), interval);
+        }
+    }
+    
+    @Override
+    public void stop() {
+        EventContext events = getResourceContext().getEventContext();
+        if (events != null) {
+            events.unregisterEventPoller(LOG_EVENT_TYPE);
+        }
+        super.stop();
+    }
+    
+    /**
+     * Return availability of this resource
+     *  @see org.rhq.core.pluginapi.inventory.ResourceComponent#getAvailability()
+     */
+    @Override
+    public AvailabilityType getAvailability() {
+        return getResourceContext().getNativeProcess().isRunning() ? AvailabilityType.UP : AvailabilityType.DOWN;
+    }
+
+    @Override
+    public EmsConnection getEmsConnection() {
+        EmsConnection conn = super.getEmsConnection();
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("EmsConnection is " + conn.toString());
+        }
+        return conn;
+
+    }
+
+    /**
+     * Gather measurement data
+     *  @see org.rhq.core.pluginapi.measurement.MeasurementFacet#getValues(org.rhq.core.domain.measurement.MeasurementReport, java.util.Set)
+     */
+    public void getValues(MeasurementReport report, Set<MeasurementScheduleRequest> metrics) throws Exception {
+        for (MeasurementScheduleRequest request : metrics) {
+            String name = request.getName();
+            int delimIndex = name.lastIndexOf(':');
+            String beanName = name.substring(0, delimIndex);
+            String attributeName = name.substring(delimIndex + 1);
+            try {
+                EmsConnection emsConnection = getEmsConnection();
+                EmsBean bean = emsConnection.getBean(beanName);
+                if (bean != null) {
+                    bean.refreshAttributes();
+                    EmsAttribute attribute = bean.getAttribute(attributeName);
+                    if (attribute != null) {
+                        Object valueObject = attribute.refresh();
+                        if (valueObject instanceof Number) {
+                            Number value = (Number) valueObject;
+                            report.addData(new MeasurementDataNumeric(request, value.doubleValue()));
+                        } else {
+                            report.addData(new MeasurementDataTrait(request, valueObject.toString()));
+                        }
+                    } else {
+                        LOG.warn("Attribute " + attributeName + " not found");
+                    }
+                } else {
+                    LOG.warn("MBean " + beanName + " not found");
+                }
+            } catch (Exception e) {
+                LOG.error("Failed to obtain measurement [" + name + "]", e);
+            }
+        }
+    }
+
+    public Configuration loadResourceConfiguration() throws Exception {
+        return configurationDelegate.loadConfiguration();
+    }
+
+    public void updateResourceConfiguration(ConfigurationUpdateReport report) {    
+        try {
+            Configuration updatedConfiguration = report.getConfiguration();
+            configurationDelegate.updateConfiguration(updatedConfiguration);
+            report.setStatus(ConfigurationUpdateStatus.SUCCESS);
+        } catch (Exception e) {
+            report.setErrorMessageFromThrowable(e);
+            report.setStatus(ConfigurationUpdateStatus.FAILURE);
+        }
+    }
+
+    /**
+     * Invokes the passed operation on the managed resource
+     * @param name Name of the operation
+     * @param params The method parameters
+     * @return An operation result
+     * @see org.rhq.core.pluginapi.operation.OperationFacet
+     */
+    public OperationResult invokeOperation(String name, Configuration params) throws Exception {
+        HadoopSupportedOperations operation = HadoopSupportedOperations.valueOf(name.toUpperCase());
+        return operationsDelegate.invoke(operation, params);
+    }
+    
+    private File determineLogFile() {
+        String username = getResourceContext().getNativeProcess().getCredentialsName().getUser();
+        String hostname = getResourceContext().getSystemInformation().getHostname();
+        
+        String serverType = getServerType();
+        
+        String name = "hadoop-" + username + "-" + serverType + "-" + hostname + ".log";
+                        
+        return new File(new File(getHomeDir(), "logs"), name);
+    }
+    
+    private String getServerType() {
+        String mainClass = getResourceContext().getPluginConfiguration().getSimpleValue("_mainClass");
+        int dot = mainClass.lastIndexOf('.');
+        String className = mainClass.substring(dot + 1);
+        
+        return className.toLowerCase();
+    }
+    
+    private File getHomeDir() {
+        File homeDir =
+            new File(getResourceContext().getPluginConfiguration().getSimpleValue(HadoopServerDiscovery.HOME_DIR_PROPERTY));
+
+        if (!homeDir.exists()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
+                + homeDir.getAbsolutePath() + ") no longer exists.");
+        }
+
+        if (!homeDir.isDirectory()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
+                + homeDir.getAbsolutePath() + ") is not a directory.");
+        }
+
+        if (!homeDir.canRead()) {
+            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
+                + homeDir.getAbsolutePath() + ") is not readable.");
+        }
+
+        return homeDir;
+    }    
+}
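
The getValues() implementation above expects metric names of the form "<MBean object name>:<attribute>", split on the last ':'. A minimal sketch of that split, using a metric name taken from the plugin descriptor later in this commit:

    public class MetricNameSplit {
        public static void main(String[] args) {
            String name = "Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_decommissioned";
            int delimIndex = name.lastIndexOf(':');
            String beanName = name.substring(0, delimIndex);       // "Hadoop:service=JobTracker,name=JobTrackerMetrics"
            String attributeName = name.substring(delimIndex + 1); // "trackers_decommissioned"
            System.out.println(beanName + " -> " + attributeName);
        }
    }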
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerConfigurationDelegate.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerConfigurationDelegate.java
index cf4ddf8..4a42b3d 100644
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerConfigurationDelegate.java
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerConfigurationDelegate.java
@@ -170,7 +170,7 @@ public class HadoopServerConfigurationDelegate {
 
     private File getHomeDir() {
         File homeDir =
-            new File(componentContext.getPluginConfiguration().getSimpleValue(HadoopServiceDiscovery.HOME_DIR_PROPERTY));
+            new File(componentContext.getPluginConfiguration().getSimpleValue(HadoopServerDiscovery.HOME_DIR_PROPERTY));
 
         if (!homeDir.exists()) {
             throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerDiscovery.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerDiscovery.java
new file mode 100644
index 0000000..c1512e9
--- /dev/null
+++ b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServerDiscovery.java
@@ -0,0 +1,153 @@
+/*
+ * RHQ Management Platform
+ * Copyright (C) 2005-2012 Red Hat, Inc.
+ * All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ */
+package org.rhq.plugins.hadoop;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.mc4j.ems.connection.support.metadata.LocalVMTypeDescriptor;
+
+import org.rhq.core.domain.configuration.Configuration;
+import org.rhq.core.domain.configuration.PropertySimple;
+import org.rhq.core.domain.resource.ResourceType;
+import org.rhq.core.pluginapi.inventory.DiscoveredResourceDetails;
+import org.rhq.core.pluginapi.inventory.InvalidPluginConfigurationException;
+import org.rhq.core.pluginapi.inventory.ProcessScanResult;
+import org.rhq.core.pluginapi.inventory.ResourceComponent;
+import org.rhq.core.pluginapi.inventory.ResourceDiscoveryComponent;
+import org.rhq.core.pluginapi.inventory.ResourceDiscoveryContext;
+import org.rhq.plugins.jmx.JMXDiscoveryComponent;
+
+/**
+ * Discovers individual Hadoop processes
+ * @author Heiko W. Rupp
+ * @author Lukas Krejci
+ */
+public class HadoopServerDiscovery implements ResourceDiscoveryComponent<ResourceComponent<?>> {
+
+    private final Log log = LogFactory.getLog(HadoopServerDiscovery.class);
+    private static final String HADOOP_VERSION_MATCH = "hadoop-core-([0-9\\.]+)\\.jar";
+    private static final Pattern HADOOP_VERSION_PATTERN = Pattern.compile(HADOOP_VERSION_MATCH);
+    private static final String MAIN_CLASS_PROPERTY = "_mainClass";
+    public static final String HOME_DIR_PROPERTY = "hadoop.home.dir";
+    private static final String HOME_DIR_OPTION = "-Dhadoop.home.dir";
+
+    public Set<DiscoveredResourceDetails> discoverResources(
+        ResourceDiscoveryContext<ResourceComponent<?>> resourceDiscoveryContext)
+        throws InvalidPluginConfigurationException, Exception {
+
+        Set<DiscoveredResourceDetails> details = new HashSet<DiscoveredResourceDetails>();
+
+        List<ProcessScanResult> processScans = resourceDiscoveryContext.getAutoDiscoveredProcesses();
+        ResourceType resourceType = resourceDiscoveryContext.getResourceType();
+        String rtName = resourceType.getName();
+
+        for (ProcessScanResult psr : processScans) {
+
+            String cwd = psr.getProcessInfo().getCurrentWorkingDirectory();
+
+            String homeDir = getHadoopHomeDirIfAvailable(psr.getProcessInfo().getCommandLine());
+            if (homeDir == null) {
+                homeDir = cwd;
+            }
+
+            String version = getVersion(homeDir);
+
+            Configuration pluginConfiguration = resourceDiscoveryContext.getDefaultPluginConfiguration();
+
+            DiscoveredResourceDetails detail = new DiscoveredResourceDetails(resourceType, // ResourceType
+                homeDir, // ResourceKey
+                rtName, // resource name
+                version, // Version
+                rtName + " ( " + cwd + " )", // description
+                pluginConfiguration, psr.getProcessInfo() // process info
+                );
+
+            /*
+             * We'll connect to the discovered VM on the local host, so set the jmx connection
+             * properties accordingly. This may only work on JDK6+, but then JDK5 is deprecated
+             * anyway.
+             */
+            pluginConfiguration.put(new PropertySimple(JMXDiscoveryComponent.COMMAND_LINE_CONFIG_PROPERTY,
+                pluginConfiguration.getSimpleValue(MAIN_CLASS_PROPERTY, null)));
+            pluginConfiguration.put(new PropertySimple(JMXDiscoveryComponent.CONNECTION_TYPE,
+                LocalVMTypeDescriptor.class.getName()));
+
+            pluginConfiguration.put(new PropertySimple(HOME_DIR_PROPERTY, homeDir));
+
+            log.debug("Discovered " + detail);
+
+            details.add(detail);
+        }
+
+        return details;
+    }
+
+    /**
+     * Gets the Hadoop version by looking for a hadoop-core-x.y.z.jar
+     * in the given Hadoop home directory.
+     * @param hadoopHomeDir the Hadoop home directory to scan
+     * @return the Hadoop version string, or null if it cannot be determined
+     */
+    private String getVersion(String hadoopHomeDir) {
+
+        File homeDir = new File(hadoopHomeDir);
+        if (homeDir.isDirectory() && homeDir.canRead()) {
+            String[] foundCoreJars = homeDir.list(new FilenameFilter() {
+                public boolean accept(File dir, String name) {
+                    return HADOOP_VERSION_PATTERN.matcher(name).matches();
+                }
+            });
+
+            if (foundCoreJars == null || foundCoreJars.length == 0) {
+                return null;
+            }
+
+            Matcher matcher = HADOOP_VERSION_PATTERN.matcher(foundCoreJars[0]);
+            if (matcher.matches()) {
+                return matcher.group(1);
+            } else {
+                return null;
+            }
+        } else {
+            return null;
+        }
+    }
+
+    private String getHadoopHomeDirIfAvailable(String[] cmdline) {
+        for (int i = 0; i < cmdline.length; ++i) {
+            String cmd = cmdline[i];
+            if (cmd.startsWith(HOME_DIR_OPTION)) {
+                int eqPos = cmd.indexOf('=');
+                if (eqPos > 0) {
+                    return cmd.substring(eqPos + 1);
+                }
+            }
+        }
+
+        return null;
+    }
+}
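
The getVersion() method above derives the Hadoop version from the name of the hadoop-core jar found in the home directory. A minimal sketch of how HADOOP_VERSION_PATTERN extracts the version (the jar file name below is only an example):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class HadoopVersionPatternDemo {
        public static void main(String[] args) {
            Pattern pattern = Pattern.compile("hadoop-core-([0-9\\.]+)\\.jar");
            Matcher matcher = pattern.matcher("hadoop-core-0.20.2.jar"); // example jar name
            if (matcher.matches()) {
                System.out.println(matcher.group(1)); // prints 0.20.2
            }
        }
    }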
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
deleted file mode 100644
index 94c1b4a..0000000
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceComponent.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * RHQ Management Platform
- * Copyright (C) 2005-2012 Red Hat, Inc.
- * All rights reserved.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-
-package org.rhq.plugins.hadoop;
-
-import java.io.File;
-import java.util.Set;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.mc4j.ems.connection.EmsConnection;
-import org.mc4j.ems.connection.bean.EmsBean;
-import org.mc4j.ems.connection.bean.attribute.EmsAttribute;
-
-import org.rhq.core.domain.configuration.Configuration;
-import org.rhq.core.domain.configuration.ConfigurationUpdateStatus;
-import org.rhq.core.domain.measurement.AvailabilityType;
-import org.rhq.core.domain.measurement.MeasurementDataNumeric;
-import org.rhq.core.domain.measurement.MeasurementDataTrait;
-import org.rhq.core.domain.measurement.MeasurementReport;
-import org.rhq.core.domain.measurement.MeasurementScheduleRequest;
-import org.rhq.core.pluginapi.configuration.ConfigurationFacet;
-import org.rhq.core.pluginapi.configuration.ConfigurationUpdateReport;
-import org.rhq.core.pluginapi.event.EventContext;
-import org.rhq.core.pluginapi.event.log.Log4JLogEntryProcessor;
-import org.rhq.core.pluginapi.event.log.LogFileEventPoller;
-import org.rhq.core.pluginapi.inventory.ResourceComponent;
-import org.rhq.core.pluginapi.inventory.ResourceContext;
-import org.rhq.core.pluginapi.measurement.MeasurementFacet;
-import org.rhq.core.pluginapi.operation.OperationFacet;
-import org.rhq.core.pluginapi.operation.OperationResult;
-import org.rhq.plugins.jmx.JMXComponent;
-import org.rhq.plugins.jmx.JMXServerComponent;
-
-public class HadoopServiceComponent extends JMXServerComponent<ResourceComponent<?>> implements
-    JMXComponent<ResourceComponent<?>>, MeasurementFacet, OperationFacet, ConfigurationFacet {
-
-    private static final Log LOG = LogFactory.getLog(HadoopServiceComponent.class);
-    
-    private static final String LOG_EVENT_TYPE = "logEntry";
-    private static final String LOG_POLLING_INTERVAL_PROPERTY = "logPollingInterval";
-    
-    private HadoopServerConfigurationDelegate configurationDelegate;
-    
-    private HadoopOperationsDelegate operationsDelegate;
-
-    @Override
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    public void start(ResourceContext context) throws Exception {
-        super.start(context);
-        configurationDelegate = new HadoopServerConfigurationDelegate(context);
-        this.operationsDelegate = new HadoopOperationsDelegate(context);
-
-        EventContext events = context.getEventContext();
-        if (events != null) {
-            File logFile = determineLogFile();
-            int interval = Integer.parseInt(context.getPluginConfiguration().getSimpleValue(LOG_POLLING_INTERVAL_PROPERTY, "60"));                        
-            events.registerEventPoller(new LogFileEventPoller(events, LOG_EVENT_TYPE, logFile, new Log4JLogEntryProcessor(LOG_EVENT_TYPE, logFile)), interval);
-        }
-    }
-    
-    @Override
-    public void stop() {
-        EventContext events = getResourceContext().getEventContext();
-        if (events != null) {
-            events.unregisterEventPoller(LOG_EVENT_TYPE);
-        }
-        super.stop();
-    }
-    
-    /**
-     * Return availability of this resource
-     *  @see org.rhq.core.pluginapi.inventory.ResourceComponent#getAvailability()
-     */
-    @Override
-    public AvailabilityType getAvailability() {
-        return getResourceContext().getNativeProcess().isRunning() ? AvailabilityType.UP : AvailabilityType.DOWN;
-    }
-
-    @Override
-    public EmsConnection getEmsConnection() {
-        EmsConnection conn = super.getEmsConnection();
-        if (LOG.isTraceEnabled()) {
-            LOG.trace("EmsConnection is " + conn.toString());
-        }
-        return conn;
-
-    }
-
-    /**
-     * Gather measurement data
-     *  @see org.rhq.core.pluginapi.measurement.MeasurementFacet#getValues(org.rhq.core.domain.measurement.MeasurementReport, java.util.Set)
-     */
-    public void getValues(MeasurementReport report, Set<MeasurementScheduleRequest> metrics) throws Exception {
-        for (MeasurementScheduleRequest request : metrics) {
-            String name = request.getName();
-            int delimIndex = name.lastIndexOf(':');
-            String beanName = name.substring(0, delimIndex);
-            String attributeName = name.substring(delimIndex + 1);
-            try {
-                EmsConnection emsConnection = getEmsConnection();
-                EmsBean bean = emsConnection.getBean(beanName);
-                if (bean != null) {
-                    bean.refreshAttributes();
-                    EmsAttribute attribute = bean.getAttribute(attributeName);
-                    if (attribute != null) {
-                        Object valueObject = attribute.refresh();
-                        if (valueObject instanceof Number) {
-                            Number value = (Number) valueObject;
-                            report.addData(new MeasurementDataNumeric(request, value.doubleValue()));
-                        } else {
-                            report.addData(new MeasurementDataTrait(request, valueObject.toString()));
-                        }
-                    } else {
-                        LOG.warn("Attribute " + attributeName + " not found");
-                    }
-                } else {
-                    LOG.warn("MBean " + beanName + " not found");
-                }
-            } catch (Exception e) {
-                LOG.error("Failed to obtain measurement [" + name + "]", e);
-            }
-        }
-    }
-
-    public Configuration loadResourceConfiguration() throws Exception {
-        return configurationDelegate.loadConfiguration();
-    }
-
-    public void updateResourceConfiguration(ConfigurationUpdateReport report) {    
-        try {
-            Configuration updatedConfiguration = report.getConfiguration();
-            configurationDelegate.updateConfiguration(updatedConfiguration);
-            report.setStatus(ConfigurationUpdateStatus.SUCCESS);
-        } catch (Exception e) {
-            report.setErrorMessageFromThrowable(e);
-            report.setStatus(ConfigurationUpdateStatus.FAILURE);
-        }
-    }
-
-    /**
-     * Invokes the passed operation on the managed resource
-     * @param name Name of the operation
-     * @param params The method parameters
-     * @return An operation result
-     * @see org.rhq.core.pluginapi.operation.OperationFacet
-     */
-    public OperationResult invokeOperation(String name, Configuration params) throws Exception {
-        HadoopSupportedOperations operation = HadoopSupportedOperations.valueOf(name.toUpperCase());
-        return operationsDelegate.invoke(operation, params);
-    }
-    
-    private File determineLogFile() {
-        String username = getResourceContext().getNativeProcess().getCredentialsName().getUser();
-        String hostname = getResourceContext().getSystemInformation().getHostname();
-        
-        String serverType = getServerType();
-        
-        String name = "hadoop-" + username + "-" + serverType + "-" + hostname + ".log";
-                        
-        return new File(new File(getHomeDir(), "logs"), name);
-    }
-    
-    private String getServerType() {
-        String mainClass = getResourceContext().getPluginConfiguration().getSimpleValue("_mainClass");
-        int dot = mainClass.lastIndexOf('.');
-        String className = mainClass.substring(dot + 1);
-        
-        return className.toLowerCase();
-    }
-    
-    private File getHomeDir() {
-        File homeDir =
-            new File(getResourceContext().getPluginConfiguration().getSimpleValue(HadoopServiceDiscovery.HOME_DIR_PROPERTY));
-
-        if (!homeDir.exists()) {
-            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
-                + homeDir.getAbsolutePath() + ") no longer exists.");
-        }
-
-        if (!homeDir.isDirectory()) {
-            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
-                + homeDir.getAbsolutePath() + ") is not a directory.");
-        }
-
-        if (!homeDir.canRead()) {
-            throw new IllegalArgumentException("The configured home directory of this Hadoop instance ("
-                + homeDir.getAbsolutePath() + ") is not readable.");
-        }
-
-        return homeDir;
-    }    
-}
diff --git a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java b/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
deleted file mode 100644
index e292168..0000000
--- a/modules/plugins/hadoop/src/main/java/org/rhq/plugins/hadoop/HadoopServiceDiscovery.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * RHQ Management Platform
- * Copyright (C) 2005-2012 Red Hat, Inc.
- * All rights reserved.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation version 2 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-package org.rhq.plugins.hadoop;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.mc4j.ems.connection.support.metadata.LocalVMTypeDescriptor;
-
-import org.rhq.core.domain.configuration.Configuration;
-import org.rhq.core.domain.configuration.PropertySimple;
-import org.rhq.core.domain.resource.ResourceType;
-import org.rhq.core.pluginapi.inventory.DiscoveredResourceDetails;
-import org.rhq.core.pluginapi.inventory.InvalidPluginConfigurationException;
-import org.rhq.core.pluginapi.inventory.ProcessScanResult;
-import org.rhq.core.pluginapi.inventory.ResourceComponent;
-import org.rhq.core.pluginapi.inventory.ResourceDiscoveryComponent;
-import org.rhq.core.pluginapi.inventory.ResourceDiscoveryContext;
-import org.rhq.plugins.jmx.JMXDiscoveryComponent;
-
-/**
- * Discover individual hadoop processes
- * @author Heiko W. Rupp
- * @author Lukas Krejci
- */
-public class HadoopServiceDiscovery implements ResourceDiscoveryComponent<ResourceComponent<?>> {
-
-    private final Log log = LogFactory.getLog(HadoopServiceDiscovery.class);
-    private static final String HADOOP_VERSION_MATCH = "hadoop-core-([0-9\\.]+)\\.jar";
-    private static final Pattern HADOOP_VERSION_PATTERN = Pattern.compile(HADOOP_VERSION_MATCH);
-    private static final String MAIN_CLASS_PROPERTY = "_mainClass";
-    public static final String HOME_DIR_PROPERTY = "hadoop.home.dir";
-    private static final String HOME_DIR_OPTION = "-Dhadoop.home.dir";    
-    
-    public Set<DiscoveredResourceDetails> discoverResources(
-        ResourceDiscoveryContext<ResourceComponent<?>> resourceDiscoveryContext)
-        throws InvalidPluginConfigurationException, Exception {
-
-        Set<DiscoveredResourceDetails> details = new HashSet<DiscoveredResourceDetails>();
-
-        List<ProcessScanResult> processScans = resourceDiscoveryContext.getAutoDiscoveredProcesses();
-        ResourceType resourceType = resourceDiscoveryContext.getResourceType();
-        String rtName = resourceType.getName();
-
-        for (ProcessScanResult psr : processScans) {
-
-            if (psr.getProcessScan().getName().equals(rtName)) {
-
-                String cwd = psr.getProcessInfo().getCurrentWorkingDirectory();
-
-                String version = getVersion(cwd);
-
-                Configuration pluginConfiguration = resourceDiscoveryContext.getDefaultPluginConfiguration();
-
-                //TODO is it ok to base the resource key on the current working directory as opposed to
-                //the configured hadoop.home.dir? How do they differ?
-                DiscoveredResourceDetails detail = new DiscoveredResourceDetails(resourceType, // ResourceType
-                    rtName + ":" + cwd, // ResourceKey
-                    rtName, // resource name
-                    version, // Version
-                    "Hadoop " + rtName + " ( " + cwd + " )", // description
-                    pluginConfiguration, psr.getProcessInfo() // process info
-                    );
-
-                /*
-                 * We'll connect to the discovered VM on the local host, so set the jmx connection
-                 * properties accordingly. This may only work on JDK6+, but then JDK5 is deprecated
-                 * anyway.
-                 */
-                pluginConfiguration.put(new PropertySimple(JMXDiscoveryComponent.COMMAND_LINE_CONFIG_PROPERTY,
-                    pluginConfiguration.getSimpleValue(MAIN_CLASS_PROPERTY, null)));
-                pluginConfiguration.put(new PropertySimple(JMXDiscoveryComponent.CONNECTION_TYPE,
-                    LocalVMTypeDescriptor.class.getName()));
-                
-                String homeDir = getHadoopHomeDirIfAvailable(psr.getProcessInfo().getCommandLine());
-                if (homeDir == null) {
-                    homeDir = cwd;                    
-                }
-                
-                pluginConfiguration.put(new PropertySimple(HOME_DIR_PROPERTY, homeDir));
-                
-                log.debug("Discovered " + detail);
-
-                details.add(detail);
-            }
-        }
-
-        return details;
-    }
-
-    /**
-     * Get hadoop version
-     * from command line by looking at haoop-core-xx-core.jar
-     * @param commandLine Command line args for the java executable
-     * @return hdoop version string or null if it can not be determined
-     */
-    private String getVersion(String hadoopHomeDir) {
-
-        File homeDir = new File(hadoopHomeDir);
-        if (homeDir.isDirectory() && homeDir.canRead()) {
-            String[] foundCoreJars = homeDir.list(new FilenameFilter() {
-                public boolean accept(File dir, String name) {
-                    return HADOOP_VERSION_PATTERN.matcher(name).matches();
-                }
-            });
-
-            if (foundCoreJars == null || foundCoreJars.length == 0) {
-                return null;
-            }
-
-            Matcher matcher = HADOOP_VERSION_PATTERN.matcher(foundCoreJars[0]);
-            if (matcher.matches()) {
-                return matcher.group(1);
-            } else {
-                return null;
-            }
-        } else {
-            return null;
-        }
-    }
-
-    private String getHadoopHomeDirIfAvailable(String[] cmdline) {
-        for(int i = 0; i < cmdline.length; ++i) {
-            String cmd = cmdline[i];
-            if (cmd.startsWith(HOME_DIR_OPTION)) {
-                int eqPos = cmd.indexOf('=');
-                if (eqPos > 0) {
-                    return cmd.substring(eqPos + 1); 
-                }
-            }
-        }
-        
-        return null;
-    }
-}
diff --git a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
index 819d265..1b3a372 100644
--- a/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
+++ b/modules/plugins/hadoop/src/main/resources/META-INF/rhq-plugin.xml
@@ -5,7 +5,7 @@
   <depends plugin="JMX" useClasses="true"/>
 
   <!-- NameNode (http://wiki.apache.org/hadoop/NameNode) -->
-  <server name="NameNode" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
+  <server name="Hadoop NameNode" discovery="HadoopServerDiscovery" class="HadoopServerComponent">
     <plugin-configuration>
       <c:simple-property name="hadoop.home.dir" displayName="Home Directory"/>
       <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true"
@@ -56,15 +56,15 @@
     <event name="logEntry" description="an entry in a log file"/>
 
     <resource-configuration>
-      <c:simple-property name="conf/core-site.xml:fs.default.name" displayName="Namenode URI"/>
+      <c:simple-property name="conf/core-site.xml:fs.default.name" displayName="Namenode URI" required="false"/>
       <c:simple-property name="conf/hdfs-site.xml:dfs.name.dir" displayName="Local Namespace and Logs Storage Directory"
-        description="Path on the local filesystem where the NameNode stores the namespace and transactions logs persistently."/>
+        description="Path on the local filesystem where the NameNode stores the namespace and transactions logs persistently." required="false"/>
       <c:simple-property name="conf/hdfs-site.xml:dfs.block.size" displayName="HDFS Block Size"
-        description="Path on the local filesystem where the NameNode stores the namespace and transactions logs persistently. The value is in bytes."/>
+        description="The default block size for new files. The value is in bytes." required="false"/>
     </resource-configuration>
   </server>
 
-  <server name="SecondaryNameNode" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
+  <server name="Hadoop SecondaryNameNode" discovery="HadoopServerDiscovery" class="HadoopServerComponent">
     <plugin-configuration>
       <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true"
         default="org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode"/>
@@ -73,7 +73,7 @@
   </server>
 
   <!-- DataNode (http://wiki.apache.org/hadoop/DataNode) -->
-  <server name="DataNode" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
+  <server name="DataNode" discovery="HadoopServerDiscovery" class="HadoopServerComponent">
     <plugin-configuration>
       <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true"
         default="org.apache.hadoop.hdfs.server.datanode.DataNode"/>
@@ -90,12 +90,12 @@
 
     <resource-configuration>
       <c:simple-property name="conf/hdfs-site.xml:dfs.data.dir" displayName="Storage Directory"
-        description="Comma separated list of paths on the local filesystem of a DataNode where it should store its blocks."/>
+        description="Comma separated list of paths on the local filesystem of a DataNode where it should store its blocks." required="false"/>
     </resource-configuration>
   </server>
 
   <!-- JobTracker (http://wiki.apache.org/hadoop/JobTracker) -->
-  <server name="JobTracker" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
+  <server name="Hadoop JobTracker" discovery="HadoopServerDiscovery" class="HadoopServerComponent">
     <plugin-configuration>
       <c:simple-property name="baseObjectName" defaultValue="hadoop:service=JobTracker"/>
       <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true"
@@ -136,17 +136,17 @@
     <metric property="Hadoop:service=JobTracker,name=JobTrackerMetrics:trackers_decommissioned" displayName="Excluded Nodes"/>
 
     <resource-configuration>
-        <c:simple-property name="conf/mapred-site.xml:mapred.job.tracker" displayName="Host And Port" description="Host or IP and port of JobTracker. host:port pair."/>
-        <c:simple-property name="conf/mapred-site.xml:mapred.system.dir" displayName="System Files Location" description="Path on the HDFS where where the MapReduce framework stores system files e.g. /hadoop/mapred/system/. This is in the default filesystem (HDFS) and must be accessible from both the server and client machines."/>
-        <c:simple-property name="conf/mapred-site.xml:mapred.local.dir" displayName="Data Files Location" description="Comma-separated list of paths on the local filesystem where temporary MapReduce data is written. Multiple paths help spread disk i/o."/>
-        <c:simple-property name="conf/mapred-site.xml:mapred.tasktracker.map.tasks.maximum" displayName="Maximum Map Tasks" description="The maximum number of Map tasks, which are run simultaneously on a given TaskTracker, individually. Defaults to 2 (2 maps and 2 reduces), but vary it depending on your hardware."/>
-        <c:simple-property name="conf/mapred-site.xml:mapred.tasktracker.reduce.tasks.maximum" displayName="Maximum Reduce Tasks" description="The maximum number of Reduce tasks, which are run simultaneously on a given TaskTracker, individually. Defaults to 2 (2 maps and 2 reduces), but vary it depending on your hardware."/>
-        <c:simple-property name="conf/mapred-site.xml:mapred.queue.names" displayName="Job Queues" description="Comma separated list of queues to which jobs can be submitted. The MapReduce system always supports atleast one queue with the name as default. Hence, this parameter's value should always contain the string default. Some job schedulers supported in Hadoop, like the Capacity Scheduler, support multiple queues. If such a scheduler is being used, the list of configured queue names must be specified here. Once queues are defined, users can submit jobs to a queue using the property name mapred.job.queue.name in the job configuration. There could be a separate configuration file for configuring properties of these queues that is managed by the scheduler. Refer to the documentation of the scheduler for information on the same."/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.job.tracker" displayName="Host And Port" description="Host or IP and port of JobTracker. host:port pair." required="false"/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.system.dir" displayName="System Files Location" description="Path on the HDFS where where the MapReduce framework stores system files e.g. /hadoop/mapred/system/. This is in the default filesystem (HDFS) and must be accessible from both the server and client machines." required="false"/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.local.dir" displayName="Data Files Location" description="Comma-separated list of paths on the local filesystem where temporary MapReduce data is written. Multiple paths help spread disk i/o." required="false"/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.tasktracker.map.tasks.maximum" displayName="Maximum Map Tasks" description="The maximum number of Map tasks, which are run simultaneously on a given TaskTracker, individually. Defaults to 2 (2 maps and 2 reduces), but vary it depending on your hardware." required="false"/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.tasktracker.reduce.tasks.maximum" displayName="Maximum Reduce Tasks" description="The maximum number of Reduce tasks, which are run simultaneously on a given TaskTracker, individually. Defaults to 2 (2 maps and 2 reduces), but vary it depending on your hardware." required="false"/>
+        <c:simple-property name="conf/mapred-site.xml:mapred.queue.names" displayName="Job Queues" description="Comma separated list of queues to which jobs can be submitted. The MapReduce system always supports atleast one queue with the name as default. Hence, this parameter's value should always contain the string default. Some job schedulers supported in Hadoop, like the Capacity Scheduler, support multiple queues. If such a scheduler is being used, the list of configured queue names must be specified here. Once queues are defined, users can submit jobs to a queue using the property name mapred.job.queue.name in the job configuration. There could be a separate configuration file for configuring properties of these queues that is managed by the scheduler. Refer to the documentation of the scheduler for information on the same." required="false"/>
     </resource-configuration>
   </server>
 
   <!-- TaskTracker (http://wiki.apache.org/hadoop/TaskTracker) -->
-  <server name="TaskTracker" discovery="HadoopServiceDiscovery" class="HadoopServiceComponent">
+  <server name="Hadoop TaskTracker" discovery="HadoopServerDiscovery" class="HadoopServerComponent">
     <plugin-configuration>
       <c:simple-property name="_mainClass" displayName="Main Class" readOnly="true"
         default="org.apache.hadoop.mapred.TaskTracker"/>



