Posted to commits@ambari.apache.org by ao...@apache.org on 2014/01/31 20:50:27 UTC
[02/51] [partial] AMBARI-4491. Move all the supported versions in
Baikal for stack to python code (remove dependence on puppet). (aonishuk)
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json b/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
deleted file mode 100644
index 9520b02..0000000
--- a/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
+++ /dev/null
@@ -1,549 +0,0 @@
-{
- "roleCommand": "START",
- "clusterName": "cl1",
- "hostname": "c6402.ambari.apache.org",
- "hostLevelParams": {
- "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
- "ambari_db_rca_password": "mapred",
- "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
- "jce_name": "UnlimitedJCEPolicyJDK7.zip",
- "oracle_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//ojdbc6.jar",
- "repo_info": "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/1.x/updates/1.3.3.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-1.3.4\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/1.x/updates/1.3.3.0\"}]",
- "package_list": "[{\"type\":\"rpm\",\"name\":\"hive\"},{\"type\":\"rpm\",\"name\":\"mysql-connector-java\"},{\"type\":\"rpm\",\"name\":\"mysql\"},{\"type\":\"rpm\",\"name\":\"mysql-server\"}]",
- "stack_version": "1.3.4",
- "stack_name": "HDP",
- "db_name": "ambari",
- "ambari_db_rca_driver": "org.postgresql.Driver",
- "jdk_name": "jdk-7u45-linux-x64.tar.gz",
- "ambari_db_rca_username": "mapred",
- "java_home": "/usr/jdk64/jdk1.7.0_45",
- "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"
- },
- "commandType": "EXECUTION_COMMAND",
- "roleParams": {},
- "serviceName": "HIVE",
- "role": "MYSQL_SERVER",
- "commandParams": {
- "command_timeout": "600",
- "service_package_folder": "HIVE",
- "script_type": "PYTHON",
- "schema_version": "2.0",
- "script": "scripts/mysql_server.py",
- "excluded_hosts": "host1"
- },
- "taskId": 117,
- "public_hostname": "c6402.ambari.apache.org",
- "configurations": {
- "mapred-site": {
- "ambari.mapred.child.java.opts.memory": "768",
- "mapred.job.reduce.input.buffer.percent": "0.0",
- "mapred.job.map.memory.mb": "1536",
- "mapred.output.compression.type": "BLOCK",
- "mapred.jobtracker.maxtasks.per.job": "-1",
- "mapreduce.jobtracker.keytab.file": "/etc/security/keytabs/jt.service.keytab",
- "mapred.map.output.compression.codec": "org.apache.hadoop.io.compress.SnappyCodec",
- "mapred.child.root.logger": "INFO,TLA",
- "mapred.tasktracker.tasks.sleeptime-before-sigkill": "250",
- "io.sort.spill.percent": "0.9",
- "mapred.reduce.parallel.copies": "30",
- "mapred.userlog.retain.hours": "24",
- "mapred.reduce.tasks.speculative.execution": "false",
- "mapred.healthChecker.interval": "135000",
- "io.sort.mb": "200",
- "mapreduce.jobtracker.kerberos.principal": "jt/_HOST@EXAMPLE.COM",
- "mapred.jobtracker.blacklist.fault-timeout-window": "180",
- "mapreduce.cluster.administrators": " hadoop",
- "mapred.job.shuffle.input.buffer.percent": "0.7",
- "mapred.job.tracker.history.completed.location": "/mapred/history/done",
- "io.sort.record.percent": ".2",
- "mapred.cluster.max.reduce.memory.mb": "4096",
- "mapred.job.reuse.jvm.num.tasks": "1",
- "mapreduce.jobhistory.intermediate-done-dir": "/mr-history/tmp",
- "mapred.job.tracker.http.address": "c6402.ambari.apache.org:50030",
- "mapred.job.tracker.persist.jobstatus.hours": "1",
- "mapred.healthChecker.script.path": "/etc/hadoop/conf/health_check",
- "mapreduce.jobtracker.staging.root.dir": "/user",
- "mapred.job.shuffle.merge.percent": "0.66",
- "mapred.cluster.reduce.memory.mb": "2048",
- "mapred.job.tracker.persist.jobstatus.dir": "/mapred/jobstatus",
- "mapreduce.tasktracker.group": "hadoop",
- "mapred.tasktracker.map.tasks.maximum": "4",
- "mapred.child.java.opts": "-server -Xmx${ambari.mapred.child.java.opts.memory}m -Djava.net.preferIPv4Stack=true",
- "mapreduce.jobhistory.keytab.file": "/etc/security/keytabs/jt.service.keytab",
- "mapred.jobtracker.retirejob.check": "10000",
- "mapred.job.tracker": "c6402.ambari.apache.org:50300",
- "mapreduce.history.server.embedded": "false",
- "io.sort.factor": "100",
- "hadoop.job.history.user.location": "none",
- "mapreduce.reduce.input.limit": "10737418240",
- "mapred.reduce.slowstart.completed.maps": "0.05",
- "mapred.cluster.max.map.memory.mb": "6144",
- "mapreduce.tasktracker.keytab.file": "/etc/security/keytabs/tt.service.keytab",
- "mapred.jobtracker.taskScheduler": "org.apache.hadoop.mapred.CapacityTaskScheduler",
- "mapred.max.tracker.blacklists": "16",
- "mapreduce.tasktracker.kerberos.principal": "tt/_HOST@EXAMPLE.COM",
- "mapred.local.dir": "/hadoop/mapred",
- "mapreduce.history.server.http.address": "c6402.ambari.apache.org:51111",
- "mapred.jobtracker.restart.recover": "false",
- "mapred.jobtracker.blacklist.fault-bucket-width": "15",
- "mapred.jobtracker.retirejob.interval": "21600000",
- "tasktracker.http.threads": "50",
- "mapred.job.tracker.persist.jobstatus.active": "false",
- "mapred.system.dir": "/mapred/system",
- "mapred.tasktracker.reduce.tasks.maximum": "2",
- "mapred.cluster.map.memory.mb": "1536",
- "mapred.hosts.exclude": "/etc/hadoop/conf/mapred.exclude",
- "mapred.queue.names": "default",
- "mapreduce.jobhistory.webapp.address": "c6402.ambari.apache.org:19888",
- "mapreduce.fileoutputcommitter.marksuccessfuljobs": "false",
- "mapred.job.reduce.memory.mb": "2048",
- "mapreduce.jobhistory.done-dir": "/mr-history/done",
- "mapred.healthChecker.script.timeout": "60000",
- "jetty.connector": "org.mortbay.jetty.nio.SelectChannelConnector",
- "mapreduce.jobtracker.split.metainfo.maxsize": "50000000",
- "mapred.job.tracker.handler.count": "50",
- "mapred.inmem.merge.threshold": "1000",
- "mapred.hosts": "/etc/hadoop/conf/mapred.include",
- "mapred.task.tracker.task-controller": "org.apache.hadoop.mapred.LinuxTaskController",
- "mapred.jobtracker.completeuserjobs.maximum": "0",
- "mapred.task.timeout": "600000",
- "mapreduce.jobhistory.kerberos.principal": "jt/_HOST@EXAMPLE.COM",
- "mapred.map.tasks.speculative.execution": "false"
- },
- "oozie-site": {
- "oozie.service.PurgeService.purge.interval": "3600",
- "oozie.service.CallableQueueService.queue.size": "1000",
- "oozie.service.SchemaService.wf.ext.schemas": "shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd",
- "oozie.service.JPAService.jdbc.url": "jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true",
- "oozie.service.HadoopAccessorService.nameNode.whitelist": " ",
- "oozie.service.JPAService.jdbc.driver": "org.apache.derby.jdbc.EmbeddedDriver",
- "local.realm": "EXAMPLE.COM",
- "use.system.libpath.for.mapreduce.and.pig.jobs": "false",
- "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
- "oozie.service.JPAService.create.db.schema": "false",
- "oozie.authentication.kerberos.name.rules": "RULE:[2:$1@$0](jt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](tt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](nn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](dn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nDEFAULT",
- "oozie.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
- "oozie.service.ActionService.executor.ext.classes": "org.apache.oozie.action.email.EmailActionExecutor,\norg.apache.oozie.action.hadoop.HiveActionExecutor,\norg.apache.oozie.action.hadoop.ShellActionExecutor,\norg.apache.oozie.action.hadoop.SqoopActionExecutor,\norg.apache.oozie.action.hadoop.DistcpActionExecutor",
- "oozie.service.HadoopAccessorService.kerberos.principal": "oozie/c6402.ambari.apache.org@EXAMPLE.COM",
- "oozie.service.AuthorizationService.authorization.enabled": "true",
- "oozie.base.url": "http://c6402.ambari.apache.org:11000/oozie",
- "oozie.service.JPAService.jdbc.password": "q",
- "oozie.service.coord.normal.default.timeout": "120",
- "oozie.service.JPAService.pool.max.active.conn": "10",
- "oozie.service.PurgeService.older.than": "30",
- "oozie.db.schema.name": "oozie",
- "oozie.service.HadoopAccessorService.hadoop.configurations": "*=/etc/hadoop/conf",
- "oozie.service.HadoopAccessorService.jobTracker.whitelist": " ",
- "oozie.service.CallableQueueService.callable.concurrency": "3",
- "oozie.service.JPAService.jdbc.username": "oozie",
- "oozie.service.CallableQueueService.threads": "10",
- "oozie.systemmode": "NORMAL",
- "oozie.service.HadoopAccessorService.keytab.file": "/etc/security/keytabs/oozie.service.keytab",
- "oozie.service.WorkflowAppService.system.libpath": "/user/${user.name}/share/lib",
- "oozie.authentication.type": "kerberos",
- "oozie.authentication.kerberos.principal": "HTTP/c6402.ambari.apache.org@EXAMPLE.COM",
- "oozie.system.id": "oozie-${user.name}"
- },
- "webhcat-site": {
- "templeton.pig.path": "pig.tar.gz/pig/bin/pig",
- "templeton.hive.properties": "hive.metastore.local=false,hive.metastore.uris=thrift://c6402.ambari.apache.org:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@EXAMPLE.COM",
- "templeton.override.enabled": "false",
- "templeton.jar": "/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar",
- "templeton.kerberos.secret": "secret",
- "templeton.kerberos.principal": "HTTP/c6402.ambari.apache.org@EXAMPLE.COM",
- "templeton.zookeeper.hosts": "c6401.ambari.apache.org:2181",
- "templeton.exec.timeout": "60000",
- "templeton.storage.class": "org.apache.hcatalog.templeton.tool.ZooKeeperStorage",
- "templeton.hive.archive": "hdfs:///apps/webhcat/hive.tar.gz",
- "templeton.streaming.jar": "hdfs:///apps/webhcat/hadoop-streaming.jar",
- "templeton.port": "50111",
- "templeton.hadoop.conf.dir": "/etc/hadoop/conf",
- "templeton.libjars": "/usr/lib/zookeeper/zookeeper.jar",
- "templeton.hadoop": "/usr/bin/hadoop",
- "templeton.hive.path": "hive.tar.gz/hive/bin/hive",
- "templeton.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
- "templeton.hcat": "/usr/bin/hcat",
- "templeton.pig.archive": "hdfs:///apps/webhcat/pig.tar.gz"
- },
- "global": {
- "tasktracker_task_controller": "org.apache.hadoop.mapred.LinuxTaskController",
- "oozie_keytab": "/etc/security/keytabs/oozie.service.keytab",
- "hadoop_http_principal_name": "HTTP/_HOST",
- "kinit_path_local": "/usr/bin",
- "nagios_keytab_path": "/etc/security/keytabs/nagios.service.keytab",
- "hbase_regionserver_heapsize": "1024m",
- "datanode_primary_name": "dn",
- "namenode_principal_name": "nn/_HOST",
- "namenode_keytab": "/etc/security/keytabs/nn.service.keytab",
- "nagios_principal_name": "nagios/c6402.ambari.apache.org@EXAMPLE.COM",
- "dfs_datanode_http_address": "1022",
- "hbase_user_keytab": "/etc/security/keytabs/hbase.headless.keytab",
- "jobtracker_primary_name": "jt",
- "hbase_pid_dir": "/var/run/hbase",
- "namenode_opt_maxnewsize": "200m",
- "syncLimit": "5",
- "clientPort": "2181",
- "oozie_jdbc_driver": "org.apache.derby.jdbc.EmbeddedDriver",
- "hive_metastore_primary_name": "hive",
- "hbase_master_keytab": "/etc/security/keytabs/hbase.service.keytab",
- "nagios_primary_name": "nagios",
- "jobtracker_principal_name": "jt/_HOST",
- "hive_database": "New MySQL Database",
- "hcat_pid_dir": "/etc/run/webhcat",
- "oozie_derby_database": "Derby",
- "snappy_enabled": "true",
- "oozie_pid_dir": "/var/run/oozie",
- "datanode_principal_name": "dn/_HOST",
- "hive_metastore_keytab": "/etc/security/keytabs/hive.service.keytab",
- "nagios_group": "nagios",
- "hcat_user": "hcat",
- "hadoop_heapsize": "1024",
- "hbase_regionserver_primary_name": "hbase",
- "zk_user": "zookeeper",
- "rrdcached_base_dir": "/var/lib/ganglia/rrds",
- "keytab_path": "/etc/security/keytabs",
- "hive_pid_dir": "/var/run/hive",
- "webhcat_server": "c6402.ambari.apache.org",
- "zk_data_dir": "/hadoop/zookeeper",
- "hcat_log_dir": "/var/log/webhcat",
- "oozie_hostname": "c6402.ambari.apache.org",
- "tasktracker_principal_name": "tt/_HOST",
- "jobtracker_keytab": "/etc/security/keytabs/jt.service.keytab",
- "tasktracker_keytab": "/etc/security/keytabs/tt.service.keytab",
- "zookeeper_keytab_path": "/etc/security/keytabs/zk.service.keytab",
- "namenode_heapsize": "1024m",
- "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
- "kerberos_domain": "EXAMPLE.COM",
- "snamenode_keytab": "/etc/security/keytabs/nn.service.keytab",
- "nagios_server": "c6402.ambari.apache.org",
- "ganglia_runtime_dir": "/var/run/ganglia/hdp",
- "lzo_enabled": "true",
- "oozie_principal_name": "oozie/c6402.ambari.apache.org",
- "hive_jdbc_driver": "com.mysql.jdbc.Driver",
- "dfs_datanode_address": "1019",
- "namenode_opt_newsize": "200m",
- "initLimit": "10",
- "hive_database_type": "mysql",
- "zk_pid_dir": "/var/run/zookeeper",
- "namenode_primary_name": "nn",
- "tickTime": "2000",
- "hive_metastore_principal_name": "hive/_HOST",
- "datanode_keytab": "/etc/security/keytabs/dn.service.keytab",
- "zk_log_dir": "/var/log/zookeeper",
- "oozie_http_principal_name": "HTTP/c6402.ambari.apache.org",
- "tasktracker_primary_name": "tt",
- "hadoop_http_keytab": "/etc/security/keytabs/spnego.service.keytab",
- "gmetad_user": "nobody",
- "oozie_http_keytab": "/etc/security/keytabs/spnego.service.keytab",
- "hive_metastore": "c6402.ambari.apache.org",
- "nagios_user": "nagios",
- "security_enabled": "true",
- "proxyuser_group": "users",
- "namenode_formatted_mark_dir": "/var/run/hadoop/hdfs/namenode/formatted/",
- "hbase_primary_name": "hbase",
- "oozie_http_primary_name": "HTTP",
- "dtnode_heapsize": "1024m",
- "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM",
- "oozie_log_dir": "/var/log/oozie",
- "webhcat_http_keytab": "/etc/security/keytabs/spnego.service.keytab",
- "hdfs_user_keytab": "/etc/security/keytabs/hdfs.headless.keytab",
- "oozie_user": "oozie",
- "oozie_data_dir": "/hadoop/oozie/data",
- "oozie_primary_name": "oozie",
- "hdfs_log_dir_prefix": "/var/log/hadoop",
- "zookeeper_primary_name": "zookeeper",
- "hbase_master_principal_name": "hbase/_HOST",
- "jtnode_heapsize": "1024m",
- "yarn_user": "yarn",
- "gmond_user": "nobody",
- "nagios_web_login": "nagiosadmin",
- "nagios_contact": "q@q.q",
- "snamenode_primary_name": "nn",
- "hdfs_user": "hdfs",
- "oozie_database_type": "derby",
- "webhcat_user": "hcat",
- "hive_hostname": "c6402.ambari.apache.org",
- "hbase_regionserver_principal_name": "hbase/_HOST",
- "hive_log_dir": "/var/log/hive",
- "smokeuser_principal_name": "ambari-qa",
- "mapred_user": "mapred",
- "smokeuser_primary_name": "ambari-qa",
- "jtnode_opt_maxnewsize": "200m",
- "hbase_master_primary_name": "hbase",
- "oozie_servername": "c6402.ambari.apache.org",
- "hdfs_primary_name": "hdfs",
- "hive_ambari_database": "MySQL",
- "rca_enabled": "true",
- "hadoop_http_primary_name": "HTTP",
- "webHCat_http_principal_name": "HTTP/c6402.ambari.apache.org",
- "mysql_connector_url": "${download_url}/mysql-connector-java-5.1.18.zip",
- "hive_metastore_port": "9083",
- "hbase_user": "hbase",
- "snamenode_principal_name": "nn/_HOST",
- "oozie_database": "New Derby Database",
- "hbase_log_dir": "/var/log/hbase",
- "user_group": "hadoop",
- "hive_user": "hive",
- "webHCat_http_primary_name": "HTTP",
- "nagios_web_password": "q",
- "smokeuser": "ambari-qa",
- "ganglia_conf_dir": "/etc/ganglia/hdp",
- "hbase_master_heapsize": "1024m",
- "kerberos_install_type": "MANUALLY_SET_KERBEROS",
- "hadoop_pid_dir_prefix": "/var/run/hadoop",
- "hive_aux_jars_path": "/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar",
- "jtnode_opt_newsize": "200m",
- "hbase_regionserver_keytab": "/etc/security/keytabs/hbase.service.keytab",
- "hbase_principal_name": "hbase",
- "hdfs_principal_name": "hdfs"
- },
- "hdfs-site": {
- "dfs.namenode.avoid.write.stale.datanode": "true",
- "dfs.namenode.kerberos.internal.spnego.principal": "${dfs.web.authentication.kerberos.principal}",
- "ipc.server.max.response.size": "5242880",
- "dfs.datanode.kerberos.principal": "dn/_HOST@EXAMPLE.COM",
- "dfs.heartbeat.interval": "3",
- "dfs.block.access.token.enable": "true",
- "dfs.support.append": "true",
- "dfs.cluster.administrators": " hdfs",
- "ambari.dfs.datanode.http.port": "1022",
- "dfs.block.size": "134217728",
- "dfs.blockreport.initialDelay": "120",
- "dfs.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM",
- "dfs.hosts": "/etc/hadoop/conf/dfs.include",
- "dfs.datanode.du.reserved": "1073741824",
- "dfs.replication": "3",
- "dfs.namenode.handler.count": "100",
- "dfs.web.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
- "dfs.namenode.stale.datanode.interval": "30000",
- "dfs.datanode.socket.write.timeout": "0",
- "ipc.server.read.threadpool.size": "5",
- "dfs.balance.bandwidthPerSec": "6250000",
- "dfs.datanode.address": "0.0.0.0:${ambari.dfs.datanode.port}",
- "dfs.webhdfs.enabled": "true",
- "dfs.datanode.failed.volumes.tolerated": "0",
- "dfs.permissions.supergroup": "hdfs",
- "dfs.secondary.http.address": "c6402.ambari.apache.org:50090",
- "ambari.dfs.datanode.port": "1019",
- "dfs.namenode.write.stale.datanode.ratio": "1.0f",
- "dfs.name.dir": "/hadoop/hdfs/namenode",
- "dfs.access.time.precision": "0",
- "dfs.secondary.namenode.kerberos.internal.spnego.principal": "${dfs.web.authentication.kerberos.principal}",
- "dfs.https.address": "c6401.ambari.apache.org:50470",
- "dfs.datanode.http.address": "0.0.0.0:${ambari.dfs.datanode.http.port}",
- "dfs.data.dir": "/hadoop/hdfs/data",
- "dfs.secondary.https.port": "50490",
- "dfs.permissions": "true",
- "dfs.secondary.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab",
- "dfs.web.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
- "dfs.block.local-path-access.user": "hbase",
- "dfs.datanode.ipc.address": "0.0.0.0:8010",
- "dfs.web.ugi": "gopher,gopher",
- "dfs.datanode.du.pct": "0.85f",
- "dfs.secondary.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM",
- "dfs.datanode.keytab.file": "/etc/security/keytabs/dn.service.keytab",
- "dfs.http.address": "c6401.ambari.apache.org:50070",
- "dfs.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab",
- "dfs.https.port": "50070",
- "dfs.replication.max": "50",
- "dfs.datanode.max.xcievers": "4096",
- "dfs.namenode.avoid.read.stale.datanode": "true",
- "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
- "dfs.datanode.data.dir.perm": "750",
- "dfs.safemode.threshold.pct": "1.0f",
- "dfs.umaskmode": "077"
- },
- "hbase-site": {
- "hbase.client.keyvalue.maxsize": "10485760",
- "hbase.regionserver.keytab.file": "/etc/security/keytabs/hbase.service.keytab",
- "hbase.hstore.compactionThreshold": "3",
- "hbase.zookeeper.property.clientPort": "2181",
- "hbase.rootdir": "hdfs://c6401.ambari.apache.org:8020/apps/hbase/data",
- "hbase.regionserver.handler.count": "60",
- "dfs.client.read.shortcircuit": "true",
- "hbase.bulkload.staging.dir": "/apps/hbase/staging",
- "hbase.regionserver.global.memstore.lowerLimit": "0.38",
- "hbase.master.kerberos.principal": "hbase/_HOST@EXAMPLE.COM",
- "hbase.hregion.memstore.block.multiplier": "2",
- "hbase.hregion.memstore.flush.size": "134217728",
- "hbase.superuser": "hbase",
- "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController",
- "hbase.rpc.engine": "org.apache.hadoop.hbase.ipc.SecureRpcEngine",
- "hbase.hregion.max.filesize": "10737418240",
- "hbase.regionserver.global.memstore.upperLimit": "0.4",
- "zookeeper.session.timeout": "60000",
- "hbase.tmp.dir": "/hadoop/hbase",
- "hbase.regionserver.kerberos.principal": "hbase/_HOST@EXAMPLE.COM",
- "hfile.block.cache.size": "0.40",
- "hbase.security.authentication": "kerberos",
- "hbase.zookeeper.quorum": "c6401.ambari.apache.org",
- "zookeeper.znode.parent": "/hbase-secure",
- "hbase.coprocessor.master.classes": "org.apache.hadoop.hbase.security.access.AccessController",
- "hbase.hstore.blockingStoreFiles": "10",
- "hbase.hregion.majorcompaction": "86400000",
- "hbase.security.authorization": "true",
- "hbase.master.keytab.file": "/etc/security/keytabs/hbase.service.keytab",
- "hbase.cluster.distributed": "true",
- "hbase.hregion.memstore.mslab.enabled": "true",
- "hbase.client.scanner.caching": "100",
- "hbase.zookeeper.useMulti": "true"
- },
- "core-site": {
- "fs.default.name": "hdfs://c6401.ambari.apache.org:8020",
- "hadoop.proxyuser.HTTP.groups": "users",
- "hadoop.proxyuser.HTTP.hosts": "c6402.ambari.apache.org",
- "hadoop.proxyuser.hcat.hosts": "c6402.ambari.apache.org",
- "fs.checkpoint.period": "21600",
- "hadoop.proxyuser.hcat.groups": "users",
- "fs.checkpoint.size": "67108864",
- "fs.trash.interval": "360",
- "hadoop.proxyuser.hive.groups": "users",
- "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.SnappyCodec",
- "hadoop.security.authentication": "kerberos",
- "fs.checkpoint.edits.dir": "${fs.checkpoint.dir}",
- "ipc.client.idlethreshold": "8000",
- "io.file.buffer.size": "131072",
- "io.compression.codec.lzo.class": "com.hadoop.compression.lzo.LzoCodec",
- "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
- "webinterface.private.actions": "false",
- "hadoop.proxyuser.hive.hosts": "c6402.ambari.apache.org",
- "hadoop.proxyuser.oozie.groups": "users",
- "hadoop.security.authorization": "true",
- "fs.checkpoint.dir": "/hadoop/hdfs/namesecondary",
- "ipc.client.connect.max.retries": "50",
- "hadoop.security.auth_to_local": "RULE:[2:$1@$0](jt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](tt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](nn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](dn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](oozie@.*EXAMPLE.COM)s/.*/oozie/\nDEFAULT",
- "hadoop.proxyuser.oozie.hosts": "c6402.ambari.apache.org",
- "ipc.client.connection.maxidletime": "30000"
- },
- "hive-site": {
- "hive.enforce.sorting": "true",
- "javax.jdo.option.ConnectionPassword": "q",
- "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
- "hive.optimize.bucketmapjoin.sortedmerge": "true",
- "fs.file.impl.disable.cache": "true",
- "hive.auto.convert.join.noconditionaltask": "true",
- "hive.server2.authentication.kerberos.principal": "hive/_HOST@EXAMPLE.COM",
- "hive.optimize.bucketmapjoin": "true",
- "hive.map.aggr": "true",
- "hive.security.authorization.enabled": "true",
- "hive.optimize.reducededuplication.min.reducer": "1",
- "hive.metastore.kerberos.keytab.file": "/etc/security/keytabs/hive.service.keytab",
- "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
- "hive.mapjoin.bucket.cache.size": "10000",
- "hive.auto.convert.join.noconditionaltask.size": "1000000000",
- "javax.jdo.option.ConnectionUserName": "hive",
- "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
- "hive.server2.authentication": "KERBEROS",
- "hive.metastore.sasl.enabled": "true",
- "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
- "hive.metastore.client.socket.timeout": "60",
- "hive.metastore.kerberos.principal": "hive/_HOST@EXAMPLE.COM",
- "hive.semantic.analyzer.factory.impl": "org.apache.hivealog.cli.HCatSemanticAnalyzerFactory",
- "hive.auto.convert.join": "true",
- "hive.enforce.bucketing": "true",
- "hive.mapred.reduce.tasks.speculative.execution": "false",
- "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
- "hive.auto.convert.sortmerge.join": "true",
- "fs.hdfs.impl.disable.cache": "true",
- "hive.security.authorization.manager": "org.apache.hcatalog.security.HdfsAuthorizationProvider",
- "ambari.hive.db.schema.name": "hive",
- "hive.metastore.execute.setugi": "true",
- "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
- "hive.server2.enable.doAs": "true",
- "hive.optimize.mapjoin.mapreduce": "true",
- "hive.server2.authentication.kerberos.keytab": "/etc/security/keytabs/hive.service.keytab"
- }
- },
- "configurationTags": {
- "mapred-site": {
- "tag": "version1389980437965"
- },
- "oozie-site": {
- "tag": "version1389980437966"
- },
- "webhcat-site": {
- "tag": "version1389980437965"
- },
- "global": {
- "tag": "version1389980437965"
- },
- "hdfs-site": {
- "tag": "version1389980437965"
- },
- "hbase-site": {
- "tag": "version1389980437965"
- },
- "core-site": {
- "tag": "version1389980437965"
- },
- "hive-site": {
- "tag": "version1389980437965"
- }
- },
- "commandId": "4-2",
- "clusterHostInfo": {
- "snamenode_host": [
- "c6402.ambari.apache.org"
- ],
- "ganglia_monitor_hosts": [
- "c6401.ambari.apache.org",
- "c6402.ambari.apache.org"
- ],
- "nagios_server_host": [
- "c6402.ambari.apache.org"
- ],
- "hive_metastore_hosts": [
- "c6402.ambari.apache.org"
- ],
- "all_ping_ports": [
- "8670",
- "8670"
- ],
- "mapred_tt_hosts": [
- "c6401.ambari.apache.org",
- "c6402.ambari.apache.org"
- ],
- "all_hosts": [
- "c6401.ambari.apache.org",
- "c6402.ambari.apache.org"
- ],
- "hbase_rs_hosts": [
- "c6401.ambari.apache.org",
- "c6402.ambari.apache.org"
- ],
- "slave_hosts": [
- "c6401.ambari.apache.org",
- "c6402.ambari.apache.org"
- ],
- "namenode_host": [
- "c6401.ambari.apache.org"
- ],
- "ganglia_server_host": [
- "c6402.ambari.apache.org"
- ],
- "hbase_master_hosts": [
- "c6401.ambari.apache.org"
- ],
- "hive_mysql_host": [
- "c6402.ambari.apache.org"
- ],
- "oozie_server": [
- "c6402.ambari.apache.org"
- ],
- "webhcat_server_host": [
- "c6402.ambari.apache.org"
- ],
- "jtnode_host": [
- "c6402.ambari.apache.org"
- ],
- "zookeeper_hosts": [
- "c6402.ambari.apache.org"
- ],
- "hs_host": [
- "c6402.ambari.apache.org"
- ],
- "hive_server_host": [
- "c6402.ambari.apache.org"
- ]
- }
-}
\ No newline at end of file
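
Aside: the deleted secured.json above is a command fixture, the JSON an Ambari agent hands to a stack script. A minimal sketch (not part of this patch) of how its fields map onto what a script reads; the field names below are taken verbatim from the file above:

    import json

    # Load the fixture the way the test harness feeds it to a script.
    with open('secured.json') as f:
        cmd = json.load(f)

    # Top-level command metadata, as seen at the start of the file.
    print(cmd['roleCommand'])   # START
    print(cmd['role'])          # MYSQL_SERVER

    # Service settings are grouped by config type, e.g. hive-site.
    hive_site = cmd['configurations']['hive-site']
    print(hive_site['javax.jdo.option.ConnectionURL'])

    # Cluster topology, e.g. the host that runs MySQL for Hive.
    print(cmd['clusterHostInfo']['hive_mysql_host'][0])
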
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.3/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/1.3.3/hooks/before-INSTALL/test_before_install.py
deleted file mode 100644
index 4b385bb..0000000
--- a/ambari-server/src/test/python/stacks/1.3.3/hooks/before-INSTALL/test_before_install.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from stacks.utils.RMFTestCase import *
-
-
-class TestHookBeforeInstall(RMFTestCase):
- def test_hook_default(self):
- self.executeScript("1.3.3/hooks/before-INSTALL/scripts/hook.py",
- classname="BeforeConfigureHook",
- command="hook",
- config_file="default.json"
- )
- self.assertResourceCalled('Group', 'hadoop',)
- self.assertResourceCalled('Group', 'users',)
- self.assertResourceCalled('Group', 'users',)
- self.assertResourceCalled('User', 'ambari-qa',
- gid='hadoop',
- groups=[u'users'],)
- self.assertResourceCalled('File', '/tmp/changeUid.sh',
- content=StaticFile('changeToSecureUid.sh'),
- mode=0555,)
- self.assertResourceCalled('Execute',
- '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
- not_if='test $(id -u ambari-qa) -gt 1000',)
- self.assertResourceCalled('User', 'hbase',
- gid='hadoop',
- groups=[u'hadoop'],)
- self.assertResourceCalled('File', '/tmp/changeUid.sh',
- content=StaticFile('changeToSecureUid.sh'),
- mode=0555,)
- self.assertResourceCalled('Execute',
- '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
- not_if='test $(id -u hbase) -gt 1000',)
- self.assertResourceCalled('Group', 'nagios',)
- self.assertResourceCalled('User', 'nagios', gid='nagios',)
- self.assertResourceCalled('User', 'oozie', gid='hadoop',)
- self.assertResourceCalled('User', 'hcat', gid='hadoop',)
- self.assertResourceCalled('User', 'hcat', gid='hadoop',)
- self.assertResourceCalled('User', 'hive',
- gid='hadoop',)
- self.assertResourceCalled('Group', 'nobody',)
- self.assertResourceCalled('Group', 'nobody',)
- self.assertResourceCalled('User', 'nobody',
- gid='hadoop',
- groups=[u'nobody'],)
- self.assertResourceCalled('User', 'nobody',
- gid='hadoop',
- groups=[u'nobody'],)
- self.assertResourceCalled('User', 'hdfs',
- gid='hadoop',
- groups=[u'hadoop'],)
- self.assertResourceCalled('User', 'mapred',
- gid='hadoop',
- groups=[u'hadoop'],)
- self.assertResourceCalled('User', 'zookeeper',
- gid='hadoop',)
- self.assertResourceCalled('Package', 'unzip',)
- self.assertResourceCalled('Package', 'net-snmp',)
- self.assertResourceCalled('Package', 'net-snmp-utils',)
- self.assertNoMoreResources()
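
Aside: the deleted test above shows the RMFTestCase pattern used throughout these files: executeScript runs the named script against a JSON fixture, and every resource the script declares is then replayed, in declaration order, with assertResourceCalled. A hedged sketch of what one more case could look like (hypothetical: only default.json is exercised by the test above):

    from stacks.utils.RMFTestCase import *

    class TestHookBeforeInstallSecured(RMFTestCase):
        def test_hook_secured(self):
            # Hypothetical: run the same hook against a secured fixture.
            self.executeScript("1.3.3/hooks/before-INSTALL/scripts/hook.py",
                               classname="BeforeConfigureHook",
                               command="hook",
                               config_file="secured.json")
            # Resources are asserted in the order the hook declares them.
            self.assertResourceCalled('Group', 'hadoop',)
            # ... remaining resources would be asserted here ...
            self.assertNoMoreResources()
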
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/1.3.3/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/1.3.3/hooks/before-START/test_before_start.py
deleted file mode 100644
index 6829655..0000000
--- a/ambari-server/src/test/python/stacks/1.3.3/hooks/before-START/test_before_start.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from stacks.utils.RMFTestCase import *
-
-class TestHookBeforeStart(RMFTestCase):
- def test_hook_default(self):
- self.executeScript("1.3.3/hooks/before-START/scripts/hook.py",
- classname="BeforeConfigureHook",
- command="hook",
- config_file="default.json"
- )
- self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
- not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
- path = ['/bin', '/usr/bin/'],
- )
- self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
- not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
- path = ['/bin', '/usr/bin/'],
- )
- self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
- not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
- ignore_failures = True,
- path = ['/bin', '/usr/bin/'],
- )
- self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
- content = Template('snmpd.conf.j2'),
- )
- self.assertResourceCalled('Service', 'snmpd',
- action = ['restart'],
- )
- self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
- only_if = 'test -f /selinux/enforce',
- )
- self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-i386-32; ln -sf /usr/lib/libsnappy.so /usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
- )
- self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
- )
- self.assertResourceCalled('Directory', '/etc/hadoop/conf',
- owner = 'root',
- group = 'root',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop',
- owner = 'root',
- group = 'root',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'root',
- group = 'root',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
- content = Template('hdfs.conf.j2'),
- owner = 'root',
- group = 'root',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
- content = Template('taskcontroller.cfg.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
- content = Template('hadoop-env.sh.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
- content = Template('commons-logging.properties.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
- content = Template('slaves.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
- content = Template('health_check.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.driver=.*~ambari.jobhistory.driver=org.postgresql.Driver~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA=.*~log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.driver=.*~log4j.appender.JHA.driver=${ambari.jobhistory.driver}~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.database=.*~log4j.appender.JHA.database=${ambari.jobhistory.database}~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.logger=.*~ambari.jobhistory.logger=DEBUG,JHA~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.password=.*~log4j.appender.JHA.password=${ambari.jobhistory.password}~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.database=.*~ambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=${ambari.jobhistory.logger}~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.user=.*~log4j.appender.JHA.user=${ambari.jobhistory.user}~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.user=.*~ambari.jobhistory.user=mapred~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.password=.*~ambari.jobhistory.password=mapred~' /etc/hadoop/conf/log4j.properties",
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
- content = Template('hadoop-metrics2.properties.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('XmlConfig', 'core-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['core-site'],
- )
- self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
- owner = 'mapred',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['mapred-site'],
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
- content = StaticFile('task-log4j.properties'),
- mode = 0755,
- )
- self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['hdfs-site'],
- )
- self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
- to = '/usr/lib/hadoop/hadoop-tools.jar',
- )
- self.assertNoMoreResources()
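
Aside: each sed Execute asserted above rewrites one key of log4j.properties in place, matching the line whether or not it still carries the optional '###' comment prefix. A rough Python equivalent of the first substitution (illustration only, not part of this patch):

    import re

    path = '/etc/hadoop/conf/log4j.properties'
    with open(path) as f:
        text = f.read()

    # Same effect as the first sed above: replace the whole line,
    # commented-out ('###'-prefixed) or not.
    text = re.sub(r'(###)?ambari\.jobhistory\.driver=.*',
                  'ambari.jobhistory.driver=org.postgresql.Driver',
                  text)

    with open(path, 'w') as f:
        f.write(text)
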
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_monitor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_monitor.py b/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_monitor.py
new file mode 100644
index 0000000..79c24ee
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_monitor.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestGangliaMonitor(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="configure",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Directory', '/etc/ganglia/hdp',
+ owner = 'root',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
+ content = StaticFile('gmetad.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
+ content = StaticFile('gmond.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
+ content = StaticFile('checkGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
+ content = StaticFile('checkRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
+ content = StaticFile('gmetadLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
+ content = StaticFile('gmondLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
+ content = StaticFile('rrdcachedLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
+ content = StaticFile('setupGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
+ content = StaticFile('startGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
+ content = StaticFile('startGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
+ content = StaticFile('startRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
+ content = StaticFile('stopGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
+ content = StaticFile('stopGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
+ content = StaticFile('stopRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
+ content = StaticFile('teardownGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Directory', '/etc/ganglia/conf.d',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/conf.d/modgstatus.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/conf.d/multicpu.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/gmond.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="start",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'chkconfig gmond off',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmond start >> /tmp/gmond.log 2>&1 ; /bin/ps auwx | /bin/grep [g]mond >> /tmp/gmond.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py",
+ classname="GangliaMonitor",
+ command="stop",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmond stop >> /tmp/gmond.log 2>&1 ; /bin/ps auwx | /bin/grep [g]mond >> /tmp/gmond.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertNoMoreResources()
+
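+# Aside for reviewers: in the start/stop commands above, the bracketed
+# pattern '[g]mond' is the usual trick for keeping grep from matching
+# its own entry in the ps listing; the server tests below use the same
+# idiom with '[g]metad'.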
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py b/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py
new file mode 100644
index 0000000..870bc22
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestGangliaServer(RMFTestCase):
+
+ def test_configure_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="configure",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
+ content = StaticFile('gmetad.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
+ content = StaticFile('gmond.init'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
+ content = StaticFile('checkGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
+ content = StaticFile('checkRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
+ content = StaticFile('gmetadLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
+ content = StaticFile('gmondLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
+ content = StaticFile('rrdcachedLib.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
+ content = StaticFile('setupGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
+ content = StaticFile('startGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
+ content = StaticFile('startGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
+ content = StaticFile('startRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
+ content = StaticFile('stopGmetad.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
+ content = StaticFile('stopGmond.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
+ content = StaticFile('stopRrdcached.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
+ content = StaticFile('teardownGanglia.sh'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
+ owner = 'root',
+ template_tag = None,
+ group = 'root',
+ mode = 0755,
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPResourceManager -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNodeManager -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHistoryServer -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPDataNode -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseRegionServer -m -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -t -o root -g hadoop',
+ path = ['/usr/libexec/hdp/ganglia',
+ '/usr/sbin',
+ '/sbin:/usr/local/bin',
+ '/bin',
+ '/usr/bin'],
+ )
+ self.assertResourceCalled('Directory', '/var/lib/ganglia/dwoo',
+ owner = 'nobody',
+ recursive = True,
+ mode = 0777,
+ )
+ self.assertResourceCalled('Directory', '/srv/www/cgi-bin',
+ recursive = True,
+ )
+ self.assertResourceCalled('File', '/srv/www/cgi-bin/rrd.py',
+ content = StaticFile('rrd.py'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/ganglia/gmetad.conf',
+ owner = 'root',
+ group = 'hadoop',
+ )
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="start",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmetad start >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('MonitorWebserver', 'restart',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_server.py",
+ classname="GangliaServer",
+ command="stop",
+ config_file="default.json"
+ )
+ self.assertResourceCalled('Execute', 'service hdp-gmetad stop >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
+ path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+ )
+ self.assertResourceCalled('MonitorWebserver', 'restart',
+ )
+ self.assertNoMoreResources()
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
new file mode 100644
index 0000000..2d8bd77
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHBaseClient(RMFTestCase):
+
+ def test_configure_secured(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_client.py",
+ classname = "HbaseClient",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_client_jaas.conf',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_default(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_client.py",
+ classname = "HbaseClient",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-RS',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertNoMoreResources()
+
+
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/43f14b34/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
new file mode 100644
index 0000000..9032f38
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+class TestHBaseMaster(RMFTestCase):
+ def test_configure_default(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "configure",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertNoMoreResources()
+
+ def test_start_default(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "start",
+ config_file="default.json"
+ )
+
+ self.assert_configure_default()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
+ not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
+ user = 'hbase'
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_default(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "stop",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master && rm -f /var/run/hbase/hbase-hbase-master.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_decom_default(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "decommission",
+ config_file="default.json"
+ )
+
+ self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host2',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_configure_secured(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "configure",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertNoMoreResources()
+
+ def test_start_secured(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "start",
+ config_file="secured.json"
+ )
+
+ self.assert_configure_secured()
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
+ not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_stop_secured(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "stop",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master && rm -f /var/run/hbase/hbase-hbase-master.pid',
+ not_if = None,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def test_decom_secure(self):
+ self.executeScript("2.0.6/services/HBASE/package/scripts/hbase_master.py",
+ classname = "HbaseMaster",
+ command = "decommission",
+ config_file="secured.json"
+ )
+
+ self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
+ logoutput = True,
+ user = 'hbase',
+ )
+ self.assertNoMoreResources()
+
+ def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-MASTER',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+
+ def assert_configure_secured(self):
+ self.assertResourceCalled('Directory', '/etc/hbase/conf',
+ owner = 'hbase',
+ group = 'hadoop',
+ recursive = True,
+ )
+ self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hbase-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ conf_dir = '/etc/hbase/conf',
+ configurations = self.getConfig()['configurations']['hdfs-site'], # don't hardcode all the properties
+ )
+ self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
+ owner = 'hbase',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase-env.sh',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+ owner = 'hbase',
+ template_tag = 'GANGLIA-MASTER',
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_master_jaas.conf',
+ owner = 'hbase',
+ template_tag = None,
+ )
+ self.assertResourceCalled('Directory', '/var/run/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/hadoop/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hbase',
+ owner = 'hbase',
+ recursive = True,
+ )
\ No newline at end of file
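

Aside: the not_if guard asserted on the start Execute above reflects the resource's semantics: the command is skipped whenever the guard already exits 0, which makes start idempotent. A minimal sketch of that behavior (an assumption-level illustration, not the real resource_management implementation):

    import subprocess

    def execute(cmd, not_if=None):
        # Skip the command when the guard succeeds, i.e. a live process
        # already matches the recorded pid file.
        if not_if and subprocess.call(not_if, shell=True) == 0:
            return
        subprocess.check_call(cmd, shell=True)

    execute('/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
            not_if='ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 '
                   '&& ps `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1')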