Posted to commits@ambari.apache.org by lp...@apache.org on 2017/09/12 10:04:18 UTC
[01/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader) [Forced Update!]
Repository: ambari
Updated Branches:
refs/heads/feature-branch-AMBARI-21307 44aeca5d2 -> 1c22c7368 (forced update)
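The change, per the subject line, makes Ambari fail fast when a configured JDBC driver class name is not one it supports. As a rough illustration of that kind of check only (a minimal sketch assuming a hypothetical SUPPORTED_JDBC_DRIVERS whitelist and a plain ValueError — not the actual Ambari implementation):

    # Sketch of an unsupported-JDBC-driver check. The whitelist contents,
    # function name, and ValueError are assumptions for illustration,
    # not Ambari's actual names or error type.
    SUPPORTED_JDBC_DRIVERS = {
        "com.mysql.jdbc.Driver",
        "org.postgresql.Driver",
        "oracle.jdbc.driver.OracleDriver",
        "com.microsoft.sqlserver.jdbc.SQLServerDriver",
        "org.apache.derby.jdbc.EmbeddedDriver",
    }

    def check_jdbc_drivers(jdbc_drivers_value):
        # jdbc_drivers_value is a comma-separated list of driver class
        # names, e.g. the sqoop-env "jdbc_drivers" property in the test
        # config below.
        for driver in (d.strip() for d in jdbc_drivers_value.split(",")):
            if driver and driver not in SUPPORTED_JDBC_DRIVERS:
                raise ValueError("Unsupported JDBC driver configured: %s" % driver)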
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/sqoop_unsupported_jdbc_driver.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/sqoop_unsupported_jdbc_driver.json b/ambari-server/src/test/python/common-services/configs/sqoop_unsupported_jdbc_driver.json
new file mode 100644
index 0000000..dc76fbb
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/sqoop_unsupported_jdbc_driver.json
@@ -0,0 +1,879 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "agent_stack_retry_count": "5",
+ "agent_stack_retry_on_unavailability": "false",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "repo_info": "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.0",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "java_version": "8",
+ "db_name": "ambari"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "HIVE",
+ "role": "HIVE_CLIENT",
+ "commandParams": {
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2",
+ "mark_draining_only" : "false",
+ "xml_configs_list":[{"hdfs-site.xml":"hdfs-site"}],
+ "env_configs_list":[{"hadoop-env.sh":"hadoop-env"}],
+ "output_file":"HDFS_CLIENT-configs.tar.gz"
+
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "mapred-site": {
+ "mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020",
+ "mapreduce.cluster.administrators": " hadoop",
+ "mapreduce.reduce.input.buffer.percent": "0.0",
+ "mapreduce.output.fileoutputformat.compress": "false",
+ "mapreduce.framework.name": "yarn",
+ "mapreduce.map.speculative": "false",
+ "mapreduce.reduce.shuffle.merge.percent": "0.66",
+ "yarn.app.mapreduce.am.resource.mb": "683",
+ "mapreduce.map.java.opts": "-Xmx273m",
+ "mapreduce.application.classpath": "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*",
+ "mapreduce.job.reduce.slowstart.completedmaps": "0.05",
+ "mapreduce.output.fileoutputformat.compress.type": "BLOCK",
+ "mapreduce.reduce.speculative": "false",
+ "mapreduce.reduce.java.opts": "-Xmx546m",
+ "mapreduce.am.max-attempts": "2",
+ "yarn.app.mapreduce.am.admin-command-opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+ "mapreduce.reduce.log.level": "INFO",
+ "mapreduce.map.sort.spill.percent": "0.7",
+ "mapreduce.task.timeout": "300000",
+ "mapreduce.map.memory.mb": "341",
+ "mapreduce.task.io.sort.factor": "100",
+ "mapreduce.jobhistory.intermediate-done-dir": "/mr-history/tmp",
+ "mapreduce.reduce.memory.mb": "683",
+ "yarn.app.mapreduce.am.log.level": "INFO",
+ "mapreduce.map.log.level": "INFO",
+ "mapreduce.shuffle.port": "13562",
+ "mapreduce.admin.user.env": "LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &> /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`",
+ "mapreduce.map.output.compress": "false",
+ "yarn.app.mapreduce.am.staging-dir": "/user",
+ "mapreduce.reduce.shuffle.parallelcopies": "30",
+ "mapreduce.reduce.shuffle.input.buffer.percent": "0.7",
+ "mapreduce.jobhistory.webapp.address": "c6402.ambari.apache.org:19888",
+ "mapreduce.jobhistory.done-dir": "/mr-history/done",
+ "mapreduce.admin.reduce.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+ "mapreduce.task.io.sort.mb": "136",
+ "yarn.app.mapreduce.am.command-opts": "-Xmx546m",
+ "mapreduce.admin.map.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN"
+ },
+ "oozie-site": {
+ "oozie.service.PurgeService.purge.interval": "3600",
+ "oozie.service.CallableQueueService.queue.size": "1000",
+ "oozie.service.SchemaService.wf.ext.schemas": "shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd,shell-action-0.2.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd,hive-action-0.3.xsd",
+ "oozie.service.JPAService.jdbc.url": "jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true",
+ "oozie.service.HadoopAccessorService.nameNode.whitelist": " ",
+ "use.system.libpath.for.mapreduce.and.pig.jobs": "false",
+ "oozie.db.schema.name": "oozie",
+ "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials",
+ "oozie.service.JPAService.create.db.schema": "false",
+ "oozie.authentication.kerberos.name.rules": "\n RULE:[2:$1@$0]([jt]t@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-MAPREDUSER/\n RULE:[2:$1@$0]([nd]n@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HDFSUSER/\n RULE:[2:$1@$0](hm@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HBASE-USER/\n RULE:[2:$1@$0](rs@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HBASE-USER/\n DEFAULT",
+ "oozie.service.ActionService.executor.ext.classes": "\n org.apache.oozie.action.email.EmailActionExecutor,\n org.apache.oozie.action.hadoop.HiveActionExecutor,\n org.apache.oozie.action.hadoop.ShellActionExecutor,\n org.apache.oozie.action.hadoop.SqoopActionExecutor,\n org.apache.oozie.action.hadoop.DistcpActionExecutor",
+ "oozie.base.url": "http://c6402.ambari.apache.org:11000/oozie",
+ "oozie.service.JPAService.jdbc.password": "asd",
+ "oozie.service.coord.normal.default.timeout": "120",
+ "oozie.service.AuthorizationService.security.enabled": "true",
+ "oozie.service.JPAService.pool.max.active.conn": "10",
+ "oozie.service.PurgeService.older.than": "30",
+ "oozie.service.coord.push.check.requeue.interval": "30000",
+ "oozie.service.HadoopAccessorService.hadoop.configurations": "*=/etc/hadoop/conf",
+ "oozie.service.HadoopAccessorService.jobTracker.whitelist": " ",
+ "oozie.service.CallableQueueService.callable.concurrency": "3",
+ "oozie.service.JPAService.jdbc.username": "oozie",
+ "oozie.service.CallableQueueService.threads": "10",
+ "oozie.services.ext": "org.apache.oozie.service.PartitionDependencyManagerService,org.apache.oozie.service.HCatAccessorService",
+ "oozie.systemmode": "NORMAL",
+ "oozie.service.WorkflowAppService.system.libpath": "/user/${user.name}/share/lib",
+ "oozie.services": "\n org.apache.oozie.service.SchedulerService,\n org.apache.oozie.service.InstrumentationService,\n org.apache.oozie.service.CallableQueueService,\n org.apache.oozie.service.UUIDService,\n org.apache.oozie.service.ELService,\n org.apache.oozie.service.AuthorizationService,\n org.apache.oozie.service.UserGroupInformationService,\n org.apache.oozie.service.HadoopAccessorService,\n org.apache.oozie.service.URIHandlerService,\n org.apache.oozie.service.MemoryLocksService,\n org.apache.oozie.service.DagXLogInfoService,\n org.apache.oozie.service.SchemaService,\n org.apache.oozie.service.LiteWorkflowAppService,\n org.apache.oozie.service.JPAService,\n org.apache.oozie.service.StoreService,\n org.apache.oozie.service.CoordinatorStoreService,\n org.apache.oozie.service.SLAStoreService,\n org.apache.oozie.service.DBLiteWorkflowStoreService,\n
org.apache.oozie.service.CallbackService,\n org.apache.oozie.service.ActionService,\n org.apache.oozie.service.ActionCheckerService,\n org.apache.oozie.service.RecoveryService,\n org.apache.oozie.service.PurgeService,\n org.apache.oozie.service.CoordinatorEngineService,\n org.apache.oozie.service.BundleEngineService,\n org.apache.oozie.service.DagEngineService,\n org.apache.oozie.service.CoordMaterializeTriggerService,\n org.apache.oozie.service.StatusTransitService,\n org.apache.oozie.service.PauseTransitService,\n org.apache.oozie.service.GroupsService,\n org.apache.oozie.service.ProxyUserService",
+ "oozie.service.URIHandlerService.uri.handlers": "org.apache.oozie.dependency.FSURIHandler,org.apache.oozie.dependency.HCatURIHandler",
+ "oozie.authentication.type": "simple",
+ "oozie.service.JPAService.jdbc.driver": "org.apache.derby.jdbc.EmbeddedDriver",
+ "oozie.system.id": "oozie-${user.name}"
+ },
+ "storm-site": {
+ "topology.tuple.serializer": "backtype.storm.serialization.types.ListDelegateSerializer",
+ "topology.workers": "1",
+ "drpc.worker.threads": "64",
+ "storm.zookeeper.servers": "['c6401.ambari.apache.org','c6402.ambari.apache.org']",
+ "supervisor.heartbeat.frequency.secs": "5",
+ "topology.executor.send.buffer.size": "1024",
+ "drpc.childopts": "-Xmx768m",
+ "nimbus.thrift.port": "6627",
+ "storm.zookeeper.retry.intervalceiling.millis": "30000",
+ "storm.local.dir": "/hadoop/storm",
+ "topology.receiver.buffer.size": "8",
+ "storm.messaging.netty.client_worker_threads": "1",
+ "transactional.zookeeper.root": "/transactional",
+ "drpc.request.timeout.secs": "600",
+ "topology.skip.missing.kryo.registrations": "false",
+ "worker.heartbeat.frequency.secs": "1",
+ "zmq.hwm": "0",
+ "storm.zookeeper.connection.timeout": "15000",
+ "topology.max.error.report.per.interval": "5",
+ "storm.messaging.netty.server_worker_threads": "1",
+ "supervisor.worker.start.timeout.secs": "120",
+ "zmq.threads": "1",
+ "topology.acker.executors": "null",
+ "storm.local.mode.zmq": "false",
+ "topology.max.task.parallelism": "null",
+ "storm.zookeeper.port": "2181",
+ "nimbus.childopts": "-Xmx1024m",
+ "worker.childopts": "-Xmx768m",
+ "drpc.queue.size": "128",
+ "storm.zookeeper.retry.times": "5",
+ "nimbus.monitor.freq.secs": "10",
+ "storm.cluster.mode": "distributed",
+ "dev.zookeeper.path": "/tmp/dev-storm-zookeeper",
+ "drpc.invocations.port": "3773",
+ "storm.zookeeper.root": "/storm",
+ "logviewer.childopts": "-Xmx128m",
+ "transactional.zookeeper.port": "null",
+ "topology.worker.childopts": "null",
+ "topology.max.spout.pending": "null",
+ "nimbus.cleanup.inbox.freq.secs": "600",
+ "storm.messaging.netty.min_wait_ms": "100",
+ "nimbus.task.timeout.secs": "30",
+ "nimbus.thrift.max_buffer_size": "1048576",
+ "topology.sleep.spout.wait.strategy.time.ms": "1",
+ "topology.optimize": "true",
+ "nimbus.reassign": "true",
+ "storm.messaging.transport": "backtype.storm.messaging.netty.Context",
+ "logviewer.appender.name": "A1",
+ "nimbus.host": "c6401.ambari.apache.org",
+ "ui.port": "8744",
+ "supervisor.slots.ports": "[6700, 6701]",
+ "nimbus.file.copy.expiration.secs": "600",
+ "supervisor.monitor.frequency.secs": "3",
+ "ui.childopts": "-Xmx768m",
+ "transactional.zookeeper.servers": "null",
+ "zmq.linger.millis": "5000",
+ "topology.error.throttle.interval.secs": "10",
+ "topology.worker.shared.thread.pool.size": "4",
+ "java.library.path": "/usr/local/lib:/opt/local/lib:/usr/lib",
+ "topology.spout.wait.strategy": "backtype.storm.spout.SleepSpoutWaitStrategy",
+ "task.heartbeat.frequency.secs": "3",
+ "topology.transfer.buffer.size": "1024",
+ "storm.zookeeper.session.timeout": "20000",
+ "topology.executor.receive.buffer.size": "1024",
+ "topology.stats.sample.rate": "0.05",
+ "topology.fall.back.on.java.serialization": "true",
+ "supervisor.childopts": "-Xmx256m",
+ "topology.enable.message.timeouts": "true",
+ "storm.messaging.netty.max_wait_ms": "1000",
+ "nimbus.topology.validator": "backtype.storm.nimbus.DefaultTopologyValidator",
+ "nimbus.supervisor.timeout.secs": "60",
+ "topology.disruptor.wait.strategy": "com.lmax.disruptor.BlockingWaitStrategy",
+ "nimbus.inbox.jar.expiration.secs": "3600",
+ "drpc.port": "3772",
+ "topology.kryo.factory": "backtype.storm.serialization.DefaultKryoFactory",
+ "storm.zookeeper.retry.interval": "1000",
+ "storm.messaging.netty.max_retries": "30",
+ "topology.tick.tuple.freq.secs": "null",
+ "supervisor.enable": "true",
+ "nimbus.task.launch.secs": "120",
+ "task.refresh.poll.secs": "10",
+ "topology.message.timeout.secs": "30",
+ "storm.messaging.netty.buffer_size": "5242880",
+ "topology.state.synchronization.timeout.secs": "60",
+ "supervisor.worker.timeout.secs": "30",
+ "topology.trident.batch.emit.interval.millis": "500",
+ "topology.builtin.metrics.bucket.size.secs": "60",
+ "storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
+ "logviewer.port": "8000",
+ "topology.debug": "false"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "webhcat-site": {
+ "templeton.pig.path": "pig.tar.gz/pig/bin/pig",
+ "templeton.exec.timeout": "60000",
+ "templeton.override.enabled": "false",
+ "templeton.jar": "/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar",
+ "templeton.zookeeper.hosts": "c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181",
+ "templeton.hive.properties": "hive.metastore.local=false,hive.metastore.uris=thrift://c6402.ambari.apache.org:9083,hive.metastore.sasl.enabled=yes,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse",
+ "templeton.storage.class": "org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage",
+ "templeton.hive.archive": "hdfs:///apps/webhcat/hive.tar.gz",
+ "templeton.streaming.jar": "hdfs:///apps/webhcat/hadoop-streaming.jar",
+ "templeton.port": "50111",
+ "templeton.libjars": "/usr/lib/zookeeper/zookeeper.jar",
+ "templeton.hadoop": "/usr/bin/hadoop",
+ "templeton.hive.path": "hive.tar.gz/hive/bin/hive",
+ "templeton.hadoop.conf.dir": "/etc/hadoop/conf",
+ "templeton.hcat": "/usr/bin/hcat",
+ "templeton.pig.archive": "hdfs:///apps/webhcat/pig.tar.gz"
+ },
+ "capacity-scheduler": {
+ "yarn.scheduler.capacity.node-locality-delay": "40",
+ "yarn.scheduler.capacity.root.capacity": "100",
+ "yarn.scheduler.capacity.root.acl_administer_queue": "*",
+ "yarn.scheduler.capacity.root.queues": "default",
+ "yarn.scheduler.capacity.maximum-applications": "10000",
+ "yarn.scheduler.capacity.root.default.user-limit-factor": "1",
+ "yarn.scheduler.capacity.root.default.maximum-capacity": "100",
+ "yarn.scheduler.capacity.root.default.state": "RUNNING",
+ "yarn.scheduler.capacity.maximum-am-resource-percent": "0.2",
+ "yarn.scheduler.capacity.root.default.acl_administer_jobs": "*",
+ "yarn.scheduler.capacity.root.default.capacity": "100",
+ "yarn.scheduler.capacity.root.default.acl_submit_applications": "*"
+ },
+ "hdfs-site": {
+ "dfs.namenode.checkpoint.period": "21600",
+ "dfs.namenode.avoid.write.stale.datanode": "true",
+ "dfs.block.access.token.enable": "true",
+ "dfs.support.append": "true",
+ "dfs.datanode.address": "0.0.0.0:${ambari.dfs.datanode.port}",
+ "dfs.cluster.administrators": " hdfs",
+ "dfs.replication": "3",
+ "ambari.dfs.datanode.http.port": "50075",
+ "dfs.datanode.balance.bandwidthPerSec": "6250000",
+ "dfs.namenode.safemode.threshold-pct": "1.0f",
+ "dfs.namenode.checkpoint.edits.dir": "${dfs.namenode.checkpoint.dir}",
+ "dfs.permissions.enabled": "true",
+ "fs.checkpoint.size": "67108864",
+ "dfs.client.read.shortcircuit": "true",
+ "dfs.namenode.https-address": "c6401.ambari.apache.org:50470",
+ "dfs.journalnode.edits.dir": "/grid/0/hdfs/journal",
+ "dfs.blocksize": "134217728",
+ "dfs.datanode.max.transfer.threads": "1024",
+ "dfs.datanode.du.reserved": "1073741824",
+ "dfs.webhdfs.enabled": "true",
+ "dfs.namenode.handler.count": "100",
+ "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary",
+ "fs.permissions.umask-mode": "022",
+ "dfs.datanode.http.address": "0.0.0.0:50075",
+ "dfs.datanode.ipc.address": "0.0.0.0:8010",
+ "dfs.datanode.data.dir": "/hadoop/hdfs/data",
+ "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",
+ "dfs.blockreport.initialDelay": "120",
+ "dfs.datanode.failed.volumes.tolerated": "0",
+ "dfs.namenode.accesstime.precision": "0",
+ "ambari.dfs.datanode.port": "50010",
+ "dfs.namenode.avoid.read.stale.datanode": "true",
+ "dfs.namenode.secondary.http-address": "c6402.ambari.apache.org:50090",
+ "dfs.namenode.stale.datanode.interval": "30000",
+ "dfs.heartbeat.interval": "3",
+ "dfs.client.read.shortcircuit.streams.cache.size": "4096",
+ "dfs.permissions.superusergroup": "hdfs",
+ "dfs.https.port": "50470",
+ "dfs.journalnode.http-address": "0.0.0.0:8480",
+ "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+ "dfs.namenode.write.stale.datanode.ratio": "1.0f",
+ "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
+ "dfs.datanode.data.dir.perm": "750",
+ "dfs.namenode.name.dir.restore": "true",
+ "dfs.replication.max": "50",
+ "dfs.namenode.name.dir": "/hadoop/hdfs/namenode"
+ },
+ "hbase-site": {
+ "hbase.hstore.flush.retries.number": "120",
+ "hbase.client.keyvalue.maxsize": "10485760",
+ "hbase.hstore.compactionThreshold": "3",
+ "hbase.rootdir": "hdfs://c6401.ambari.apache.org:8020/apps/hbase/data",
+ "hbase.regionserver.handler.count": "60",
+ "hbase.regionserver.global.memstore.lowerLimit": "0.38",
+ "hbase.hregion.memstore.block.multiplier": "2",
+ "hbase.hregion.memstore.flush.size": "134217728",
+ "hbase.superuser": "hbase",
+ "hbase.zookeeper.property.clientPort": "2181",
+ "hbase.regionserver.global.memstore.upperLimit": "0.4",
+ "zookeeper.session.timeout": "30000",
+ "hbase.tmp.dir": "/hadoop/hbase",
+ "hbase.local.dir": "${hbase.tmp.dir}/local",
+ "hbase.hregion.max.filesize": "10737418240",
+ "hfile.block.cache.size": "0.40",
+ "hbase.security.authentication": "simple",
+ "hbase.defaults.for.version.skip": "true",
+ "hbase.zookeeper.quorum": "c6401.ambari.apache.org,c6402.ambari.apache.org",
+ "zookeeper.znode.parent": "/hbase-unsecure",
+ "hbase.hstore.blockingStoreFiles": "10",
+ "hbase.master.port": "60000",
+ "hbase.hregion.majorcompaction": "86400000",
+ "hbase.security.authorization": "false",
+ "hbase.cluster.distributed": "true",
+ "hbase.hregion.memstore.mslab.enabled": "true",
+ "hbase.client.scanner.caching": "100",
+ "hbase.zookeeper.useMulti": "true"
+ },
+ "core-site": {
+ "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
+ "gluster.daemon.user": "null",
+ "hadoop.proxyuser.oozie.groups": "users",
+ "hadoop.proxyuser.hcat.hosts": "c6402.ambari.apache.org",
+ "hadoop.proxyuser.hive.groups": "users",
+ "hadoop.security.authentication": "simple",
+ "hadoop.proxyuser.oozie.hosts": "c6402.ambari.apache.org",
+ "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec",
+ "mapreduce.jobtracker.webinterface.trusted": "false",
+ "fs.AbstractFileSystem.glusterfs.impl": "null",
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020",
+ "fs.trash.interval": "360",
+ "ipc.client.idlethreshold": "8000",
+ "io.file.buffer.size": "131072",
+ "hadoop.security.authorization": "false",
+ "hadoop.proxyuser.hive.hosts": "c6402.ambari.apache.org",
+ "hadoop.security.auth_to_local": "\n RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n DEFAULT",
+ "hadoop.proxyuser.hcat.groups": "users",
+ "ipc.client.connection.maxidletime": "30000",
+ "ipc.client.connect.max.retries": "50"
+ },
+ "hive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true"
+ },
+ "hive-interactive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9084",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true"
+ },
+ "yarn-site": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "0.25",
+ "yarn.nodemanager.container-executor.class": "org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor",
+ "yarn.nodemanager.local-dirs": "/hadoop/yarn/local,/hadoop/yarn/local1",
+ "yarn.resourcemanager.resource-tracker.address": "c6402.ambari.apache.org:8025",
+ "yarn.nodemanager.remote-app-log-dir-suffix": "logs",
+ "yarn.resourcemanager.hostname": "c6402.ambari.apache.org",
+ "yarn.nodemanager.health-checker.script.timeout-ms": "60000",
+ "yarn.resourcemanager.scheduler.class": "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler",
+ "yarn.nodemanager.resource.memory-mb": "2048",
+ "yarn.scheduler.minimum-allocation-mb": "683",
+ "yarn.resourcemanager.address": "c6402.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6402.ambari.apache.org:8030",
+ "yarn.log-aggregation.retain-seconds": "2592000",
+ "yarn.scheduler.maximum-allocation-mb": "2048",
+ "yarn.log-aggregation-enable": "true",
+ "yarn.nodemanager.address": "0.0.0.0:45454",
+ "yarn.nodemanager.container-monitor.interval-ms": "3000",
+ "yarn.nodemanager.log-aggregation.compression-type": "gz",
+ "yarn.nodemanager.log.retain-seconds": "604800",
+ "yarn.nodemanager.delete.debug-delay-sec": "0",
+ "yarn.nodemanager.log-dirs": "/hadoop/yarn/log,/hadoop/yarn/log1",
+ "yarn.nodemanager.health-checker.interval-ms": "135000",
+ "yarn.resourcemanager.am.max-attempts": "2",
+ "yarn.nodemanager.remote-app-log-dir": "/app-logs",
+ "yarn.nodemanager.admin-env": "MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX",
+ "yarn.nodemanager.aux-services": "mapreduce_shuffle",
+ "yarn.nodemanager.vmem-check-enabled": "false",
+ "yarn.nodemanager.vmem-pmem-ratio": "2.1",
+ "yarn.admin.acl": "*",
+ "yarn.resourcemanager.webapp.address": "c6402.ambari.apache.org:8088",
+ "yarn.resourcemanager.nodes.exclude-path": "/etc/hadoop/conf/yarn.exclude",
+ "yarn.nodemanager.linux-container-executor.group": "hadoop",
+ "yarn.acl.enable": "true",
+ "yarn.log.server.url": "http://c6402.ambari.apache.org:19888/jobhistory/logs",
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.admin.address": "c6402.ambari.apache.org:8141",
+ "yarn.nodemanager.aux-services.mapreduce_shuffle.class": "org.apache.hadoop.mapred.ShuffleHandler",
+ "yarn.timeline-service.leveldb-timeline-store.path": "/var/log/hadoop-yarn/timeline"
+ },
+ "tez-site": {
+ "tez.am.log.level": "WARN",
+ "tez.lib.uris": "hdfs:///apps/tez/,hdfs:///apps/tez/lib/",
+ "tez.staging-dir": "/tmp/${user.name}/staging",
+ "tez.am.am-rm.heartbeat.interval-ms.max": "250"
+ },
+ "yarn-env": {
+ "yarn_pid_dir_prefix": "/var/run/hadoop-yarn",
+ "apptimelineserver_heapsize": "1024",
+ "nodemanager_heapsize": "1024",
+ "content": "\nexport HADOOP_YARN_HOME={{hadoop_yarn_home}}\nexport YARN_LOG_DIR={{yarn_log_dir_prefix}}/$USER\nexport YARN_PID_DIR={{yarn_pid_dir_prefix}}/$USER\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\nexport JAVA_HOME={{java64_home}}\n\n# User for YARN daemons\nexport HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}\n\n# resolve links - $0 may be a softlink\nexport YARN_CONF_DIR=\"${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}\"\n\n# some Java parameters\n# export JAVA_HOME=/home/y/libexec/jdk1.6.0/\nif [ \"$JAVA_HOME\" != \"\" ]; then\n #echo \"run java in $JAVA_HOME\"\n JAVA_HOME=$JAVA_HOME\nfi\n\nif [ \"$JAVA_HOME\" = \"\" ]; then\n echo \"Error: JAVA_HOME is not set.\"\n exit 1\nfi\n\nJAVA=$JAVA_HOME/bin/java\nJAVA_HEAP_MAX=-Xmx1000m\n\n# For setting YARN specific HEAP sizes please use this\n# Parameter and set appropriately\nYARN_HEAPSIZE={{yarn_heapsize}}\n\n# check envvars which might override default args\nif [ \"$YARN_HEAPSIZE\" != \"\" ]; then\n JAVA_HEAP_MAX=\"-
Xmx\"\"$YARN_HEAPSIZE\"\"m\"\nfi\n\n# Resource Manager specific parameters\n\n# Specify the max Heapsize for the ResourceManager using a numerical value\n# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n# the value to 1000.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_RESOURCEMANAGER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_RESOURCEMANAGER_HEAPSIZE={{resourcemanager_heapsize}}\n\n# Specify the JVM options to be used when starting the ResourceManager.\n# These options will be appended to the options specified as YARN_OPTS\n# and therefore may override any similar flags set in YARN_OPTS\n#export YARN_RESOURCEMANAGER_OPTS=\n\n# Node Manager specific parameters\n\n# Specify the max Heapsize for the NodeManager using a numerical value\n# in the scale of MB. For example, to specif
y an jvm option of -Xmx1000m, set\n# the value to 1000.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_NODEMANAGER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}\n\n# Specify the max Heapsize for the HistoryManager using a numerical value\n# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n# the value to 1024.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_HISTORYSERVER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}\n\n# Specify the JVM options to be used when starting the NodeManager.\n# These options will be appen
ded to the options specified as YARN_OPTS\n# and therefore may override any similar flags set in YARN_OPTS\n#export YARN_NODEMANAGER_OPTS=\n\n# so that filenames w/ spaces are handled correctly in loops below\nIFS=\n\n\n# default log directory and file\nif [ \"$YARN_LOG_DIR\" = \"\" ]; then\n YARN_LOG_DIR=\"$HADOOP_YARN_HOME/logs\"\nfi\nif [ \"$YARN_LOGFILE\" = \"\" ]; then\n YARN_LOGFILE='yarn.log'\nfi\n\n# default policy file for service-level authorization\nif [ \"$YARN_POLICYFILE\" = \"\" ]; then\n YARN_POLICYFILE=\"hadoop-policy.xml\"\nfi\n\n# restore ordinary behaviour\nunset IFS\n\n\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.log.dir=$YARN_LOG_DIR\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.log.dir=$YARN_LOG_DIR\"\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.home.dir=$YARN_COMMON_HOME\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.id.str=$YARN_IDENT_STRING\"\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.root.logger=${YARN_
ROOT_LOGGER:-INFO,console}\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}\"\nif [ \"x$JAVA_LIBRARY_PATH\" != \"x\" ]; then\n YARN_OPTS=\"$YARN_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH\"\nfi\nYARN_OPTS=\"$YARN_OPTS -Dyarn.policy.file=$YARN_POLICYFILE\"",
+ "yarn_heapsize": "1024",
+ "yarn_user": "yarn",
+ "resourcemanager_heapsize": "1024",
+ "yarn_log_dir_prefix": "/var/log/hadoop-yarn",
+ "min_user_id": "1000"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "hadoop-env": {
+ "namenode_opt_maxnewsize": "200m",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "namenode_heapsize": "1024m",
+ "namenode_opt_newsize": "200m",
+ "namenode_opt_permsize" : "128m",
+ "namenode_opt_maxpermsize" : "256m",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# Hadoop Configuration Directory\n#TODO: if env var set that can cause problems\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nex
port HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to HADOOP_OPTS when specified\nexport HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USE
R/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/
gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SE
CURE_DN_USER\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The s
cheduling priority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql connector\nfor jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\nif [ -d \"/usr/lib/tez\" ]; then\n export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf\nfi\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64",
+ "hdfs_user": "hdfs",
+ "dtnode_heapsize": "1024m",
+ "proxyuser_group": "users",
+ "hadoop_heapsize": "1024",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop"
+ },
+ "hive-env": {
+ "hcat_pid_dir": "/var/run/webhcat",
+ "hcat_user": "hcat",
+ "hive_ambari_database": "MySQL",
+ "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+ "hive_metastore_port": "9083",
+ "webhcat_user": "hcat",
+ "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n if [ -z \"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n else\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n fi\n fi\n\n# The heap size of the jvm stared by hive shell script can be controlled via:\n\nexport HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger heap size may be required when running queries over large number of files or partitions.\n# By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration Directory can
be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing extra ibraries required for hive compilation/execution can be controlled by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" ]; then\n export HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n export HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport METASTORE_PORT={{hive_metastore_port}}",
+ "hive_database_name": "hive",
+ "hive_database_type": "mysql",
+ "hive_pid_dir": "/var/run/hive",
+ "hive_log_dir": "/var/log/hive",
+ "hive_user": "hive",
+ "hcat_log_dir": "/var/log/webhcat",
+ "hive_database": "New MySQL Database",
+ "hive_security_authorization": "None"
+ },
+ "hbase-env": {
+ "hbase_pid_dir": "/var/run/hbase",
+ "hbase_user": "hbase",
+ "hbase_master_heapsize": "1024m",
+ "content": "\n# Set environment variables here.\n\n# The java implementation to use. Java 1.6 required.\nexport JAVA_HOME={{java64_home}}\n\n# HBase Configuration directory\nexport HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}\n\n# Extra Java CLASSPATH elements. Optional.\nexport HBASE_CLASSPATH=${HBASE_CLASSPATH}\n\n# The maximum amount of heap to use, in MB. Default is 1000.\n# export HBASE_HEAPSIZE=1000\n\n# Extra Java runtime options.\n# Below are what we set by default. May only work with SUN JVM.\n# For more on why as well as other possible settings,\n# see http://wiki.apache.org/hadoop/PerformanceTuning\nexport HBASE_OPTS=\"-XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log\"\nexport SERVER_GC_OPTS=\"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`\"\n# Uncomment below to enable java garbage collection logging.\n# export HBASE_OPTS=\"$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateS
tamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log\"\n\n# Uncomment and adjust to enable JMX exporting\n# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.\n# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html\n#\n# export HBASE_JMX_BASE=\"-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false\"\nexport HBASE_MASTER_OPTS=\"-Xmx{{master_heapsize}}\"\nexport HBASE_REGIONSERVER_OPTS=\"-Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n# export HBASE_THRIFT_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103\"\n# export HBASE_ZOOKEEPER_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104\"\n\n# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.\nexport HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers\n\n# Extra
ssh options. Empty by default.\n# export HBASE_SSH_OPTS=\"-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR\"\n\n# Where log files are stored. $HBASE_HOME/logs by default.\nexport HBASE_LOG_DIR={{log_dir}}\n\n# A string representing this instance of hbase. $USER by default.\n# export HBASE_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes. See 'man nice'.\n# export HBASE_NICENESS=10\n\n# The directory where pid files are stored. /tmp by default.\nexport HBASE_PID_DIR={{pid_dir}}\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HBASE_SLAVE_SLEEP=0.1\n\n# Tell HBase whether it should manage it's own instance of Zookeeper or not.\nexport HBASE_MANAGES_ZK=false\n\n{% if security_enabled %}\nexport HBASE_OPTS=\"$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}\"\nexport HBASE_MASTER_OPTS=\"$HBAS
E_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}\"\nexport HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n{% endif %}",
+ "hbase_regionserver_heapsize": "1024m",
+ "hbase_regionserver_xmn_max": "512",
+ "hbase_regionserver_xmn_ratio": "0.2",
+ "hbase_log_dir": "/var/log/hbase",
+ "hbase_java_io_tmpdir" : "/tmp"
+ },
+ "ganglia-env": {
+ "gmond_user": "nobody",
+ "ganglia_runtime_dir": "/var/run/ganglia/hdp",
+ "rrdcached_base_dir": "/var/lib/ganglia/rrds",
+ "rrdcached_flush_timeout": "7200",
+ "gmetad_user": "nobody",
+ "rrdcached_write_threads": "4",
+ "rrdcached_delay": "1800",
+ "rrdcached_timeout": "3600"
+ },
+ "zookeeper-env": {
+ "clientPort": "2181",
+ "zk_user": "zookeeper",
+ "zk_log_dir": "/var/log/zookeeper",
+ "syncLimit": "5",
+ "content": "\nexport JAVA_HOME={{java64_home}}\nexport ZOO_LOG_DIR={{zk_log_dir}}\nexport ZOOPIDFILE={{zk_pid_file}}\nexport SERVER_JVMFLAGS={{zk_server_heapsize}}\nexport JAVA=$JAVA_HOME/bin/java\nexport CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*\n\n{% if security_enabled %}\nexport SERVER_JVMFLAGS=\"$SERVER_JVMFLAGS -Djava.security.auth.login.config={{zk_server_jaas_file}}\"\nexport CLIENT_JVMFLAGS=\"$CLIENT_JVMFLAGS -Djava.security.auth.login.config={{zk_client_jaas_file}}\"\n{% endif %}",
+ "initLimit": "10",
+ "zk_pid_dir": "/var/run/zookeeper",
+ "zk_data_dir": "/hadoop/zookeeper",
+ "tickTime": "2000"
+ },
+ "mapred-env": {
+ "content": "\n# export JAVA_HOME=/home/y/libexec/jdk1.6.0/\n\nexport HADOOP_JOB_HISTORYSERVER_HEAPSIZE={{jobhistory_heapsize}}\n\nexport HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA\n\n#export HADOOP_JOB_HISTORYSERVER_OPTS=\n#export HADOOP_MAPRED_LOG_DIR=\"\" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.\n#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.\n#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.\n#export HADOOP_MAPRED_IDENT_STRING= #A string representing this instance of hadoop. $USER by default\n#export HADOOP_MAPRED_NICENESS= #The scheduling priority for daemons. Defaults to 0.",
+ "mapred_pid_dir_prefix": "/var/run/hadoop-mapreduce",
+ "mapred_user": "mapred",
+ "jobhistory_heapsize": "900",
+ "mapred_log_dir_prefix": "/var/log/hadoop-mapreduce"
+ },
+ "tez-env": {
+ "content": "\n# Tez specific configuration\nexport TEZ_CONF_DIR={{config_dir}}\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}",
+ "tez_user": "tez"
+ },
+ "storm-env": {
+ "content": "\n#!/bin/bash\n\n# Set Storm specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\n# export STORM_CONF_DIR=\"\"",
+ "storm_log_dir": "/var/log/storm",
+ "storm_pid_dir": "/var/run/storm",
+ "storm_user": "storm"
+ },
+ "falcon-env": {
+ "falcon_port": "15000",
+ "falcon_pid_dir": "/var/run/falcon",
+ "falcon_log_dir": "/var/log/falcon",
+ "falcon.emeddedmq.port": "61616",
+ "falcon_user": "falcon",
+ "falcon_local_dir": "/hadoop/falcon",
+ "content": "\n# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java_home}}\n\n# any additional java opts you want to set. This will apply to both client and server operations\n#export FALCON_OPTS=\n\n# any additional java opts that you want to set for client only\n#export FALCON_CLIENT_OPTS=\n\n# java heap size we want to set for the client. Default is 1024MB\n#export FALCON_CLIENT_HEAP=\n\n# any additional opts you want to set for prisim service.\n#export FALCON_PRISM_OPTS=\n\n# java heap size we want to set for the prisim service. Default is 1024MB\n#export FALCON_PRISM_HEAP=\n\n# any additional opts you want to set for falcon service.\nexport FALCON_SERVER_OPTS=\"-Dfalcon.embeddedmq={{falcon_embeddedmq_enabled}} -Dfalcon.emeddedmq.port={{falcon_emeddedmq_port}}\"\n\n# java heap size we want to set for the falcon server. Default is 1024MB\n#export FALCON_SERVER_HEAP=\n\n# What is is considered as falcon home
dir. Default is the base locaion of the installed software\n#export FALCON_HOME_DIR=\n\n# Where log files are stored. Defatult is logs directory under the base install location\nexport FALCON_LOG_DIR={{falcon_log_dir}}\n\n# Where pid files are stored. Defatult is logs directory under the base install location\nexport FALCON_PID_DIR={{falcon_pid_dir}}\n\n# where the falcon active mq data is stored. Defatult is logs/data directory under the base install location\nexport FALCON_DATA_DIR={{falcon_embeddedmq_data}}\n\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\n#export FALCON_EXPANDED_WEBAPP_DIR=",
+ "falcon.embeddedmq.data": "/hadoop/falcon/embeddedmq/data",
+ "falcon.embeddedmq": "true",
+ "falcon_store_uri": "file:///hadoop/falcon/store"
+ },
+ "oozie-env": {
+ "oozie_derby_database": "Derby",
+ "oozie_admin_port": "11001",
+ "oozie_hostname": "abtest-3.c.pramod-thangali.internal",
+ "oozie_pid_dir": "/var/run/oozie",
+ "content": "\n#!/bin/bash\n\nif [ -d \"/usr/lib/bigtop-tomcat\" ]; then\n export OOZIE_CONFIG=${OOZIE_CONFIG:-/etc/oozie/conf}\n export CATALINA_BASE=${CATALINA_BASE:-/var/lib/oozie/oozie-server}\n export CATALINA_TMPDIR=${CATALINA_TMPDIR:-/var/tmp/oozie}\n export OOZIE_CATALINA_HOME=/usr/lib/bigtop-tomcat\nfi\n\n#Set JAVA HOME\nexport JAVA_HOME={{java_home}}\n\nexport JRE_HOME=${JAVA_HOME}\n\n# Set Oozie specific environment variables here.\n\n# Settings for the Embedded Tomcat that runs Oozie\n# Java System properties for Oozie should be specified in this variable\n#\n# export CATALINA_OPTS=\n\n# Oozie configuration file to load from Oozie configuration directory\n#\n# export OOZIE_CONFIG_FILE=oozie-site.xml\n\n# Oozie logs directory\n#\nexport OOZIE_LOG={{oozie_log_dir}}\n\n# Oozie pid directory\n#\nexport CATALINA_PID={{pid_file}}\n\n#Location of the data for oozie\nexport OOZIE_DATA={{oozie_data_dir}}\n\n# Oozie Log4J configuration file to load from Oozie configuratio
n directory\n#\n# export OOZIE_LOG4J_FILE=oozie-log4j.properties\n\n# Reload interval of the Log4J configuration file, in seconds\n#\n# export OOZIE_LOG4J_RELOAD=10\n\n# The port Oozie server runs\n#\nexport OOZIE_HTTP_PORT={{oozie_server_port}}\n\n# The admin port Oozie server runs\n#\nexport OOZIE_ADMIN_PORT={{oozie_server_admin_port}}\n\n# The host name Oozie server runs on\n#\n# export OOZIE_HTTP_HOSTNAME=`hostname -f`\n\n# The base URL for callback URLs to Oozie\n#\n# export OOZIE_BASE_URL=\"http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie\"\nexport JAVA_LIBRARY_PATH=/usr/lib/hadoop/lib/native/Linux-amd64-64",
+ "oozie_user": "oozie",
+ "oozie_database": "New Derby Database",
+ "oozie_data_dir": "/hadoop/oozie/data",
+ "oozie_log_dir": "/var/log/oozie"
+ },
+ "webhcat-env": {
+ "content": "\n# The file containing the running pid\nPID_FILE={{pid_file}}\n\nTEMPLETON_LOG_DIR={{templeton_log_dir}}/\n\n\nWEBHCAT_LOG_DIR={{templeton_log_dir}}/\n\n# The console error log\nERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log\n\n# The console log\nCONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log\n\n#TEMPLETON_JAR=templeton_jar_name\n\n#HADOOP_PREFIX=hadoop_prefix\n\n#HCAT_PREFIX=hive_prefix\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=/usr/lib/hadoop"
+ },
+ "pig-env": {
+ "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
+ },
+ "sqoop-env": {
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+ "sqoop_user": "sqoop",
+ "jdbc_drivers" : "com.microsoft.sqlserver.jdbc.SQLServerDriver,Unsupported"
+ },
+ "hdfs-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "yarn-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hbase-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hive-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hive-exec-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "webhcat-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "zookeeper-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "pig-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "pig-properties": {
+ "content": "pigproperties\nline2"
+ },
+ "oozie-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "flume-conf": {
+ "content": "a1.sources = r1\n a1.sinks = k1\n a1.channels = c1\n # Describe/configure the source\n a1.sources.r1.type = netcat\n a1.sources.r1.bind = localhost\n a1.sources.r1.port = 44444\n \n # Describe the sink\n a1.sinks.k1.type = logger\n \n # Use a channel which buffers events in memory\n a1.channels.c1.type = memory\n a1.channels.c1.capacity = 1000\n a1.channels.c1.transactionCapacity = 100\n \n # Bind the source and sink to the channel\n a1.sources.r1.channels = c1\n a1.sinks.k1.channel = c1\n"
+ },
+ "flume-log4j": {
+ "content": "log4jproperties\nline2"
+ }
+ },
+ "configuration_attributes": {
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "tez-site": {
+ "final": {
+ "tez.am.log.level": "true"
+ }
+ },
+ "capacity-scheduler": {
+ "final": {
+ "yarn.scheduler.capacity.node-locality-delay": "true"
+ }
+ },
+ "mapred-site": {
+ "final": {
+ "mapred.healthChecker.script.path": "true",
+ "mapreduce.jobtracker.staging.root.dir": "true"
+ }
+ },
+ "oozie-site": {
+ "final": {
+ "oozie.service.PurgeService.purge.interval": "true",
+ "oozie.service.CallableQueueService.queue.size": "true"
+ }
+ },
+ "webhcat-site": {
+ "final": {
+ "templeton.pig.path": "true",
+ "templeton.exec.timeout": "true",
+ "templeton.override.enabled": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "hbase-site": {
+ "final": {
+ "hbase.client.keyvalue.maxsize": "true",
+ "hbase.hstore.compactionThreshold": "true",
+ "hbase.rootdir": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ },
+ "hive-site": {
+ "final": {
+ "javax.jdo.option.ConnectionPassword": "true",
+ "javax.jdo.option.ConnectionDriverName": "true",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true"
+ }
+ },
+ "hive-interactive-site": {
+ "final": {
+ "javax.jdo.option.ConnectionPassword": "true",
+ "javax.jdo.option.ConnectionDriverName": "true",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true"
+ }
+ }
+ },
+ "configurationTags": {
+ "capacity-scheduler": {
+ "tag": "version1"
+ },
+ "oozie-site": {
+ "tag": "version1"
+ },
+ "storm-site": {
+ "tag": "version1"
+ },
+ "webhcat-site": {
+ "tag": "version1"
+ },
+ "global": {
+ "tag": "version1"
+ },
+ "mapred-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "hbase-site": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "hive-site": {
+ "tag": "version1"
+ },
+ "hive-interactive-site": {
+ "tag": "version1"
+ },
+ "hdfs-log4j": {
+ "tag": "version1"
+ },
+ "yarn-log4j": {
+ "tag": "version1"
+ },
+ "hbase-log4j": {
+ "tag": "version1"
+ },
+ "hive-log4j": {
+ "tag": "version1"
+ },
+ "hive-exec-log4j": {
+ "tag": "version1"
+ },
+ "zookeeper-log4j": {
+ "tag": "version1"
+ },
+ "oozie-log4j": {
+ "tag": "version1"
+ },
+ "pig-log4j": {
+ "tag": "version1"
+ },
+ "pig-properties": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "snamenode_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "nm_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "drpc_server_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "slave_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "ganglia_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "hive_server_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "logviewer_server_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "hive_metastore_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "hbase_rs_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "webhcat_server_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "supervisor_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "ganglia_monitor_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "storm_ui_server_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "oozie_server": [
+ "c6402.ambari.apache.org"
+ ],
+ "hs_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "nimbus_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "namenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "hbase_master_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "hive_mysql_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "falcon_server_hosts": [
+ "c6402.ambari.apache.org"
+ ]
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/custom_actions/TestCheckHost.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/TestCheckHost.py b/ambari-server/src/test/python/custom_actions/TestCheckHost.py
index 0d4e485..9303d5d 100644
--- a/ambari-server/src/test/python/custom_actions/TestCheckHost.py
+++ b/ambari-server/src/test/python/custom_actions/TestCheckHost.py
@@ -413,4 +413,37 @@ class TestCheckHost(TestCase):
self.assertEquals(structured_out_mock.call_args[0][0], {'transparentHugePage' : {'message': '', 'exit_code': 0}})
+ @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
+ @patch("tempfile.mkdtemp", new = MagicMock(return_value='/tmp/jdk_tmp_dir'))
+ @patch.object(Script, 'get_config')
+ @patch.object(Script, 'get_tmp_dir')
+ @patch("check_host.download_file")
+ @patch("resource_management.libraries.script.Script.put_structured_out")
+ @patch("os.path.isfile")
+ @patch("resource_management.core.shell.call")
+ def testUnsupportedDatabaseType(self, shell_call_mock, isfile_mock, structured_out_mock, download_file_mock, get_tmp_dir_mock, get_config_mock):
+ get_config_mock.return_value = {"commandParams" : {"check_execute_list" : "db_connection_check",
+ "java_home" : "test_java_home",
+ "ambari_server_host" : "test_host",
+ "jdk_location" : "test_jdk_location",
+ "db_name" : "unsupported_db",
+ "db_connection_url" : "test_db_connection_url",
+ "user_name" : "test_user_name",
+ "user_passwd" : "test_user_passwd",
+ "jdk_name" : "test_jdk_name"},
+ "hostLevelParams": { "agentCacheDir": "/nonexistent_tmp",
+ "custom_mysql_jdbc_name" : "mysql-connector-java.jar"}
+ }
+ get_tmp_dir_mock.return_value = "/tmp"
+ download_file_mock.side_effect = Exception("test exception")
+ isfile_mock.return_value = True
+ checkHost = CheckHost()
+
+ try:
+ checkHost.actionexecute(None)
+ self.fail("DB Check should be failed")
+ except Fail:
+ pass
+
+ self.assertEquals(structured_out_mock.call_args[0][0], {'db_connection_check': {'message': '\'unsupported_db\' database type not supported.', 'exit_code': 1}})
[52/57] [abbrv] ambari git commit: AMBARI-21307 renamed package,
minor corrections. Changed endpoint name
Posted by lp...@apache.org.
AMBARI-21307 renamed package, minor corrections. Changed endpoint name
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/30dded65
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/30dded65
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/30dded65
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 30dded6571d9ccb7f9d292cbf9205ac3163da985
Parents: 341be71
Author: lpuskas <lp...@apache.org>
Authored: Mon Aug 14 13:21:07 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:01 2017 +0200
----------------------------------------------------------------------
.../services/AmbariConfigurationService.java | 2 +-
.../api/services/ldap/LdapRestService.java | 15 +-
.../server/ldap/AmbariLdapConfiguration.java | 5 +-
.../apache/ambari/server/ldap/LdapModule.java | 4 +-
...efaultLdapConfigurationValidatorService.java | 243 -------------------
.../ad/DefaultLdapConnectionService.java | 63 -----
...efaultLdapConfigurationValidatorService.java | 243 +++++++++++++++++++
.../ads/DefaultLdapConnectionService.java | 63 +++++
...ltLdapConfigurationValidatorServiceTest.java | 113 ---------
...ltLdapConfigurationValidatorServiceTest.java | 113 +++++++++
10 files changed, 435 insertions(+), 429 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
index 927e518..492509f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
@@ -63,7 +63,7 @@ import io.swagger.annotations.ApiResponses;
* }
* </pre>
*/
-@Path("/configurations/")
+@Path("/ambariconfigs/")
@Api(value = "Ambari Configurations", description = "Endpoint for Ambari configuration related operations")
public class AmbariConfigurationService extends BaseService {
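With this change the resource moves from /configurations/ to /ambariconfigs/ under the API root. For orientation, a minimal client sketch (not part of the commit; the /api/v1 prefix, port, and X-Requested-By header are assumptions based on common Ambari setups):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AmbariConfigsClient {
      public static void main(String[] args) throws Exception {
        // Assumed base URL; the path segment comes from the new @Path("/ambariconfigs/") annotation.
        URL url = new URL("http://c6401.ambari.apache.org:8080/api/v1/ambariconfigs/");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("X-Requested-By", "ambari"); // Ambari typically rejects requests without this header
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);
          }
        }
      }
    }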
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
index 8578204..4e654dc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
@@ -34,6 +34,7 @@ import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@@ -70,8 +71,9 @@ public class LdapRestService extends BaseService {
@POST
@ApiIgnore // until documented
- @Path("/action") // todo this needs to be moved under the resource
+ @Path("/validate") // todo this needs to be moved under the resource
@Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
Set<String> groups = Sets.newHashSet();
@@ -95,10 +97,7 @@ public class LdapRestService extends BaseService {
LOGGER.info("Testing LDAP attributes ....");
groups = ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
- // todo factor out the resource creation, design better the structure in the response
- Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
- resource.setProperty("groups", groups);
- result.getResultTree().addChild(resource, "payload");
+ setResult(groups, result);
break;
case "detect-attributes":
@@ -119,6 +118,12 @@ public class LdapRestService extends BaseService {
return Response.status(result.getStatus().getStatusCode()).entity(getResultSerializer().serialize(result)).build();
}
+ private void setResult(Set<String> groups, Result result) {
+ Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
+ resource.setProperty("groups", groups);
+ result.getResultTree().addChild(resource, "payload");
+ }
+
private void validateRequest(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
String errMsg;
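The validation action is now reachable under /validate and explicitly produces JSON. A hedged sketch of calling it; the service's root path and the exact LdapCheckConfigurationRequest body are not shown in this hunk, so both are placeholders:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class LdapValidateClient {
      public static void main(String[] args) throws Exception {
        // The service root ("/ldapconfigs" here) is an assumption; only the "/validate" segment comes from the diff.
        URL url = new URL("http://c6401.ambari.apache.org:8080/api/v1/ldapconfigs/validate");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json"); // matches @Consumes(MediaType.APPLICATION_JSON)
        conn.setRequestProperty("X-Requested-By", "ambari");
        String body = "{\"RequestInfo\":{\"action\":\"test-attributes\"}}"; // placeholder request body
        try (OutputStream os = conn.getOutputStream()) {
          os.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + conn.getResponseCode()); // response body is JSON per @Produces
      }
    }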
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
index 8ab587b..b1cbced 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
@@ -37,11 +37,12 @@ public class AmbariLdapConfiguration {
public enum LdapConfigProperty {
LDAP_CONFIGURED("ambari.ldap.configured"),
AUTOMATIC_ATTRIBUTE_DETECTION("ambari.ldap.automatic.attribute.detection"),
-
USE_SSL("ambari.ldap.usessl"),
LDAP_SERVER_HOST("ambari.ldap.server.host"),
LDAP_SERVER_PORT("ambari.ldap.server.port"),
- BASE_DN("ambari.ldap.base.dn"),
+ LDAP_TRUSTSTORE_TYPE("ambari.ldap.truststore.type"),
+ LDAP_TRUSTSTORE_PATH("ambari.ldap.truststore.path"),
+ BASE_DN("ambari.ldap.bind.dn"),
BIND_ANONIMOUSLY("ambari.ldap.bindanonymously"),
MANAGER_DN("ambari.ldap.managerdn"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
index 1b49159..a4ad2ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -18,8 +18,8 @@ package org.apache.ambari.server.ldap;
import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.ambari.server.ldap.service.LdapFacade;
-import org.apache.ambari.server.ldap.service.ad.DefaultLdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.ad.DefaultLdapConnectionService;
+import org.apache.ambari.server.ldap.service.ads.DefaultLdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.ads.DefaultLdapConnectionService;
import com.google.inject.AbstractModule;
import com.google.inject.assistedinject.FactoryModuleBuilder;
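Only the imports change in this hunk; the module's bindings live outside it. For orientation, a plausible shape for such a Guice module, assuming straightforward bindings (the real LdapModule body may differ):

    import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
    import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
    import org.apache.ambari.server.ldap.service.LdapConnectionService;
    import org.apache.ambari.server.ldap.service.LdapFacade;
    import org.apache.ambari.server.ldap.service.ads.DefaultLdapConfigurationValidatorService;
    import org.apache.ambari.server.ldap.service.ads.DefaultLdapConnectionService;

    import com.google.inject.AbstractModule;

    public class LdapModuleSketch extends AbstractModule {
      @Override
      protected void configure() {
        // Hypothetical bindings; the actual LdapModule body is outside this hunk.
        bind(LdapFacade.class).to(AmbariLdapFacade.class);
        bind(LdapConnectionService.class).to(DefaultLdapConnectionService.class);
        bind(LdapConfigurationValidatorService.class).to(DefaultLdapConfigurationValidatorService.class);
      }
    }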
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
deleted file mode 100644
index a8503ca..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
+++ /dev/null
@@ -1,243 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.AmbariLdapException;
-import org.apache.ambari.server.ldap.service.LdapConnectionService;
-import org.apache.directory.api.ldap.codec.decorators.SearchResultEntryDecorator;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.cursor.SearchCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.exception.LdapException;
-import org.apache.directory.api.ldap.model.message.Response;
-import org.apache.directory.api.ldap.model.message.SearchRequest;
-import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.search.FilterBuilder;
-import org.apache.directory.shared.ldap.constants.SchemaConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-/**
- * Implementation of the validation logic using the Apache Directory API.
- */
-@Singleton
-public class DefaultLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
-
- @Inject
- private LdapConnectionService ldapConnectionService;
-
- /**
- * Facilitating the instantiation
- */
- @Inject
- public DefaultLdapConfigurationValidatorService() {
- }
-
- @Override
- public void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- try {
- bind(ambariLdapConfiguration, ldapConnection);
- } catch (LdapException e) {
- LOGGER.error("Could not connect to the LDAP server", e);
- throw new AmbariLdapException(e);
- }
- }
-
-
- /**
- * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
- * Attributes are considered correct if there is at least one entry found.
- *
- * Invalid attributes are signaled by throwing an exception.
- *
- * @param testUserName the test username
- * @param testPassword the test password
- * @param ambariLdapConfiguration configuration instance holding ldap configuration details
- * @return the DN of the test user
- * @throws AmbariLdapException if the attributes are not valid or any error occurs
- */
- @Override
- public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- SearchCursor searchCursor = null;
- String userDn = null;
- try {
- LOGGER.info("Checking user attributes for user {} r ...", testUserName);
-
- // bind anonymously or with manager credentials
- bind(ambariLdapConfiguration, ldapConnection);
-
- // set up a filter based on the provided attributes
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), testUserName))
- .toString();
-
- LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
- EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
-
- // collecting search result entries
- List<Entry> users = Lists.newArrayList();
- for (Entry entry : entryCursor) {
- users.add(entry);
- userDn = entry.getDn().getNormName();
- }
-
- // there should be at least one user found
- if (users.isEmpty()) {
- String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
- LOGGER.error(msg);
- throw new Exception(msg);
- }
-
- LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
-
- } catch (Exception e) {
-
- LOGGER.error("User attributes validation failed.", e);
- throw new AmbariLdapException(e.getMessage(), e);
-
- } finally {
- closeResources(ldapConnection, searchCursor);
- }
- return userDn;
- }
-
-
- @Override
- public Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- SearchCursor searchCursor = null;
- Set<Response> groupResponses = Sets.newHashSet();
-
- try {
- LOGGER.info("Checking group attributes for user dn {} ...", userDn);
-
- bind(ambariLdapConfiguration, ldapConnection);
-
- // set up a filter based on the provided attributes
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.groupObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.groupMemberAttribute(), userDn)
- ).toString();
-
- LOGGER.info("Searching for the groups the user dn: {} is member of using the search filter: {}", userDn, filter);
-
- // assemble a search request
- SearchRequest searchRequest = new SearchRequestImpl();
- searchRequest.setFilter(filter);
- searchRequest.setBase(new Dn(ambariLdapConfiguration.groupSearchBase()));
- searchRequest.setScope(SearchScope.SUBTREE);
- searchRequest.addAttributes(ambariLdapConfiguration.groupMemberAttribute(), ambariLdapConfiguration.groupNameAttribute());
-
- // perform the search
- searchCursor = ldapConnection.search(searchRequest);
-
- for (Response response : searchCursor) {
- groupResponses.add(response);
- }
-
- } catch (Exception e) {
-
- LOGGER.error("User attributes validation failed.", e);
- throw new AmbariLdapException(e.getMessage(), e);
-
- } finally {
- closeResources(ldapConnection, searchCursor);
- }
-
- return processGroupResults(groupResponses, ambariLdapConfiguration);
- }
-
- /**
- * Binds to the LDAP server (anonymously or with manager credentials)
- *
- * @param ambariLdapConfiguration configuration instance
- * @param connection connection instance
- * @throws LdapException if the bind operation fails
- */
- private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
- LOGGER.info("Connecting to LDAP ....");
- if (!ambariLdapConfiguration.bindAnonimously()) {
- LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
- connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
- } else {
- LOGGER.debug("Binding anonimously ...");
- connection.bind();
- }
-
- if (!connection.isConnected()) {
- LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
- throw new IllegalStateException("The connection to the LDAP server is not alive");
- }
- LOGGER.info("Connected to LDAP.");
- }
-
-
- /**
- * Extracts meaningful values from the search result.
- *
- * @param groupResponses the result entries returned by the search
- * @param ambariLdapConfiguration holds the keys of the meaningful attributes
- * @return a set with the group names the test user belongs to
- */
- private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
- Set<String> groupStrSet = Sets.newHashSet();
- for (Response response : groupResponses) {
- Entry entry = ((SearchResultEntryDecorator) response).getEntry();
- groupStrSet.add(entry.get(ambariLdapConfiguration.groupNameAttribute()).get().getString());
- }
-
- LOGGER.debug("Extracted group names from group search responses: {}", groupStrSet);
- return groupStrSet;
- }
-
- private void closeResources(LdapConnection connection, SearchCursor searchCursor) {
- LOGGER.debug("Housekeeping: closing the connection and the search cursor ...");
-
- if (null != searchCursor) {
- // this method is idempotent
- searchCursor.close();
- }
-
- if (null != connection) {
- try {
- connection.close();
- } catch (IOException e) {
- LOGGER.error("Exception occurred while closing the connection", e);
- }
- }
- }
-
-}
-
-
-
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
deleted file mode 100644
index 25dc1f2..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import javax.inject.Singleton;
-
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.service.LdapConnectionService;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Singleton
-public class DefaultLdapConnectionService implements LdapConnectionService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConnectionService.class);
-
- @Override
- public LdapNetworkConnection createLdapConnection(AmbariLdapConfiguration ambariLdapConfiguration) {
- LOGGER.debug("Creating ldap connection instance from: {}", ambariLdapConfiguration);
- return new LdapNetworkConnection(getLdapConnectionConfig(ambariLdapConfiguration));
- }
-
- private LdapConnectionConfig getLdapConnectionConfig(AmbariLdapConfiguration ambariAmbariLdapConfiguration) {
- LOGGER.debug("Creating a configuration instance based on the ambari configuration: {}", ambariAmbariLdapConfiguration);
-
- LdapConnectionConfig ldapConnectionConfig = new LdapConnectionConfig();
- ldapConnectionConfig.setLdapHost(ambariAmbariLdapConfiguration.ldapServerHost());
- ldapConnectionConfig.setLdapPort(ambariAmbariLdapConfiguration.ldapServerPort());
- ldapConnectionConfig.setUseSsl(ambariAmbariLdapConfiguration.useSSL());
-
- // todo set the other values as required
- return ldapConnectionConfig;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
new file mode 100644
index 0000000..040983a
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
@@ -0,0 +1,243 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ads;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.codec.decorators.SearchResultEntryDecorator;
+import org.apache.directory.api.ldap.model.constants.SchemaConstants;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.exception.LdapException;
+import org.apache.directory.api.ldap.model.message.Response;
+import org.apache.directory.api.ldap.model.message.SearchRequest;
+import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/**
+ * Implementation of the validation logic using the Apache Directory API.
+ */
+@Singleton
+public class DefaultLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
+
+ @Inject
+ private LdapConnectionService ldapConnectionService;
+
+ /**
+ * Facilitating the instantiation
+ */
+ @Inject
+ public DefaultLdapConfigurationValidatorService() {
+ }
+
+ @Override
+ public void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ try {
+ bind(ambariLdapConfiguration, ldapConnection);
+ } catch (LdapException e) {
+ LOGGER.error("Could not connect to the LDAP server", e);
+ throw new AmbariLdapException(e);
+ }
+ }
+
+
+ /**
+ * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
+ * Attributes are considered correct if there is at least one entry found.
+ *
+ * Invalid attributes are signaled by throwing an exception.
+ *
+ * @param testUserName the test username
+ * @param testPassword the test password
+ * @param ambariLdapConfiguration configuration instance holding ldap configuration details
+ * @return the DN of the test user
+ * @throws AmbariLdapException if the attributes are not valid or any error occurs
+ */
+ @Override
+ public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ String userDn = null;
+ try {
+ LOGGER.info("Checking user attributes for user {} r ...", testUserName);
+
+ // bind anonymously or with manager credentials
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), testUserName))
+ .toString();
+
+ LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
+ EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
+
+ // collecting search result entries
+ List<Entry> users = Lists.newArrayList();
+ for (Entry entry : entryCursor) {
+ users.add(entry);
+ userDn = entry.getDn().getNormName();
+ }
+
+ // there should be at least one user found
+ if (users.isEmpty()) {
+ String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
+ LOGGER.error(msg);
+ throw new Exception(msg);
+ }
+
+ LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+ closeResources(ldapConnection, searchCursor);
+ }
+ return userDn;
+ }
+
+
+ @Override
+ public Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ Set<Response> groupResponses = Sets.newHashSet();
+
+ try {
+ LOGGER.info("Checking group attributes for user dn {} ...", userDn);
+
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.groupObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.groupMemberAttribute(), userDn)
+ ).toString();
+
+ LOGGER.info("Searching for the groups the user dn: {} is member of using the search filter: {}", userDn, filter);
+
+ // assemble a search request
+ SearchRequest searchRequest = new SearchRequestImpl();
+ searchRequest.setFilter(filter);
+ searchRequest.setBase(new Dn(ambariLdapConfiguration.groupSearchBase()));
+ searchRequest.setScope(SearchScope.SUBTREE);
+ searchRequest.addAttributes(ambariLdapConfiguration.groupMemberAttribute(), ambariLdapConfiguration.groupNameAttribute());
+
+ // perform the search
+ searchCursor = ldapConnection.search(searchRequest);
+
+ for (Response response : searchCursor) {
+ groupResponses.add(response);
+ }
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+ closeResources(ldapConnection, searchCursor);
+ }
+
+ return processGroupResults(groupResponses, ambariLdapConfiguration);
+ }
+
+ /**
+ * Binds to the LDAP server (anonymously or with manager credentials)
+ *
+ * @param ambariLdapConfiguration configuration instance
+ * @param connection connection instance
+ * @throws LdapException if the bind operation fails
+ */
+ private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
+ LOGGER.info("Connecting to LDAP ....");
+ if (!ambariLdapConfiguration.bindAnonimously()) {
+ LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
+ connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
+ } else {
+ LOGGER.debug("Binding anonimously ...");
+ connection.bind();
+ }
+
+ if (!connection.isConnected()) {
+ LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
+ throw new IllegalStateException("The connection to the LDAP server is not alive");
+ }
+ LOGGER.info("Connected to LDAP.");
+ }
+
+
+ /**
+ * Extracts meaningful values from the search result.
+ *
+ * @param groupResponses the result entries returned by the search
+ * @param ambariLdapConfiguration holds the keys of the meaningful attributes
+ * @return a set with the group names the test user belongs to
+ */
+ private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
+ Set<String> groupStrSet = Sets.newHashSet();
+ for (Response response : groupResponses) {
+ Entry entry = ((SearchResultEntryDecorator) response).getEntry();
+ groupStrSet.add(entry.get(ambariLdapConfiguration.groupNameAttribute()).get().getString());
+ }
+
+ LOGGER.debug("Extracted group names from group search responses: {}", groupStrSet);
+ return groupStrSet;
+ }
+
+ private void closeResources(LdapConnection connection, SearchCursor searchCursor) {
+ LOGGER.debug("Housekeeping: closing the connection and the search cursor ...");
+
+ if (null != searchCursor) {
+ // this method is idempotent
+ searchCursor.close();
+ }
+
+ if (null != connection) {
+ try {
+ connection.close();
+ } catch (IOException e) {
+ LOGGER.error("Exception occurred while closing the connection", e);
+ }
+ }
+ }
+
+}
+
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConnectionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConnectionService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConnectionService.java
new file mode 100644
index 0000000..ced52fc
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConnectionService.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ads;
+
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Singleton
+public class DefaultLdapConnectionService implements LdapConnectionService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConnectionService.class);
+
+ @Override
+ public LdapNetworkConnection createLdapConnection(AmbariLdapConfiguration ambariLdapConfiguration) {
+ LOGGER.debug("Creating ldap connection instance from: {}", ambariLdapConfiguration);
+ return new LdapNetworkConnection(getLdapConnectionConfig(ambariLdapConfiguration));
+ }
+
+ private LdapConnectionConfig getLdapConnectionConfig(AmbariLdapConfiguration ambariAmbariLdapConfiguration) {
+ LOGGER.debug("Creating a configuration instance based on the ambari configuration: {}", ambariAmbariLdapConfiguration);
+
+ LdapConnectionConfig ldapConnectionConfig = new LdapConnectionConfig();
+ ldapConnectionConfig.setLdapHost(ambariAmbariLdapConfiguration.ldapServerHost());
+ ldapConnectionConfig.setLdapPort(ambariAmbariLdapConfiguration.ldapServerPort());
+ ldapConnectionConfig.setUseSsl(ambariAmbariLdapConfiguration.useSSL());
+
+ // todo set the other values as required
+ return ldapConnectionConfig;
+ }
+
+}
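Taken together, the relocated services compose as follows; a minimal end-to-end sketch mirroring the calls exercised by the test class below (server host is hypothetical, anonymous bind assumed; the user/group search keys shown in the tests would be needed for the attribute checks and are omitted here for brevity):

    import java.util.Map;

    import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
    import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
    import org.apache.ambari.server.ldap.service.LdapConnectionService;
    import org.apache.ambari.server.ldap.service.ads.DefaultLdapConfigurationValidatorService;
    import org.apache.ambari.server.ldap.service.ads.DefaultLdapConnectionService;
    import org.apache.directory.ldap.client.api.LdapNetworkConnection;

    import com.google.common.collect.Maps;

    public class LdapValidationFlow {
      public static void main(String[] args) throws Exception {
        Map<String, Object> props = Maps.newHashMap();
        props.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
        props.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.example.com"); // hypothetical host
        props.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");

        AmbariLdapConfiguration config = new AmbariLdapConfiguration(props);
        LdapConnectionService connectionService = new DefaultLdapConnectionService();
        LdapNetworkConnection connection = connectionService.createLdapConnection(config);

        LdapConfigurationValidatorService validator = new DefaultLdapConfigurationValidatorService();
        // Binds (anonymously here) and verifies the connection is alive.
        validator.checkConnection(connection, config);
        // Returns the DN of the test user if the user filter matches at least one entry.
        String userDn = validator.checkUserAttributes(connection, "einstein", "", config);
        // Resolves the set of group names the user belongs to.
        System.out.println(validator.checkGroupAttributes(connection, userDn, config));
      }
    }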
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
deleted file mode 100644
index 663ea12..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import static org.junit.Assert.assertNotNull;
-
-import java.util.Map;
-
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.LdapConnectionService;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.apache.directory.shared.ldap.constants.SchemaConstants;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Maps;
-
-public class DefaultLdapConfigurationValidatorServiceTest {
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
- private static final String TEST_USER = "einstein";
-
- LdapConfigurationValidatorService ldapConfigurationValidatorService = new DefaultLdapConfigurationValidatorService();
-
-
- @Test
- public void testCheckAttributes() throws Exception {
-
- // WHEN
- LdapConnectionConfig config = new LdapConnectionConfig();
- config.setLdapHost("localhost");
- config.setLdapPort(389);
- LdapConnection connection = new LdapNetworkConnection(config);
-
- // THEN
- connection.anonymousBind();
-
-
- EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
-
- for (Entry entry : cursor) {
- assertNotNull(entry);
- System.out.println(entry);
- }
-
- cursor.close();
-
- }
-
- @Test
- public void testCheckUserAttributes() throws Exception {
- // GIVEN
- Map<String, Object> ldapPropsMap = Maps.newHashMap();
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(), "dc=example,dc=com");
-
-
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
- LdapConnectionService connectionService = new DefaultLdapConnectionService();
- LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
-
- ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, "einstein", "", ambariLdapConfiguration);
- }
-
- @Test
- public void testRetrieveGroupsForUser() throws Exception {
- // GIVEN
- Map<String, Object> ldapPropsMap = Maps.newHashMap();
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
-
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_SEARCH_BASE.propertyName(), "dc=example,dc=com");
-
-
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
- LdapConnectionService connectionService = new DefaultLdapConnectionService();
- LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
-
- ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, "uid=einstein,dc=example,dc=com", ambariLdapConfiguration);
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/30dded65/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
new file mode 100644
index 0000000..1c7f75d
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ads;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Map;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.model.constants.SchemaConstants;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+
+public class DefaultLdapConfigurationValidatorServiceTest {
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
+ private static final String TEST_USER = "einstein";
+
+ LdapConfigurationValidatorService ldapConfigurationValidatorService = new DefaultLdapConfigurationValidatorService();
+
+
+ @Test
+ public void testCheckAttributes() throws Exception {
+
+ // WHEN
+ LdapConnectionConfig config = new LdapConnectionConfig();
+ config.setLdapHost("localhost");
+ config.setLdapPort(389);
+ LdapConnection connection = new LdapNetworkConnection(config);
+
+ // THEN
+ connection.anonymousBind();
+
+
+ EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
+
+ for (Entry entry : cursor) {
+ assertNotNull(entry);
+ System.out.println(entry);
+ }
+
+ cursor.close();
+
+ }
+
+ @Test
+ public void testCheckUserAttributes() throws Exception {
+ // GIVEN
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(), "dc=example,dc=com");
+
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, "einstein", "", ambariLdapConfiguration);
+ }
+
+ @Test
+ public void testRetrieveGroupsForUser() throws Exception {
+ // GIVEN
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_SEARCH_BASE.propertyName(), "dc=example,dc=com");
+
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, "uid=einstein,dc=example,dc=com", ambariLdapConfiguration);
+ }
+}
\ No newline at end of file
[24/57] [abbrv] ambari git commit: AMBARI-21909. LLAP concurrency
(hive.server2.tez.sessions.per.default.queue) max calculated value should not
be less than its current value.
Posted by lp...@apache.org.
AMBARI-21909. LLAP concurrency (hive.server2.tez.sessions.per.default.queue) max calculated value should not be less than its current value.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ece0b33
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ece0b33
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ece0b33
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 2ece0b3317766fa7f89b80c64cd3ae5adfa8cbde
Parents: f6ecbd1
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Thu Sep 7 15:47:09 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Thu Sep 7 22:44:57 2017 -0700
----------------------------------------------------------------------
.../resources/common-services/YARN/3.0.0.3.0/service_advisor.py | 5 +++++
.../src/main/resources/stacks/HDP/2.5/services/stack_advisor.py | 5 +++++
2 files changed, 10 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ece0b33/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
index 74e0510..ecf245d 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
@@ -1011,6 +1011,11 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
putHiveInteractiveSiteProperty('hive.server2.tez.sessions.per.default.queue', long(llap_concurrency))
putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "minimum", min_llap_concurrency)
+ # Check if 'max_llap_concurreny' < 'llap_concurrency'.
+ if max_llap_concurreny < llap_concurrency:
+ self.logger.info("DBG: Adjusting 'max_llap_concurreny' to : {0}, based on 'llap_concurrency' : {1} and "
+ "earlier 'max_llap_concurreny' : {2}. ".format(llap_concurrency, llap_concurrency, max_llap_concurreny))
+ max_llap_concurreny = llap_concurrency
putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "maximum", long(max_llap_concurreny))
num_llap_nodes = long(num_llap_nodes)
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ece0b33/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 2dc493a..2a5d76b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -1293,6 +1293,11 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
putHiveInteractiveSiteProperty('hive.server2.tez.sessions.per.default.queue', long(llap_concurrency))
putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "minimum", min_llap_concurrency)
+ # Check if 'max_llap_concurreny' < 'llap_concurrency'.
+ if max_llap_concurreny < llap_concurrency:
+ self.logger.info("DBG: Adjusting 'max_llap_concurreny' to : {0}, based on 'llap_concurrency' : {1} and "
+ "earlier 'max_llap_concurreny' : {2}. ".format(llap_concurrency, llap_concurrency, max_llap_concurreny))
+ max_llap_concurreny = llap_concurrency
putHiveInteractiveSitePropertyAttribute('hive.server2.tez.sessions.per.default.queue', "maximum", long(max_llap_concurreny))
num_llap_nodes = long(num_llap_nodes)
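The fix simply clamps the advertised maximum so it never drops below the value already configured for hive.server2.tez.sessions.per.default.queue. The invariant, restated as a standalone sketch (Java used here for illustration; the commit itself is Python):

    public final class SliderBounds {
      private SliderBounds() {
      }

      /** The maximum to advertise for a property: never below its currently configured value. */
      public static long clampMaximum(long calculatedMax, long currentValue) {
        return Math.max(calculatedMax, currentValue);
      }

      public static void main(String[] args) {
        // e.g. calculated max 2, configured hive.server2.tez.sessions.per.default.queue = 5 -> advertise 5
        System.out.println(clampMaximum(2L, 5L)); // prints 5
      }
    }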
[30/57] [abbrv] ambari git commit: AMBARI-21913. UI part: Server
returns 500 error for create config group request (akovalenko)
Posted by lp...@apache.org.
AMBARI-21913. UI part: Server returns 500 error for create config group request (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/de94def9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/de94def9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/de94def9
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: de94def937a531b2775aee419a09d3831c2d012e
Parents: 2170ce0
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Fri Sep 8 14:51:16 2017 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Fri Sep 8 16:20:44 2017 +0300
----------------------------------------------------------------------
ambari-web/app/mixins/common/configs/configs_saver.js | 1 +
ambari-web/app/mixins/main/service/configs/config_overridable.js | 2 ++
2 files changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/de94def9/ambari-web/app/mixins/common/configs/configs_saver.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/configs/configs_saver.js b/ambari-web/app/mixins/common/configs/configs_saver.js
index 6e7af4a..6b69ca1 100644
--- a/ambari-web/app/mixins/common/configs/configs_saver.js
+++ b/ambari-web/app/mixins/common/configs/configs_saver.js
@@ -577,6 +577,7 @@ App.ConfigsSaverMixin = Em.Mixin.create({
"cluster_name": App.get('clusterName') || this.get('clusterName'),
"group_name": group.name,
"tag": group.service_id,
+ "service_name": group.service_id,
"description": group.description,
"hosts": groupHosts,
"service_config_version_note": configVersionNote || "",
http://git-wip-us.apache.org/repos/asf/ambari/blob/de94def9/ambari-web/app/mixins/main/service/configs/config_overridable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/configs/config_overridable.js b/ambari-web/app/mixins/main/service/configs/config_overridable.js
index c1d96dc..e7d223d 100644
--- a/ambari-web/app/mixins/main/service/configs/config_overridable.js
+++ b/ambari-web/app/mixins/main/service/configs/config_overridable.js
@@ -241,6 +241,7 @@ App.ConfigOverridable = Em.Mixin.create({
"group_name": newConfigGroupData.name,
"tag": newConfigGroupData.service_id,
"description": newConfigGroupData.description,
+ "service_name": newConfigGroupData.service_id,
"desired_configs": newConfigGroupData.desired_configs.map(function (cst) {
var type = Em.get(cst, 'site') || Em.get(cst, 'type');
return {
@@ -342,6 +343,7 @@ App.ConfigOverridable = Em.Mixin.create({
group_name: configGroup.get('name'),
description: configGroup.get('description'),
tag: configGroup.get('service.id'),
+ service_name: configGroup.get('service.id'),
hosts: configGroup.get('hosts').map(function (h) {
return {
host_name: h
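Both payload builders now send service_name alongside tag. A sketch of the resulting request body (built as a Java string for illustration; the ConfigGroup wrapper and all values are assumptions, only the keys mirror the code above):

    public class ConfigGroupPayloadExample {
      public static void main(String[] args) {
        // Keys mirror the JS mixins above; the "ConfigGroup" wrapper and all values are placeholders.
        String body = "{\"ConfigGroup\":{"
            + "\"cluster_name\":\"c1\","
            + "\"group_name\":\"hdfs-group-1\","
            + "\"tag\":\"HDFS\","
            + "\"service_name\":\"HDFS\","  // the field added by this commit
            + "\"description\":\"example\","
            + "\"hosts\":[{\"host_name\":\"c6402.ambari.apache.org\"}],"
            + "\"desired_configs\":[]"
            + "}}";
        System.out.println(body);
      }
    }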
[38/57] [abbrv] ambari git commit: AMBARI-21923. Web Client Should
Only Show Revert Action On Revertable Repositories (alexantonenko)
Posted by lp...@apache.org.
AMBARI-21923. Web Client Should Only Show Revert Action On Revertable Repositories (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4bbbe1f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4bbbe1f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4bbbe1f6
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 4bbbe1f66075e1898ab814c27676439123ba0288
Parents: 285cbaf
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Mon Sep 11 13:49:25 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Mon Sep 11 13:49:25 2017 +0300
----------------------------------------------------------------------
.../main/admin/stack_and_upgrade_controller.js | 3 +-
ambari-web/app/mappers/stack_version_mapper.js | 4 ++-
ambari-web/app/models/stack_version/version.js | 2 ++
ambari-web/app/styles/stack_versions.less | 24 +++++++++++--
.../stack_upgrade/upgrade_version_column.hbs | 14 ++++++++
.../stack_upgrade/upgrade_version_box_view.js | 9 +++--
.../upgrade_version_column_view.js | 37 --------------------
.../admin/stack_and_upgrade_controller_test.js | 17 +++------
.../mixins/common/configs/configs_saver_test.js | 4 ++-
.../service/configs/config_overridable_test.js | 2 ++
.../upgrade_version_box_view_test.js | 27 +++++++++++---
11 files changed, 80 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index b0df4a4..f968afa 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -2064,7 +2064,6 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
*/
revertPatchUpgrade: function (version) {
this.set('requestInProgress', true);
- var upgrade = App.StackUpgradeHistory.find().findProperty('associatedVersion', version.get('repositoryVersion'));
return App.ajax.send({
name: 'admin.upgrade.revert',
sender: this,
@@ -2074,7 +2073,7 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
this.sender.set('requestInProgress', false);
},
data: {
- upgradeId: upgrade && upgrade.get('upgradeId'),
+ upgradeId: version.get('stackVersion').get('revertUpgradeId'),
id: version.get('id'),
value: version.get('repositoryVersion'),
label: version.get('displayName'),
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/mappers/stack_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_version_mapper.js b/ambari-web/app/mappers/stack_version_mapper.js
index 2b091a0..9fa0b78 100644
--- a/ambari-web/app/mappers/stack_version_mapper.js
+++ b/ambari-web/app/mappers/stack_version_mapper.js
@@ -36,7 +36,9 @@ App.stackVersionMapper = App.QuickDataMapper.create({
"upgrading_hosts": "host_states.UPGRADING",
"upgraded_hosts": "host_states.UPGRADED",
"upgrade_failed_hosts": "host_states.UPGRADE_FAILED",
- "current_hosts": "host_states.CURRENT"
+ "current_hosts": "host_states.CURRENT",
+ "supports_revert": "supports_revert",
+ "revert_upgrade_id": "revert_upgrade_id"
},
map: function (json) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/models/stack_version/version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_version/version.js b/ambari-web/app/models/stack_version/version.js
index 4e18ca4..45e09e6 100644
--- a/ambari-web/app/models/stack_version/version.js
+++ b/ambari-web/app/models/stack_version/version.js
@@ -34,6 +34,8 @@ App.StackVersion = DS.Model.extend({
upgradedHosts: DS.attr('array'),
upgradeFailedHosts: DS.attr('array'),
currentHosts: DS.attr('array'),
+ supportsRevert: DS.attr('boolean'),
+ revertUpgradeId: DS.attr('number'),
noInstalledHosts: Em.computed.empty('installedHosts'),
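Taken together, the mapper and model changes simply thread two new API
fields through to the client-side record. A condensed sketch of that flow
with a hypothetical payload fragment (values invented; the snake_case field
names come from the mapper config above):

# Hypothetical API fragment; only the two field names are from the mapper.
api_fragment = {
    "supports_revert": True,
    "revert_upgrade_id": 42
}

# The mapper copies snake_case API fields onto camelCase model attributes.
model_attrs = {
    "supportsRevert": api_fragment["supports_revert"],
    "revertUpgradeId": api_fragment["revert_upgrade_id"]
}

print(model_attrs)  # {'supportsRevert': True, 'revertUpgradeId': 42}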
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index b854933..61cb177 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -304,8 +304,28 @@
margin: 5px 0 15px 0;
line-height: 20px;
height: 35px;
- .label-success {
- line-height: 30px;
+ .label-wrapper{
+ display: inline-block;
+ position: relative;
+ .label-success {
+ line-height: 30px;
+ }
+ .dropdown-toggle{
+ background: none;
+ height: auto;
+ padding: 8px 8px;
+ background: none;
+ border:none;
+ outline:none;
+ box-shadow: none;
+ position: absolute;
+ right: 0;
+ top: 0;
+ color: #fff;
+ &:hover, &:active, &:focus{
+ color: #fff;
+ }
+ }
}
.btn.btn-primary:not(.dropdown-toggle) {
padding: 4px 5px;
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
index b6a30fc..41c8cdb 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
@@ -61,7 +61,21 @@
</div>
{{/if}}
{{#if view.stateElement.isLabel}}
+ <div class="label-wrapper">
<span {{bindAttr class="view.stateElement.class"}}>{{view.stateElement.text}}</span>
+ {{#if view.stateElement.canBeReverted}}
+ <button class="btn dropdown-toggle" data-toggle="dropdown">
+ <span class="caret"></span>
+ </button>
+ <ul class="dropdown-menu">
+ <li>
+ <a {{action runAction view.stateElement.action target="view"}}>
+ {{view.stateElement.actionText}}
+ </a>
+ </li>
+ </ul>
+ {{/if}}
+ </div>
{{/if}}
{{#if view.stateElement.isLink}}
{{#if view.stateElement.iconClass}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
index 1a2cc60..7c647fb 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
@@ -145,9 +145,9 @@ App.UpgradeVersionBoxView = Em.View.extend({
action: 'resumeUpgrade'
},
'CURRENT_PATCH': {
- isButton: true,
- text: Em.I18n.t('common.revert'),
- action: 'confirmRevertPatchUpgrade'
+ isLabel: true,
+ text: Em.I18n.t('common.current'),
+ class: 'label label-success'
}
},
@@ -169,6 +169,9 @@ App.UpgradeVersionBoxView = Em.View.extend({
if (status === 'CURRENT' && this.get('content.isPatch') && !this.get('isUpgrading')) {
element.setProperties(statePropertiesMap['CURRENT_PATCH']);
+ element.set('canBeReverted', this.get('content.stackVersion').get('supportsRevert'));
+ element.set('action', 'confirmRevertPatchUpgrade');
+ element.set('actionText', Em.I18n.t('common.revert'));
}
else if (['INSTALLING', 'CURRENT'].contains(status) && !this.get('content.isPatch')) {
element.setProperties(statePropertiesMap[status]);
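The view change above is the crux of the fix: a current patch version is now
always rendered as a "Current" label, with the revert control attached as
optional state instead of replacing the label outright. A rough sketch of
that decision (function and dict keys are illustrative, not the Ember view
API):

def current_patch_state(supports_revert):
    # The label is always shown for a current patch; the dropdown carrying
    # the revert action (see the template above) is gated on canBeReverted.
    return {
        "isLabel": True,
        "text": "Current",
        "class": "label label-success",
        "canBeReverted": supports_revert,
        "action": "confirmRevertPatchUpgrade",
        "actionText": "Revert"
    }

assert current_patch_state(False)["canBeReverted"] is False
assert current_patch_state(True)["actionText"] == "Revert"

Note also that the controller no longer scans App.StackUpgradeHistory for
the upgrade id; it reads revertUpgradeId straight off the version's
stackVersion record, which the mapper now populates from the server.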
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
index 4039497..b5933b9 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
@@ -63,43 +63,6 @@ App.UpgradeVersionColumnView = App.UpgradeVersionBoxView.extend({
}, this);
}.property(),
- /**
- * map of properties which correspond to particular state of Upgrade version
- * @type {object}
- */
- statePropertiesMap: {
- 'CURRENT': {
- isLabel: true,
- text: Em.I18n.t('common.current'),
- class: 'label label-success'
- },
- 'NOT_REQUIRED': {
- isButton: true,
- text: Em.I18n.t('common.install'),
- action: 'installRepoVersionConfirmation'
- },
- 'LOADING': {
- isSpinner: true,
- class: 'spinner'
- },
- 'INSTALLING': {
- iconClass: 'glyphicon glyphicon-cog',
- isLink: true,
- text: Em.I18n.t('hosts.host.stackVersions.status.installing'),
- action: 'showProgressPopup'
- },
- 'INSTALLED': {
- iconClass: 'glyphicon glyphicon-ok',
- isLink: true,
- text: Em.I18n.t('common.installed'),
- action: null
- },
- 'SUSPENDED': {
- isButton: true,
- text: Em.I18n.t('admin.stackUpgrade.dialog.resume'),
- action: 'resumeUpgrade'
- }
- },
/**
* @param {Em.Object} stackService
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 94b9091..e2f07fc 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -3476,24 +3476,15 @@ describe('App.MainAdminStackAndUpgradeController', function() {
});
describe('#revertPatchUpgrade', function() {
- beforeEach(function() {
- sinon.stub(App.StackUpgradeHistory, 'find').returns([
- Em.Object.create({
- associatedVersion: '1.1',
- upgradeId: 1
- })
- ]);
- });
- afterEach(function() {
- App.StackUpgradeHistory.find.restore();
- });
-
it('App.ajax.send should be called', function() {
var version = Em.Object.create({
repositoryVersion: '1.1',
id: 2,
displayName: '1.2',
- upgradeType: 'EXPRESS'
+ upgradeType: 'EXPRESS',
+ stackVersion: Em.Object.create({
+ revertUpgradeId: 1
+ })
});
controller.revertPatchUpgrade(version);
expect(controller.get('requestInProgress')).to.be.true;
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/test/mixins/common/configs/configs_saver_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/configs/configs_saver_test.js b/ambari-web/test/mixins/common/configs/configs_saver_test.js
index 9099e54..855823c 100644
--- a/ambari-web/test/mixins/common/configs/configs_saver_test.js
+++ b/ambari-web/test/mixins/common/configs/configs_saver_test.js
@@ -914,7 +914,8 @@ describe('App.ConfigsSaverMixin', function() {
host_name: 'host1'
}],
"service_config_version_note": "note",
- "desired_configs": "{}"
+ "desired_configs": "{}",
+ "service_name": "S1",
}
});
});
@@ -939,6 +940,7 @@ describe('App.ConfigsSaverMixin', function() {
host_name: 'host1'
}],
"service_config_version_note": "",
+ "service_name": "S1",
"desired_configs": "{}",
id: 'g1'
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/main/service/configs/config_overridable_test.js b/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
index b562ffb..198d854 100644
--- a/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
+++ b/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
@@ -179,6 +179,7 @@ describe('App.ConfigOverridable', function () {
"ConfigGroup": {
"group_name": 'cg1',
"tag": 'S1',
+ "service_name": "S1",
"description": '',
"desired_configs": [],
"hosts": [{host_name: 'host1'}]
@@ -264,6 +265,7 @@ describe('App.ConfigOverridable', function () {
group_name: 'cg1',
description: 'dsc',
tag: 'S1',
+ service_name: "S1",
hosts: [{
host_name: 'host1'
}],
http://git-wip-us.apache.org/repos/asf/ambari/blob/4bbbe1f6/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
index 5b0ce86..46dd62f 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
@@ -452,15 +452,34 @@ describe('App.UpgradeVersionBoxView', function () {
inputData: {
'content.status': 'CURRENT',
'content.isPatch': true,
- 'isUpgrading': false
+ 'isUpgrading': false,
+ 'content.stackVersion': Em.Object.create({
+ 'supportsRevert': false
+ })
+ },
+ expected: {
+ isLabel: true,
+ text: Em.I18n.t('common.current'),
+ class: 'label label-success',
+ canBeReverted: false
+ },
+ title: 'current no-revertable patch version'
+ },
+ {
+ inputData: {
+ 'content.status': 'CURRENT',
+ 'content.isPatch': true,
+ 'isUpgrading': false,
+ 'content.stackVersion': Em.Object.create({
+ 'supportsRevert': true
+ })
},
expected: {
status: 'CURRENT',
- isButton: true,
- text: Em.I18n.t('common.revert'),
+ text: Em.I18n.t('common.current'),
action: 'confirmRevertPatchUpgrade'
},
- title: 'current patch version'
+ title: 'current revertable patch version'
},
{
inputData: {
[37/57] [abbrv] ambari git commit: AMBARI-21821. Update Ambari Metrics
service check to support HTTP SPNEGO
authentication. (Qin Liu via eyang)
Posted by lp...@apache.org.
AMBARI-21821. Update Ambari Metrics service check to support
HTTP SPNEGO authentication. (Qin Liu via eyang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/285cbafe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/285cbafe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/285cbafe
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 285cbafe31df4dd574e6a9828934b49269b169c3
Parents: acde502
Author: Eric Yang <ey...@apache.org>
Authored: Sun Sep 10 08:46:49 2017 -0700
Committer: Eric Yang <ey...@apache.org>
Committed: Sun Sep 10 08:46:49 2017 -0700
----------------------------------------------------------------------
.../0.1.0/package/scripts/params.py | 2 +
.../0.1.0/package/scripts/service_check.py | 201 ++++++++++++++-----
2 files changed, 151 insertions(+), 52 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/285cbafe/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 6975bec..756da26 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -313,6 +313,8 @@ if security_enabled:
_hostname_lowercase = config['hostname'].lower()
client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+ smoke_user_princ = config['configurations']['cluster-env']['smokeuser_principal_name']
+ smoke_user = config['configurations']['cluster-env']['smokeuser']
hbase_user_keytab = config['configurations']['ams-hbase-env']['hbase_user_keytab']
ams_collector_jaas_config_file = format("{hbase_conf_dir}/ams_collector_jaas.conf")
http://git-wip-us.apache.org/repos/asf/ambari/blob/285cbafe/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
index 2b3dfa9..b31475a 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
@@ -22,6 +22,7 @@ from resource_management.core.logger import Logger
from resource_management.core.base import Fail
from resource_management import Script
from resource_management import Template
+from resource_management.libraries.functions.curl_krb_request import curl_krb_request
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -65,16 +66,32 @@ class AMSServiceCheck(Script):
def service_check_for_single_host(self, metric_collector_host, params):
random_value1 = random.random()
- headers = {"Content-type": "application/json"}
- ca_certs = os.path.join(params.ams_monitor_conf_dir,
- params.metric_truststore_ca_certs)
current_time = int(time.time()) * 1000
metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1,
current_time=current_time).get_content()
try:
- post_metrics_to_collector(self.AMS_METRICS_POST_URL, metric_collector_host, params.metric_collector_port, params.metric_collector_https_enabled,
- metric_json, headers, ca_certs, self.AMS_CONNECT_TRIES, self.AMS_CONNECT_TIMEOUT)
+ if is_spnego_enabled(params):
+ header= 'Content-Type: application/json'
+ method = 'POST'
+ tmp_dir = Script.get_tmp_dir()
+
+ protocol = "http"
+ if not callable(params.metric_collector_https_enabled):
+ if params.metric_collector_https_enabled:
+ protocol = "https"
+ port = str(params.metric_collector_port)
+ uri = '{0}://{1}:{2}{3}'.format(
+ protocol, metric_collector_host, port, self.AMS_METRICS_POST_URL)
+
+ call_curl_krb_request(tmp_dir, params.smoke_user_keytab, params.smoke_user_princ, uri, params.kinit_path_local, params.smoke_user,
+ self.AMS_CONNECT_TIMEOUT, method, metric_json, header, tries = self.AMS_CONNECT_TRIES)
+ else :
+ headers = {"Content-type": "application/json"}
+ ca_certs = os.path.join(params.ams_monitor_conf_dir,
+ params.metric_truststore_ca_certs)
+ post_metrics_to_collector(self.AMS_METRICS_POST_URL, metric_collector_host, params.metric_collector_port, params.metric_collector_https_enabled,
+ metric_json, headers, ca_certs, self.AMS_CONNECT_TRIES, self.AMS_CONNECT_TIMEOUT)
get_metrics_parameters = {
"metricNames": "AMBARI_METRICS.SmokeTest.FakeMetric",
@@ -87,55 +104,63 @@ class AMSServiceCheck(Script):
}
encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters)
- Logger.info("Connecting (GET) to %s:%s%s" % (metric_collector_host,
- params.metric_collector_port,
- self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters))
- for i in xrange(0, self.AMS_READ_TRIES):
- conn = network.get_http_connection(
- metric_collector_host,
- int(params.metric_collector_port),
- params.metric_collector_https_enabled,
- ca_certs,
- ssl_version=Script.get_force_https_protocol_value()
- )
- conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
- response = conn.getresponse()
- Logger.info("Http response for host %s : %s %s" % (metric_collector_host, response.status, response.reason))
-
- data = response.read()
- Logger.info("Http data: %s" % data)
- conn.close()
-
- if response.status == 200:
- Logger.info("Metrics were retrieved from host %s" % metric_collector_host)
- else:
- raise Fail("Metrics were not retrieved from host %s. GET request status: %s %s \n%s" %
- (metric_collector_host, response.status, response.reason, data))
- data_json = json.loads(data)
-
- def floats_eq(f1, f2, delta):
- return abs(f1-f2) < delta
-
- values_are_present = False
- for metrics_data in data_json["metrics"]:
- if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"]
- and floats_eq(metrics_data["metrics"][str(current_time)], random_value1, 0.0000001)
- and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)):
- Logger.info("Values %s and %s were found in the response from host %s." % (metric_collector_host, random_value1, current_time))
- values_are_present = True
- break
- pass
+ if is_spnego_enabled(params):
+ method = 'GET'
+ uri = '{0}://{1}:{2}{3}'.format(
+ protocol, metric_collector_host, port, self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
- if not values_are_present:
- if i < self.AMS_READ_TRIES - 1: #range/xrange returns items from start to end-1
- Logger.info("Values weren't stored yet. Retrying in %s seconds."
- % (self.AMS_READ_TIMEOUT))
- time.sleep(self.AMS_READ_TIMEOUT)
+ call_curl_krb_request(tmp_dir, params.smoke_user_keytab, params.smoke_user_princ, uri, params.kinit_path_local, params.smoke_user,
+ self.AMS_READ_TIMEOUT, method, tries = self.AMS_READ_TRIES, current_time = current_time, random_value = random_value1)
+ else:
+ Logger.info("Connecting (GET) to %s:%s%s" % (metric_collector_host,
+ params.metric_collector_port,
+ self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters))
+ for i in xrange(0, self.AMS_READ_TRIES):
+ conn = network.get_http_connection(
+ metric_collector_host,
+ int(params.metric_collector_port),
+ params.metric_collector_https_enabled,
+ ca_certs,
+ ssl_version=Script.get_force_https_protocol_value()
+ )
+ conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
+ response = conn.getresponse()
+ Logger.info("Http response for host %s : %s %s" % (metric_collector_host, response.status, response.reason))
+
+ data = response.read()
+ Logger.info("Http data: %s" % data)
+ conn.close()
+
+ if response.status == 200:
+ Logger.info("Metrics were retrieved from host %s" % metric_collector_host)
else:
- raise Fail("Values %s and %s were not found in the response." % (random_value1, current_time))
- else:
- break
- pass
+ raise Fail("Metrics were not retrieved from host %s. GET request status: %s %s \n%s" %
+ (metric_collector_host, response.status, response.reason, data))
+ data_json = json.loads(data)
+
+ def floats_eq(f1, f2, delta):
+ return abs(f1-f2) < delta
+
+ values_are_present = False
+ for metrics_data in data_json["metrics"]:
+ if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"]
+ and floats_eq(metrics_data["metrics"][str(current_time)], random_value1, 0.0000001)
+ and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)):
+ Logger.info("Values %s and %s were found in the response from host %s." % (metric_collector_host, random_value1, current_time))
+ values_are_present = True
+ break
+ pass
+
+ if not values_are_present:
+ if i < self.AMS_READ_TRIES - 1: #range/xrange returns items from start to end-1
+ Logger.info("Values weren't stored yet. Retrying in %s seconds."
+ % (self.AMS_READ_TIMEOUT))
+ time.sleep(self.AMS_READ_TIMEOUT)
+ else:
+ raise Fail("Values %s and %s were not found in the response." % (random_value1, current_time))
+ else:
+ break
+ pass
except Fail as ex:
Logger.warning("Ambari Metrics service check failed on collector host %s. Reason : %s" % (metric_collector_host, str(ex)))
raise Fail("Ambari Metrics service check failed on collector host %s. Reason : %s" % (metric_collector_host, str(ex)))
@@ -158,6 +183,78 @@ class AMSServiceCheck(Script):
Logger.warning(results[host].result)
raise Fail("All metrics collectors are unavailable.")
+def is_spnego_enabled(params):
+ return params.security_enabled \
+ and 'core-site' in params.config['configurations'] \
+ and 'hadoop.http.authentication.type' in params.config['configurations']['core-site'] \
+ and params.config['configurations']['core-site']['hadoop.http.authentication.type'] == "kerberos" \
+ and 'hadoop.http.filter.initializers' in params.config['configurations']['core-site'] \
+ and params.config['configurations']['core-site']['hadoop.http.filter.initializers'] == "org.apache.hadoop.security.AuthenticationFilterInitializer"
+
+def call_curl_krb_request(tmp_dir, user_keytab, user_princ, uri, kinit_path, user,
+ connection_timeout, method='GET', metric_json='', header='', tries = 1, current_time = 0, random_value = 0):
+ if method == 'POST':
+ Logger.info("Generated metrics for %s:\n%s" % (uri, metric_json))
+
+ for i in xrange(0, tries):
+ try:
+ Logger.info("Connecting (%s) to %s" % (method, uri));
+
+ response = None
+ errmsg = None
+ time_millis = 0
+
+ response, errmsg, time_millis = curl_krb_request(tmp_dir, user_keytab, user_princ, uri, 'ams_service_check',
+ kinit_path, False, "AMS Service Check", user,
+ connection_timeout=connection_timeout, kinit_timer_ms=0,
+ method=method, body=metric_json, header=header)
+ except Exception, exception:
+ if i < tries - 1: #range/xrange returns items from start to end-1
+ time.sleep(connection_timeout)
+ Logger.info("Connection failed for %s. Next retry in %s seconds."
+ % (uri, connection_timeout))
+ continue
+ else:
+ raise Fail("Unable to {0} metrics on: {1}. Exception: {2}".format(method, uri, str(exception)))
+ finally:
+ if not response:
+ Logger.error("Unable to {0} metrics on: {1}. Error: {2}".format(method, uri, errmsg))
+ else:
+ Logger.info("%s response from %s: %s, errmsg: %s" % (method, uri, response, errmsg));
+ try:
+ response.close()
+ except:
+ Logger.debug("Unable to close {0} connection to {1}".format(method, uri))
+
+ if method == 'GET':
+ data_json = json.loads(response)
+
+ def floats_eq(f1, f2, delta):
+ return abs(f1-f2) < delta
+
+ values_are_present = False
+ for metrics_data in data_json["metrics"]:
+ if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"]
+ and floats_eq(metrics_data["metrics"][str(current_time)], random_value, 0.0000001)
+ and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)):
+ Logger.info("Values %s and %s were found in the response from %s." % (uri, random_value, current_time))
+ values_are_present = True
+ break
+ pass
+
+ if not values_are_present:
+ if i < tries - 1: #range/xrange returns items from start to end-1
+ Logger.info("Values weren't stored yet. Retrying in %s seconds."
+ % (tries))
+ time.sleep(connection_timeout)
+ else:
+ raise Fail("Values %s and %s were not found in the response." % (random_value, current_time))
+ else:
+ break
+ pass
+ else:
+ break
+
def post_metrics_to_collector(ams_metrics_post_url, metric_collector_host, metric_collector_port, metric_collector_https_enabled,
metric_json, headers, ca_certs, tries = 1, connect_timeout = 10):
for i in xrange(0, tries):
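In outline, the service check now branches on whether the cluster's HTTP
endpoints sit behind SPNEGO, and if so drives the POST/GET through
curl_krb_request (kinit plus negotiate-authenticated curl) instead of the
plain httplib connection. A simplified, self-contained sketch of that
branching (the two send_* helpers are placeholders, not the real
resource_management API):

def is_spnego_enabled(security_enabled, core_site):
    # Mirrors the predicate added above: Kerberos HTTP auth plus the
    # AuthenticationFilterInitializer must both be configured in core-site.
    return (security_enabled
            and core_site.get("hadoop.http.authentication.type") == "kerberos"
            and core_site.get("hadoop.http.filter.initializers")
                == "org.apache.hadoop.security.AuthenticationFilterInitializer")

def send_spnego(uri, body):
    print("POST (SPNEGO) %s" % uri)  # placeholder for curl_krb_request(...)

def send_plain(uri, body):
    print("POST %s" % uri)           # placeholder for the httplib path

def post_metrics(security_enabled, core_site, uri, body):
    if is_spnego_enabled(security_enabled, core_site):
        send_spnego(uri, body)
    else:
        send_plain(uri, body)

post_metrics(True,
             {"hadoop.http.authentication.type": "kerberos",
              "hadoop.http.filter.initializers":
                  "org.apache.hadoop.security.AuthenticationFilterInitializer"},
             "http://collector:6188/ws/v1/timeline/metrics", "{}")

Both paths keep the same retry loop and the same floats_eq value check (a
1e-7 delta on the posted random value) against the metrics just written.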
[27/57] [abbrv] ambari git commit: AMBARI-21915. Log Search UI: unit
tests setup. (ababiichuk)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/yarn.lock
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/yarn.lock b/ambari-logsearch/ambari-logsearch-web/yarn.lock
index 4883a15..c55bb4d 100644
--- a/ambari-logsearch/ambari-logsearch-web/yarn.lock
+++ b/ambari-logsearch/ambari-logsearch-web/yarn.lock
@@ -2,67 +2,87 @@
# yarn lockfile v1
-"@angular/cli@1.0.0":
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/@angular/cli/-/cli-1.0.0.tgz#7bfde1e7c5f28bf5bed4dda1352ee67ee887302f"
+"@angular-devkit/build-optimizer@~0.0.18":
+ version "0.0.18"
+ resolved "https://registry.yarnpkg.com/@angular-devkit/build-optimizer/-/build-optimizer-0.0.18.tgz#bdf507a37403b07fc72437d792d4e2541b0f13ec"
+ dependencies:
+ loader-utils "^1.1.0"
+ source-map "^0.5.6"
+ typescript "^2.3.3"
+
+"@angular-devkit/core@0.0.12":
+ version "0.0.12"
+ resolved "https://registry.yarnpkg.com/@angular-devkit/core/-/core-0.0.12.tgz#bf044c8ed38b8e2ec2648bbc6fd0d92be99112da"
+
+"@angular-devkit/schematics@~0.0.21":
+ version "0.0.21"
+ resolved "https://registry.yarnpkg.com/@angular-devkit/schematics/-/schematics-0.0.21.tgz#ea4b3b6ce8fc86f50de9df3402a039469369a7eb"
+ dependencies:
+ "@angular-devkit/core" "0.0.12"
+ "@ngtools/json-schema" "^1.1.0"
+ minimist "^1.2.0"
+ rxjs "^5.4.2"
+
+"@angular/cli@^1.4.0":
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/@angular/cli/-/cli-1.4.0.tgz#c7b6f87c793e8147a4a4f5b472245b942be10556"
dependencies:
- "@ngtools/json-schema" "1.0.5"
- "@ngtools/webpack" "1.3.0"
+ "@angular-devkit/build-optimizer" "~0.0.18"
+ "@angular-devkit/schematics" "~0.0.21"
+ "@ngtools/json-schema" "1.1.0"
+ "@ngtools/webpack" "1.7.0"
+ "@schematics/angular" "~0.0.30"
autoprefixer "^6.5.3"
- chalk "^1.1.3"
+ chalk "^2.0.1"
+ circular-dependency-plugin "^3.0.0"
common-tags "^1.3.1"
- css-loader "^0.26.1"
+ copy-webpack-plugin "^4.0.1"
+ core-object "^3.1.0"
+ css-loader "^0.28.1"
cssnano "^3.10.0"
- debug "^2.1.3"
denodeify "^1.2.1"
- diff "^3.1.0"
- ember-cli-normalize-entity-name "^1.0.0"
ember-cli-string-utils "^1.0.0"
exports-loader "^0.6.3"
- extract-text-webpack-plugin "~2.0.0"
+ extract-text-webpack-plugin "3.0.0"
file-loader "^0.10.0"
- fs-extra "^2.0.0"
+ fs-extra "^4.0.0"
get-caller-file "^1.0.0"
glob "^7.0.3"
- html-webpack-plugin "^2.19.0"
- inflection "^1.7.0"
- inquirer "^3.0.0"
- isbinaryfile "^3.0.0"
+ heimdalljs "^0.2.4"
+ heimdalljs-logger "^0.1.9"
+ html-webpack-plugin "^2.29.0"
istanbul-instrumenter-loader "^2.0.0"
- json-loader "^0.5.4"
- karma-sourcemap-loader "^0.3.7"
- karma-webpack "^2.0.0"
+ karma-source-map-support "^1.2.0"
less "^2.7.2"
- less-loader "^2.2.3"
+ less-loader "^4.0.5"
+ license-webpack-plugin "^1.0.0"
lodash "^4.11.1"
- minimatch "^3.0.3"
+ memory-fs "^0.4.1"
node-modules-path "^1.0.0"
nopt "^4.0.1"
- opn "4.0.2"
+ opn "~5.1.0"
portfinder "~1.0.12"
- postcss-loader "^0.13.0"
+ postcss-loader "^1.3.3"
postcss-url "^5.1.2"
raw-loader "^0.5.1"
resolve "^1.1.7"
- rimraf "^2.5.3"
- rsvp "^3.0.17"
- rxjs "^5.0.1"
- sass-loader "^4.1.1"
- script-loader "^0.7.0"
+ rxjs "^5.4.2"
+ sass-loader "^6.0.3"
semver "^5.1.0"
silent-error "^1.0.0"
- source-map-loader "^0.1.5"
+ source-map-loader "^0.2.0"
+ source-map-support "^0.4.1"
style-loader "^0.13.1"
stylus "^0.54.5"
- stylus-loader "^2.4.0"
- temp "0.8.3"
- typescript ">=2.0.0 <2.3.0"
+ stylus-loader "^3.0.1"
+ typescript ">=2.0.0 <2.6.0"
url-loader "^0.5.7"
- walk-sync "^0.3.1"
- webpack "~2.2.0"
- webpack-dev-server "~2.3.0"
- webpack-merge "^2.4.0"
- zone.js "^0.7.2"
+ webpack "~3.5.5"
+ webpack-concat-plugin "1.4.0"
+ webpack-dev-middleware "~1.12.0"
+ webpack-dev-server "~2.7.1"
+ webpack-merge "^4.1.0"
+ zone.js "^0.8.14"
optionalDependencies:
node-sass "^4.3.0"
@@ -120,17 +140,17 @@
version "2.2.2"
resolved "https://registry.yarnpkg.com/@ngrx/store/-/store-2.2.2.tgz#a00305a6452032a3385886a11ce529dce2dae65b"
-"@ngtools/json-schema@1.0.5":
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/@ngtools/json-schema/-/json-schema-1.0.5.tgz#ad39037c70c88b245ac7267a71777646b6063d77"
+"@ngtools/json-schema@1.1.0", "@ngtools/json-schema@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@ngtools/json-schema/-/json-schema-1.1.0.tgz#c3a0c544d62392acc2813a42c8a0dc6f58f86922"
-"@ngtools/webpack@1.3.0":
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/@ngtools/webpack/-/webpack-1.3.0.tgz#a1071230985358ecdf87b2fa9879ae6cc6355e83"
+"@ngtools/webpack@1.7.0":
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/@ngtools/webpack/-/webpack-1.7.0.tgz#100b4ba370b3b9f991936f3d5db09cebffe11583"
dependencies:
enhanced-resolve "^3.1.0"
loader-utils "^1.0.2"
- magic-string "^0.19.0"
+ magic-string "^0.22.3"
source-map "^0.5.6"
"@ngx-translate/core@^6.0.1":
@@ -141,6 +161,10 @@
version "0.0.3"
resolved "https://registry.yarnpkg.com/@ngx-translate/http-loader/-/http-loader-0.0.3.tgz#8346c8d2d6f630254601029668f17abe2afe8a9b"
+"@schematics/angular@~0.0.30":
+ version "0.0.33"
+ resolved "https://registry.yarnpkg.com/@schematics/angular/-/angular-0.0.33.tgz#bc0b28356af46fe9ec64495588ee61503fd34ce5"
+
"@types/d3-array@*":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-1.2.0.tgz#9b1fc3202fc1a9f7da0f2873bd38b443137a9d34"
@@ -357,23 +381,34 @@ abbrev@1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f"
-accepts@1.3.3, accepts@~1.3.3:
+accepts@1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca"
dependencies:
mime-types "~2.1.11"
negotiator "0.6.1"
+accepts@~1.3.3:
+ version "1.3.4"
+ resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.4.tgz#86246758c7dd6d21a6474ff084a4740ec05eb21f"
+ dependencies:
+ mime-types "~2.1.16"
+ negotiator "0.6.1"
+
acorn-dynamic-import@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz#c752bd210bef679501b6c6cb7fc84f8f47158cc4"
dependencies:
acorn "^4.0.3"
-acorn@^4.0.3, acorn@^4.0.4:
+acorn@^4.0.3:
version "4.0.13"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
+acorn@^5.0.0:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.1.2.tgz#911cb53e036807cf0fa778dc5d370fbd864246d7"
+
adm-zip@0.4.4:
version "0.4.4"
resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.4.tgz#a61ed5ae6905c3aea58b3a657d25033091052736"
@@ -393,17 +428,26 @@ agent-base@2:
extend "~3.0.0"
semver "~5.0.1"
-ajv-keywords@^1.1.1:
- version "1.5.1"
- resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c"
+ajv-keywords@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.0.tgz#a296e17f7bfae7c1ce4f7e0de53d29cb32162df0"
-ajv@^4.11.2, ajv@^4.7.0, ajv@^4.9.1:
+ajv@^4.9.1:
version "4.11.8"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
dependencies:
co "^4.6.0"
json-stable-stringify "^1.0.1"
+ajv@^5.0.0, ajv@^5.1.5:
+ version "5.2.2"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.2.tgz#47c68d69e86f5d953103b0074a9430dc63da5e39"
+ dependencies:
+ co "^4.6.0"
+ fast-deep-equal "^1.0.0"
+ json-schema-traverse "^0.3.0"
+ json-stable-stringify "^1.0.1"
+
align-text@^0.1.1, align-text@^0.1.3:
version "0.1.4"
resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117"
@@ -456,10 +500,6 @@ ansi-align@^2.0.0:
dependencies:
string-width "^2.0.0"
-ansi-escapes@^1.1.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e"
-
ansi-html@0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e"
@@ -472,6 +512,12 @@ ansi-styles@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
+ansi-styles@^3.1.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88"
+ dependencies:
+ color-convert "^1.9.0"
+
any-promise@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f"
@@ -528,6 +574,10 @@ array-flatten@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+array-flatten@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296"
+
array-slice@^0.2.3:
version "0.2.3"
resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5"
@@ -555,8 +605,8 @@ arrify@^1.0.0:
resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
asap@~2.0.3:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.5.tgz#522765b50c3510490e52d7dcfe085ef9ba96958f"
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
asn1.js@^4.0.0:
version "4.9.1"
@@ -592,7 +642,7 @@ async-foreach@^0.1.3:
version "0.1.3"
resolved "https://registry.yarnpkg.com/async-foreach/-/async-foreach-0.1.3.tgz#36121f845c0578172de419a97dbeb1d16ec34542"
-async@^0.9.0, async@~0.9.0:
+async@^0.9.0:
version "0.9.2"
resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d"
@@ -600,7 +650,13 @@ async@^1.4.0, async@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
-async@^2.0.1, async@^2.1.2, async@^2.1.4:
+async@^2.1.2, async@^2.1.5, async@^2.4.1:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d"
+ dependencies:
+ lodash "^4.14.0"
+
+async@^2.1.4:
version "2.4.1"
resolved "https://registry.yarnpkg.com/async/-/async-2.4.1.tgz#62a56b279c98a11d0987096a01cc3eeb8eb7bbd7"
dependencies:
@@ -633,7 +689,15 @@ aws4@^1.2.1:
version "1.6.0"
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e"
-babel-code-frame@^6.11.0, babel-code-frame@^6.20.0, babel-code-frame@^6.22.0:
+babel-code-frame@^6.11.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
+ dependencies:
+ chalk "^1.1.3"
+ esutils "^2.0.2"
+ js-tokens "^3.0.2"
+
+babel-code-frame@^6.20.0, babel-code-frame@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4"
dependencies:
@@ -660,7 +724,14 @@ babel-messages@^6.23.0:
dependencies:
babel-runtime "^6.22.0"
-babel-runtime@^6.18.0, babel-runtime@^6.22.0:
+babel-runtime@^6.18.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
+ dependencies:
+ core-js "^2.4.0"
+ regenerator-runtime "^0.11.0"
+
+babel-runtime@^6.22.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.23.0.tgz#0a9489f144de70efb3ce4300accdb329e2fc543b"
dependencies:
@@ -704,6 +775,10 @@ babylon@^6.11.0, babylon@^6.13.0, babylon@^6.15.0:
version "6.17.1"
resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.1.tgz#17f14fddf361b695981fe679385e4f1c01ebd86f"
+babylon@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
+
backo2@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947"
@@ -717,8 +792,8 @@ base64-arraybuffer@0.1.5:
resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8"
base64-js@^1.0.2:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.0.tgz#a39992d723584811982be5e290bb6a53d86700f1"
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.1.tgz#a91947da1f4a516ea38e5b4ec0ec3773675e0886"
base64id@1.0.0:
version "1.0.0"
@@ -764,13 +839,17 @@ blocking-proxy@0.0.5:
dependencies:
minimist "^1.2.0"
+bluebird@^2.10.2:
+ version "2.11.0"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-2.11.0.tgz#534b9033c022c9579c56ba3b3e5a5caafbb650e1"
+
bluebird@^3.3.0, bluebird@^3.4.7:
version "3.5.0"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.0.tgz#791420d7f551eea2897453a8a77653f96606d67c"
bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0:
- version "4.11.6"
- resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.6.tgz#53344adb14617a13f6e8dd2ce28905d1c0ba3215"
+ version "4.11.8"
+ resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f"
body-parser@^1.12.4:
version "1.17.2"
@@ -787,6 +866,17 @@ body-parser@^1.12.4:
raw-body "~2.2.0"
type-is "~1.6.15"
+bonjour@^3.5.0:
+ version "3.5.0"
+ resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5"
+ dependencies:
+ array-flatten "^2.1.0"
+ deep-equal "^1.0.1"
+ dns-equal "^1.0.0"
+ dns-txt "^2.0.2"
+ multicast-dns "^6.0.1"
+ multicast-dns-service-types "^1.1.0"
+
boolbase@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
@@ -839,14 +929,15 @@ brorand@^1.0.1:
resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
browserify-aes@^1.0.0, browserify-aes@^1.0.4:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.0.6.tgz#5e7725dbdef1fd5930d4ebab48567ce451c48a0a"
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.0.8.tgz#c8fa3b1b7585bb7ba77c5560b60996ddec6d5309"
dependencies:
- buffer-xor "^1.0.2"
+ buffer-xor "^1.0.3"
cipher-base "^1.0.0"
create-hash "^1.1.0"
- evp_bytestokey "^1.0.0"
+ evp_bytestokey "^1.0.3"
inherits "^2.0.1"
+ safe-buffer "^5.0.1"
browserify-cipher@^1.0.0:
version "1.0.0"
@@ -896,11 +987,15 @@ browserslist@^1.3.6, browserslist@^1.5.2, browserslist@^1.7.6:
caniuse-db "^1.0.30000639"
electron-to-chromium "^1.2.7"
+buffer-indexof@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c"
+
buffer-shims@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/buffer-shims/-/buffer-shims-1.0.0.tgz#9978ce317388c649ad8793028c3477ef044a8b51"
-buffer-xor@^1.0.2:
+buffer-xor@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
@@ -920,14 +1015,14 @@ builtin-status-codes@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
-bytes@2.3.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.3.0.tgz#d5b680a165b6201739acb611542aabc2d8ceb070"
-
bytes@2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.4.0.tgz#7d97196f9d5baf7f6935e25985549edd2a6c2339"
+bytes@2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.5.0.tgz#4c9423ea2d252c270c41b2bdefeff9bb6b62c06a"
+
callsite@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20"
@@ -958,7 +1053,7 @@ camelcase@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a"
-camelcase@^4.0.0:
+camelcase@^4.0.0, camelcase@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
@@ -972,8 +1067,8 @@ caniuse-api@^1.5.2:
lodash.uniq "^4.5.0"
caniuse-db@^1.0.30000529, caniuse-db@^1.0.30000634, caniuse-db@^1.0.30000639:
- version "1.0.30000676"
- resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000676.tgz#82ea578237637c8ff34a28acaade373b624c4ea8"
+ version "1.0.30000726"
+ resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000726.tgz#9bb742f8d026a62df873bc03c06843d2255b60d7"
capture-stack-trace@^1.0.0:
version "1.0.0"
@@ -1000,7 +1095,19 @@ chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3:
strip-ansi "^3.0.0"
supports-color "^2.0.0"
-chokidar@^1.4.1, chokidar@^1.4.3, chokidar@^1.6.0:
+chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.1.0.tgz#ac5becf14fa21b99c6c92ca7a7d7cfd5b17e743e"
+ dependencies:
+ ansi-styles "^3.1.0"
+ escape-string-regexp "^1.0.5"
+ supports-color "^4.0.0"
+
+charenc@~0.0.1:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
+
+chokidar@^1.4.1, chokidar@^1.6.0, chokidar@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468"
dependencies:
@@ -1016,20 +1123,25 @@ chokidar@^1.4.1, chokidar@^1.4.3, chokidar@^1.6.0:
fsevents "^1.0.0"
cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.3.tgz#eeabf194419ce900da3018c207d212f2a6df0a07"
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de"
dependencies:
inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+circular-dependency-plugin@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/circular-dependency-plugin/-/circular-dependency-plugin-3.0.0.tgz#9b68692e35b0e3510998d0164b6ae5011bea5760"
clap@^1.0.9:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/clap/-/clap-1.1.3.tgz#b3bd36e93dd4cbfb395a3c26896352445265c05b"
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/clap/-/clap-1.2.0.tgz#59c90fe3e137104746ff19469a27a634ff68c857"
dependencies:
chalk "^1.1.3"
clean-css@4.1.x:
- version "4.1.3"
- resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.1.3.tgz#07cfe8980edb20d455ddc23aadcf1e04c6e509ce"
+ version "4.1.8"
+ resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.1.8.tgz#061455b2494a750ac98f46d8d5ebb17c679ea9d1"
dependencies:
source-map "0.5.x"
@@ -1037,16 +1149,6 @@ cli-boxes@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143"
-cli-cursor@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5"
- dependencies:
- restore-cursor "^2.0.0"
-
-cli-width@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.1.0.tgz#b234ca209b29ef66fc518d9b98d5847b00edf00a"
-
cliui@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1"
@@ -1063,17 +1165,30 @@ cliui@^3.2.0:
strip-ansi "^3.0.1"
wrap-ansi "^2.0.0"
+clone-deep@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-0.3.0.tgz#348c61ae9cdbe0edfe053d91ff4cc521d790ede8"
+ dependencies:
+ for-own "^1.0.0"
+ is-plain-object "^2.0.1"
+ kind-of "^3.2.2"
+ shallow-clone "^0.1.2"
+
clone@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.2.tgz#260b7a99ebb1edfe247538175f783243cb19d149"
+clone@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.1.tgz#d217d1e961118e3ac9a4b8bba3285553bf647cdb"
+
co@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
coa@~1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/coa/-/coa-1.0.2.tgz#2ba9fec3b4aa43d7a49d7e6c3561e92061b6bcec"
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/coa/-/coa-1.0.4.tgz#a9ef153660d6a86a8bdec0289a5c684d217432fd"
dependencies:
q "^1.1.2"
@@ -1092,15 +1207,15 @@ codelyzer@~2.0.0:
source-map "^0.5.6"
sprintf-js "^1.0.3"
-color-convert@^1.3.0:
+color-convert@^1.3.0, color-convert@^1.9.0:
version "1.9.0"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a"
dependencies:
color-name "^1.1.1"
color-name@^1.0.0, color-name@^1.1.1:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.2.tgz#5c8ab72b64bd2215d617ae9559ebb148475cf98d"
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
color-string@^0.3.0:
version "0.3.0"
@@ -1140,12 +1255,16 @@ combined-stream@^1.0.5, combined-stream@~1.0.5:
dependencies:
delayed-stream "~1.0.0"
-commander@2, commander@2.9.x, commander@~2.9.0:
+commander@2:
version "2.9.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4"
dependencies:
graceful-readlink ">= 1.0.0"
+commander@2.11.x, commander@~2.11.0:
+ version "2.11.0"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563"
+
common-tags@^1.3.1:
version "1.4.0"
resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.4.0.tgz#1187be4f3d4cf0c0427d43f74eef1f73501614c0"
@@ -1168,27 +1287,36 @@ component-inherit@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143"
-compressible@~2.0.8:
- version "2.0.10"
- resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.10.tgz#feda1c7f7617912732b29bf8cf26252a20b9eecd"
+compressible@~2.0.10:
+ version "2.0.11"
+ resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.11.tgz#16718a75de283ed8e604041625a2064586797d8a"
dependencies:
- mime-db ">= 1.27.0 < 2"
+ mime-db ">= 1.29.0 < 2"
compression@^1.5.2:
- version "1.6.2"
- resolved "https://registry.yarnpkg.com/compression/-/compression-1.6.2.tgz#cceb121ecc9d09c52d7ad0c3350ea93ddd402bc3"
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.0.tgz#030c9f198f1643a057d776a738e922da4373012d"
dependencies:
accepts "~1.3.3"
- bytes "2.3.0"
- compressible "~2.0.8"
- debug "~2.2.0"
+ bytes "2.5.0"
+ compressible "~2.0.10"
+ debug "2.6.8"
on-headers "~1.0.1"
- vary "~1.1.0"
+ safe-buffer "5.1.1"
+ vary "~1.1.1"
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+concat-stream@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7"
+ dependencies:
+ inherits "^2.0.3"
+ readable-stream "^2.2.2"
+ typedarray "^0.0.6"
+
configstore@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/configstore/-/configstore-3.1.0.tgz#45df907073e26dfa1cf4b2d52f5b60545eaa11d1"
@@ -1247,14 +1375,45 @@ cookie@0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
+copy-webpack-plugin@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-4.0.1.tgz#9728e383b94316050d0c7463958f2b85c0aa8200"
+ dependencies:
+ bluebird "^2.10.2"
+ fs-extra "^0.26.4"
+ glob "^6.0.4"
+ is-glob "^3.1.0"
+ loader-utils "^0.2.15"
+ lodash "^4.3.0"
+ minimatch "^3.0.0"
+ node-dir "^0.1.10"
+
core-js@^2.2.0, core-js@^2.4.0, core-js@^2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e"
+core-object@^3.1.0:
+ version "3.1.5"
+ resolved "https://registry.yarnpkg.com/core-object/-/core-object-3.1.5.tgz#fa627b87502adc98045e44678e9a8ec3b9c0d2a9"
+ dependencies:
+ chalk "^2.0.0"
+
core-util-is@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
+cosmiconfig@^2.1.0, cosmiconfig@^2.1.1:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-2.2.2.tgz#6173cebd56fac042c1f4390edf7af6c07c7cb892"
+ dependencies:
+ is-directory "^0.3.1"
+ js-yaml "^3.4.3"
+ minimist "^1.2.0"
+ object-assign "^4.1.0"
+ os-homedir "^1.0.1"
+ parse-json "^2.2.0"
+ require-from-string "^1.1.0"
+
create-ecdh@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.0.tgz#888c723596cdf7612f6498233eebd7a35301737d"
@@ -1268,7 +1427,7 @@ create-error-class@^3.0.0:
dependencies:
capture-stack-trace "^1.0.0"
-create-hash@^1.1.0, create-hash@^1.1.1, create-hash@^1.1.2:
+create-hash@^1.1.0, create-hash@^1.1.2:
version "1.1.3"
resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.1.3.tgz#606042ac8b9262750f483caddab0f5819172d8fd"
dependencies:
@@ -1302,6 +1461,18 @@ cross-spawn@^3.0.0:
lru-cache "^4.0.1"
which "^1.2.9"
+cross-spawn@^5.0.1:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
+ dependencies:
+ lru-cache "^4.0.1"
+ shebang-command "^1.2.0"
+ which "^1.2.9"
+
+crypt@~0.0.1:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
+
cryptiles@2.x.x:
version "2.0.5"
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
@@ -1309,8 +1480,8 @@ cryptiles@2.x.x:
boom "2.x.x"
crypto-browserify@^3.11.0:
- version "3.11.0"
- resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.0.tgz#3652a0906ab9b2a7e0c3ce66a408e957a2485522"
+ version "3.11.1"
+ resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.11.1.tgz#948945efc6757a400d6e5e5af47194d10064279f"
dependencies:
browserify-cipher "^1.0.0"
browserify-sign "^4.0.0"
@@ -1331,13 +1502,14 @@ css-color-names@0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0"
-css-loader@^0.26.1:
- version "0.26.4"
- resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-0.26.4.tgz#b61e9e30db94303e6ffc892f10ecd09ad025a1fd"
+css-loader@^0.28.1:
+ version "0.28.7"
+ resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-0.28.7.tgz#5f2ee989dd32edd907717f953317656160999c1b"
dependencies:
babel-code-frame "^6.11.0"
css-selector-tokenizer "^0.7.0"
cssnano ">=2.6.1 <4"
+ icss-utils "^2.1.0"
loader-utils "^1.0.2"
lodash.camelcase "^4.3.0"
object-assign "^4.0.1"
@@ -1346,7 +1518,8 @@ css-loader@^0.26.1:
postcss-modules-local-by-default "^1.0.1"
postcss-modules-scope "^1.0.0"
postcss-modules-values "^1.1.0"
- source-list-map "^0.1.7"
+ postcss-value-parser "^3.3.0"
+ source-list-map "^2.0.0"
css-parse@1.7.x:
version "1.7.0"
@@ -1647,6 +1820,12 @@ d3@^4.10.0:
d3-voronoi "1.1.2"
d3-zoom "1.5.0"
+d@1:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f"
+ dependencies:
+ es5-ext "^0.10.9"
+
dashdash@^1.12.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
@@ -1657,13 +1836,19 @@ date-now@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b"
-debug@*, debug@2, debug@2.6.8, debug@^2.1.3, debug@^2.2.0, debug@^2.6.3, debug@^2.6.8:
+debug@*:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.0.1.tgz#0564c612b521dc92d9f2988f0549e34f9c98db64"
+ dependencies:
+ ms "2.0.0"
+
+debug@2, debug@2.6.8, debug@^2.2.0, debug@^2.6.3, debug@^2.6.6, debug@^2.6.8:
version "2.6.8"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc"
dependencies:
ms "2.0.0"
-debug@2.2.0, debug@~2.2.0:
+debug@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da"
dependencies:
@@ -1685,6 +1870,10 @@ decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
+deep-equal@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5"
+
deep-extend@~0.4.0:
version "0.4.2"
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f"
@@ -1711,6 +1900,17 @@ del@^2.2.0:
pinkie-promise "^2.0.0"
rimraf "^2.2.8"
+del@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/del/-/del-3.0.0.tgz#53ecf699ffcbcb39637691ab13baf160819766e5"
+ dependencies:
+ globby "^6.1.0"
+ is-path-cwd "^1.0.0"
+ is-path-in-cwd "^1.0.0"
+ p-map "^1.1.1"
+ pify "^3.0.0"
+ rimraf "^2.2.8"
+
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
@@ -1727,6 +1927,10 @@ depd@1.1.0, depd@~1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.0.tgz#e1bd82c6aab6ced965b97b88b17ed3e528ca18c3"
+depd@1.1.1, depd@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359"
+
des.js@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.0.tgz#c074d2e2aa6a8a9a07dbd61f9a15c2cd83ec8ecc"
@@ -1772,6 +1976,23 @@ directory-encoder@^0.7.2:
handlebars "^1.3.0"
img-stats "^0.5.2"
+dns-equal@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d"
+
+dns-packet@^1.0.1:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.2.2.tgz#a8a26bec7646438963fc86e06f8f8b16d6c8bf7a"
+ dependencies:
+ ip "^1.1.0"
+ safe-buffer "^5.0.1"
+
+dns-txt@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6"
+ dependencies:
+ buffer-indexof "^1.0.0"
+
dom-converter@~0.1:
version "0.1.4"
resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.1.4.tgz#a45ef5727b890c9bffe6d7c876e7b19cb0e17f3b"
@@ -1845,9 +2066,13 @@ ee-first@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+ejs@^2.5.7:
+ version "2.5.7"
+ resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.5.7.tgz#cc872c168880ae3c7189762fd5ffc00896c9518a"
+
electron-to-chromium@^1.2.7:
- version "1.3.13"
- resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.13.tgz#1b3a5eace6e087bb5e257a100b0cbfe81b2891fc"
+ version "1.3.21"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.21.tgz#a967ebdcfe8ed0083fc244d1894022a8e8113ea2"
elliptic@^6.0.0:
version "6.4.0"
@@ -1861,12 +2086,6 @@ elliptic@^6.0.0:
minimalistic-assert "^1.0.0"
minimalistic-crypto-utils "^1.0.0"
-ember-cli-normalize-entity-name@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/ember-cli-normalize-entity-name/-/ember-cli-normalize-entity-name-1.0.0.tgz#0b14f7bcbc599aa117b5fddc81e4fd03c4bad5b7"
- dependencies:
- silent-error "^1.0.0"
-
ember-cli-string-utils@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/ember-cli-string-utils/-/ember-cli-string-utils-1.1.0.tgz#39b677fc2805f55173735376fcef278eaa4452a1"
@@ -1918,18 +2137,14 @@ engine.io@1.8.2:
engine.io-parser "1.3.2"
ws "1.1.1"
-enhanced-resolve@^3.0.0, enhanced-resolve@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.1.0.tgz#9f4b626f577245edcf4b2ad83d86e17f4f421dec"
+enhanced-resolve@^3.1.0, enhanced-resolve@^3.4.0:
+ version "3.4.1"
+ resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e"
dependencies:
graceful-fs "^4.1.2"
memory-fs "^0.4.0"
object-assign "^4.0.1"
- tapable "^0.2.5"
-
-ensure-posix-path@^1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/ensure-posix-path/-/ensure-posix-path-1.0.2.tgz#a65b3e42d0b71cfc585eb774f9943c8d9b91b0c2"
+ tapable "^0.2.7"
ent@~2.2.0:
version "2.2.0"
@@ -1951,6 +2166,62 @@ error-ex@^1.2.0:
dependencies:
is-arrayish "^0.2.1"
+es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14:
+ version "0.10.30"
+ resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.30.tgz#7141a16836697dbabfaaaeee41495ce29f52c939"
+ dependencies:
+ es6-iterator "2"
+ es6-symbol "~3.1"
+
+es6-iterator@2, es6-iterator@^2.0.1, es6-iterator@~2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.1.tgz#8e319c9f0453bf575d374940a655920e59ca5512"
+ dependencies:
+ d "1"
+ es5-ext "^0.10.14"
+ es6-symbol "^3.1"
+
+es6-map@^0.1.3:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+ es6-iterator "~2.0.1"
+ es6-set "~0.1.5"
+ es6-symbol "~3.1.1"
+ event-emitter "~0.3.5"
+
+es6-promise@~4.0.3:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.0.5.tgz#7882f30adde5b240ccfa7f7d78c548330951ae42"
+
+es6-set@~0.1.5:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+ es6-iterator "~2.0.1"
+ es6-symbol "3.1.1"
+ event-emitter "~0.3.5"
+
+es6-symbol@3.1.1, es6-symbol@^3.1, es6-symbol@^3.1.1, es6-symbol@~3.1, es6-symbol@~3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+
+es6-weak-map@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f"
+ dependencies:
+ d "1"
+ es5-ext "^0.10.14"
+ es6-iterator "^2.0.1"
+ es6-symbol "^3.1.1"
+
escape-html@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
@@ -1959,6 +2230,15 @@ escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+escope@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3"
+ dependencies:
+ es6-map "^0.1.3"
+ es6-weak-map "^2.0.1"
+ esrecurse "^4.1.0"
+ estraverse "^4.1.1"
+
esprima@^2.6.0:
version "2.7.3"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581"
@@ -1967,6 +2247,21 @@ esprima@^3.1.1:
version "3.1.3"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633"
+esprima@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804"
+
+esrecurse@^4.1.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.0.tgz#fa9568d98d3823f9a41d91e902dcab9ea6e5b163"
+ dependencies:
+ estraverse "^4.1.0"
+ object-assign "^4.0.1"
+
+estraverse@^4.1.0, estraverse@^4.1.1:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
+
esutils@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
@@ -1975,6 +2270,13 @@ etag@~1.8.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.0.tgz#6f631aef336d6c46362b51764044ce216be3c051"
+event-emitter@~0.3.5:
+ version "0.3.5"
+ resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
+ dependencies:
+ d "1"
+ es5-ext "~0.10.14"
+
eventemitter3@1.x.x:
version "1.2.0"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-1.2.0.tgz#1c86991d816ad1e504750e73874224ecf3bec508"
@@ -1983,17 +2285,18 @@ events@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
-eventsource@~0.1.6:
+eventsource@0.1.6:
version "0.1.6"
resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-0.1.6.tgz#0acede849ed7dd1ccc32c811bb11b944d4f29232"
dependencies:
original ">=0.0.5"
-evp_bytestokey@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.0.tgz#497b66ad9fef65cd7c08a6180824ba1476b66e53"
+evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02"
dependencies:
- create-hash "^1.1.1"
+ md5.js "^1.3.4"
+ safe-buffer "^5.1.1"
execa@^0.4.0:
version "0.4.0"
@@ -2006,6 +2309,18 @@ execa@^0.4.0:
path-key "^1.0.0"
strip-eof "^1.0.0"
+execa@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777"
+ dependencies:
+ cross-spawn "^5.0.1"
+ get-stream "^3.0.0"
+ is-stream "^1.1.0"
+ npm-run-path "^2.0.0"
+ p-finally "^1.0.0"
+ signal-exit "^3.0.0"
+ strip-eof "^1.0.0"
+
exit@^0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c"
@@ -2045,8 +2360,8 @@ exports-loader@^0.6.3:
source-map "0.5.x"
express@^4.13.3:
- version "4.15.3"
- resolved "https://registry.yarnpkg.com/express/-/express-4.15.3.tgz#bab65d0f03aa80c358408972fc700f916944b662"
+ version "4.15.4"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.15.4.tgz#032e2253489cf8fce02666beca3d11ed7a2daed1"
dependencies:
accepts "~1.3.3"
array-flatten "1.1.1"
@@ -2054,23 +2369,23 @@ express@^4.13.3:
content-type "~1.0.2"
cookie "0.3.1"
cookie-signature "1.0.6"
- debug "2.6.7"
- depd "~1.1.0"
+ debug "2.6.8"
+ depd "~1.1.1"
encodeurl "~1.0.1"
escape-html "~1.0.3"
etag "~1.8.0"
- finalhandler "~1.0.3"
+ finalhandler "~1.0.4"
fresh "0.5.0"
merge-descriptors "1.0.1"
methods "~1.1.2"
on-finished "~2.3.0"
parseurl "~1.3.1"
path-to-regexp "0.1.7"
- proxy-addr "~1.1.4"
- qs "6.4.0"
+ proxy-addr "~1.1.5"
+ qs "6.5.0"
range-parser "~1.2.0"
- send "0.15.3"
- serve-static "1.12.3"
+ send "0.15.4"
+ serve-static "1.12.4"
setprototypeof "1.0.3"
statuses "~1.3.1"
type-is "~1.6.15"
@@ -2081,33 +2396,38 @@ extend@3, extend@^3.0.0, extend@~3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444"
-external-editor@^2.0.1:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.0.4.tgz#1ed9199da9cbfe2ef2f7a31b2fde8b0d12368972"
- dependencies:
- iconv-lite "^0.4.17"
- jschardet "^1.4.2"
- tmp "^0.0.31"
-
extglob@^0.3.1:
version "0.3.2"
resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1"
dependencies:
is-extglob "^1.0.0"
-extract-text-webpack-plugin@~2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/extract-text-webpack-plugin/-/extract-text-webpack-plugin-2.0.0.tgz#8640f72609800a3528f13a2a9634d566a5c1ae60"
+extract-text-webpack-plugin@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/extract-text-webpack-plugin/-/extract-text-webpack-plugin-3.0.0.tgz#90caa7907bc449f335005e3ac7532b41b00de612"
dependencies:
- ajv "^4.11.2"
- async "^2.1.2"
- loader-utils "^1.0.2"
- webpack-sources "^0.1.0"
+ async "^2.4.1"
+ loader-utils "^1.1.0"
+ schema-utils "^0.3.0"
+ webpack-sources "^1.0.1"
+
+extract-zip@~1.6.5:
+ version "1.6.5"
+ resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.6.5.tgz#99a06735b6ea20ea9b705d779acffcc87cff0440"
+ dependencies:
+ concat-stream "1.6.0"
+ debug "2.2.0"
+ mkdirp "0.5.0"
+ yauzl "2.4.1"
extsprintf@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550"
+fast-deep-equal@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff"
+
fastparse@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.1.tgz#d1e2643b38a94d7583b479060e6c4affc94071f8"
@@ -2124,11 +2444,11 @@ faye-websocket@~0.11.0:
dependencies:
websocket-driver ">=0.5.1"
-figures@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
+fd-slicer@~1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.0.1.tgz#8b5bcbd9ec327c5041bf9ab023fd6750f1177e65"
dependencies:
- escape-string-regexp "^1.0.5"
+ pend "~1.2.0"
file-loader@^0.10.0:
version "0.10.1"
@@ -2157,7 +2477,7 @@ fill-range@^2.1.0:
repeat-element "^1.1.2"
repeat-string "^1.5.2"
-finalhandler@1.0.3, finalhandler@~1.0.3:
+finalhandler@1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.3.tgz#ef47e77950e999780e86022a560e3217e0d0cc89"
dependencies:
@@ -2169,6 +2489,18 @@ finalhandler@1.0.3, finalhandler@~1.0.3:
statuses "~1.3.1"
unpipe "~1.0.0"
+finalhandler@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.4.tgz#18574f2e7c4b98b8ae3b230c21f201f31bdb3fb7"
+ dependencies:
+ debug "2.6.8"
+ encodeurl "~1.0.1"
+ escape-html "~1.0.3"
+ on-finished "~2.3.0"
+ parseurl "~1.3.1"
+ statuses "~1.3.1"
+ unpipe "~1.0.0"
+
find-up@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f"
@@ -2176,6 +2508,12 @@ find-up@^1.0.0:
path-exists "^2.0.0"
pinkie-promise "^2.0.0"
+find-up@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
+ dependencies:
+ locate-path "^2.0.0"
+
findup-sync@~0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-0.3.0.tgz#37930aa5d816b777c03445e1966cc6790a4c0b16"
@@ -2190,6 +2528,10 @@ font-awesome@^4.7.0:
version "4.7.0"
resolved "https://registry.yarnpkg.com/font-awesome/-/font-awesome-4.7.0.tgz#8fa8cf0411a1a31afd07b06d2902bb9fc815a133"
+for-in@^0.1.3:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/for-in/-/for-in-0.1.8.tgz#d8773908e31256109952b1fdb9b3fa867d2775e1"
+
for-in@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
@@ -2200,6 +2542,12 @@ for-own@^0.1.4:
dependencies:
for-in "^1.0.1"
+for-own@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b"
+ dependencies:
+ for-in "^1.0.1"
+
forever-agent@~0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
@@ -2220,12 +2568,6 @@ fresh@0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.0.tgz#f474ca5e6a9246d6fd8e0953cfa9b9c805afa78e"
-fs-access@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a"
- dependencies:
- null-check "^1.0.0"
-
fs-extra@^0.23.1:
version "0.23.1"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.23.1.tgz#6611dba6adf2ab8dc9c69fab37cddf8818157e3d"
@@ -2235,12 +2577,31 @@ fs-extra@^0.23.1:
path-is-absolute "^1.0.0"
rimraf "^2.2.8"
-fs-extra@^2.0.0:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-2.1.2.tgz#046c70163cef9aad46b0e4a7fa467fb22d71de35"
+fs-extra@^0.26.4:
+ version "0.26.7"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.26.7.tgz#9ae1fdd94897798edab76d0918cf42d0c3184fa9"
dependencies:
graceful-fs "^4.1.2"
jsonfile "^2.1.0"
+ klaw "^1.0.0"
+ path-is-absolute "^1.0.0"
+ rimraf "^2.2.8"
+
+fs-extra@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.1.tgz#7fc0c6c8957f983f57f306a24e5b9ddd8d0dd880"
+ dependencies:
+ graceful-fs "^4.1.2"
+ jsonfile "^3.0.0"
+ universalify "^0.1.0"
+
+fs-extra@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-1.0.0.tgz#cd3ce5f7e7cb6145883fcae3191e9877f8587950"
+ dependencies:
+ graceful-fs "^4.1.2"
+ jsonfile "^2.1.0"
+ klaw "^1.0.0"
fs.realpath@^1.0.0:
version "1.0.0"
@@ -2271,8 +2632,8 @@ fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2:
rimraf "2"
function-bind@^1.0.2:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.0.tgz#16176714c801798e4e8f2cf7f7529467bb4a5771"
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
gauge@~2.7.3:
version "2.7.4"
@@ -2335,9 +2696,19 @@ glob@7.0.x:
once "^1.3.0"
path-is-absolute "^1.0.0"
-glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1, glob@~7.1.1:
- version "7.1.2"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
+glob@^6.0.4:
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22"
+ dependencies:
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "2 || 3"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1, glob@~7.1.1:
+ version "7.1.2"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
@@ -2371,12 +2742,22 @@ globby@^5.0.0:
pify "^2.0.0"
pinkie-promise "^2.0.0"
+globby@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
+ dependencies:
+ array-union "^1.0.1"
+ glob "^7.0.3"
+ object-assign "^4.0.1"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+
globule@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/globule/-/globule-1.1.0.tgz#c49352e4dc183d85893ee825385eb994bb6df45f"
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/globule/-/globule-1.2.0.tgz#1dc49c6822dd9e8a2fa00ba2a295006e8664bd09"
dependencies:
glob "~7.1.1"
- lodash "~4.16.4"
+ lodash "~4.17.4"
minimatch "~3.0.2"
got@^6.7.1:
@@ -2395,7 +2776,7 @@ got@^6.7.1:
unzip-response "^2.0.1"
url-parse-lax "^1.0.0"
-graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6:
+graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9:
version "4.1.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658"
@@ -2456,6 +2837,10 @@ has-flag@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa"
+has-flag@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51"
+
has-unicode@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
@@ -2472,11 +2857,26 @@ hash-base@^2.0.0:
dependencies:
inherits "^2.0.1"
-hash.js@^1.0.0, hash.js@^1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.0.3.tgz#1332ff00156c0a0ffdd8236013d07b77a0451573"
+hash-base@^3.0.0:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.0.4.tgz#5fc8686847ecd73499403319a6b0a3f3f6ae4918"
dependencies:
inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+hash.js@^1.0.0, hash.js@^1.0.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846"
+ dependencies:
+ inherits "^2.0.3"
+ minimalistic-assert "^1.0.0"
+
+hasha@~2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/hasha/-/hasha-2.2.0.tgz#78d7cbfc1e6d66303fe79837365984517b2f6ee1"
+ dependencies:
+ is-stream "^1.0.1"
+ pinkie-promise "^2.0.0"
hawk@~3.1.3:
version "3.1.3"
@@ -2491,6 +2891,19 @@ he@1.1.x:
version "1.1.1"
resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
+heimdalljs-logger@^0.1.9:
+ version "0.1.9"
+ resolved "https://registry.yarnpkg.com/heimdalljs-logger/-/heimdalljs-logger-0.1.9.tgz#d76ada4e45b7bb6f786fc9c010a68eb2e2faf176"
+ dependencies:
+ debug "^2.2.0"
+ heimdalljs "^0.2.0"
+
+heimdalljs@^0.2.0, heimdalljs@^0.2.4:
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/heimdalljs/-/heimdalljs-0.2.5.tgz#6aa54308eee793b642cff9cf94781445f37730ac"
+ dependencies:
+ rsvp "~3.2.1"
+
hmac-drbg@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
@@ -2504,8 +2917,8 @@ hoek@2.x.x:
resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed"
hosted-git-info@^2.1.4:
- version "2.4.2"
- resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.4.2.tgz#0076b9f46a270506ddbaaea56496897460612a67"
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c"
hpack.js@^2.1.6:
version "2.1.6"
@@ -2525,21 +2938,21 @@ html-entities@^1.2.0:
resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.2.1.tgz#0df29351f0721163515dfb9e5543e5f6eed5162f"
html-minifier@^3.2.3:
- version "3.5.2"
- resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.2.tgz#d73bc3ff448942408818ce609bf3fb0ea7ef4eb7"
+ version "3.5.3"
+ resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.3.tgz#4a275e3b1a16639abb79b4c11191ff0d0fcf1ab9"
dependencies:
camel-case "3.0.x"
clean-css "4.1.x"
- commander "2.9.x"
+ commander "2.11.x"
he "1.1.x"
ncname "1.0.x"
param-case "2.1.x"
relateurl "0.2.x"
uglify-js "3.0.x"
-html-webpack-plugin@^2.19.0:
- version "2.28.0"
- resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-2.28.0.tgz#2e7863b57e5fd48fe263303e2ffc934c3064d009"
+html-webpack-plugin@^2.29.0:
+ version "2.30.1"
+ resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-2.30.1.tgz#7f9c421b7ea91ec460f56527d78df484ee7537d5"
dependencies:
bluebird "^3.4.7"
html-minifier "^3.2.3"
@@ -2570,7 +2983,16 @@ http-errors@~1.6.1:
setprototypeof "1.0.3"
statuses ">= 1.3.1 < 2"
-http-proxy-middleware@~0.17.1:
+http-errors@~1.6.2:
+ version "1.6.2"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736"
+ dependencies:
+ depd "1.1.1"
+ inherits "2.0.3"
+ setprototypeof "1.0.3"
+ statuses ">= 1.3.1 < 2"
+
+http-proxy-middleware@~0.17.4:
version "0.17.4"
resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.17.4.tgz#642e8848851d66f09d4f124912846dbaeb41b833"
dependencies:
@@ -2606,7 +3028,7 @@ https-proxy-agent@^1.0.0:
debug "2"
extend "3"
-iconv-lite@0.4, iconv-lite@^0.4.17:
+iconv-lite@0.4:
version "0.4.17"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.17.tgz#4fdaa3b38acbc2c031b045d0edcdfe1ecab18c8d"
@@ -2618,13 +3040,19 @@ icss-replace-symbols@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded"
+icss-utils@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-2.1.0.tgz#83f0a0ec378bf3246178b6c2ad9136f135b1c962"
+ dependencies:
+ postcss "^6.0.1"
+
ieee754@^1.1.4:
version "1.1.8"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
image-size@~0.5.0:
- version "0.5.4"
- resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.4.tgz#94e07beec0659386f1aefb84b2222e88405485cd"
+ version "0.5.5"
+ resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c"
img-stats@^0.5.2:
version "0.5.2"
@@ -2654,10 +3082,6 @@ indexof@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d"
-inflection@^1.7.0:
- version "1.12.0"
- resolved "https://registry.yarnpkg.com/inflection/-/inflection-1.12.0.tgz#a200935656d6f5f6bc4dc7502e1aecb703228416"
-
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
@@ -2665,7 +3089,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
-inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.1:
+inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
@@ -2677,23 +3101,11 @@ ini@^1.3.4, ini@~1.3.0:
version "1.3.4"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e"
-inquirer@^3.0.0:
- version "3.0.6"
- resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.0.6.tgz#e04aaa9d05b7a3cb9b0f407d04375f0447190347"
+internal-ip@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-1.2.0.tgz#ae9fbf93b984878785d50a8de1b356956058cf5c"
dependencies:
- ansi-escapes "^1.1.0"
- chalk "^1.0.0"
- cli-cursor "^2.1.0"
- cli-width "^2.0.0"
- external-editor "^2.0.1"
- figures "^2.0.0"
- lodash "^4.3.0"
- mute-stream "0.0.7"
- run-async "^2.2.0"
- rx "^4.1.0"
- string-width "^2.0.0"
- strip-ansi "^3.0.0"
- through "^2.3.6"
+ meow "^3.3.0"
interpret@^1.0.0:
version "1.0.3"
@@ -2709,9 +3121,13 @@ invert-kv@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6"
-ipaddr.js@1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.3.0.tgz#1e03a52fdad83a8bbb2b25cbf4998b4cffcd3dec"
+ip@^1.1.0, ip@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
+
+ipaddr.js@1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.4.0.tgz#296aca878a821816e5b85d0a285a99bcff4582f0"
is-absolute-url@^2.0.0:
version "2.1.0"
@@ -2727,7 +3143,7 @@ is-binary-path@^1.0.0:
dependencies:
binary-extensions "^1.0.0"
-is-buffer@^1.1.5:
+is-buffer@^1.0.2, is-buffer@^1.1.5, is-buffer@~1.1.1:
version "1.1.5"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc"
@@ -2737,6 +3153,10 @@ is-builtin-module@^1.0.0:
dependencies:
builtin-modules "^1.0.0"
+is-directory@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1"
+
is-dotfile@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d"
@@ -2825,6 +3245,12 @@ is-plain-obj@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
+is-plain-object@^2.0.1:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
+ dependencies:
+ isobject "^3.0.1"
+
is-posix-bracket@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4"
@@ -2833,10 +3259,6 @@ is-primitive@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575"
-is-promise@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa"
-
is-redirect@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24"
@@ -2845,7 +3267,7 @@ is-retry-allowed@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34"
-is-stream@^1.0.0, is-stream@^1.1.0:
+is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
@@ -2863,6 +3285,10 @@ is-utf8@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72"
+is-wsl@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d"
+
isarray@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
@@ -2885,6 +3311,10 @@ isobject@^2.0.0:
dependencies:
isarray "1.0.0"
+isobject@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
+
isstream@~0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
@@ -2924,7 +3354,19 @@ istanbul-lib-hook@^1.0.7:
dependencies:
append-transform "^0.4.0"
-istanbul-lib-instrument@^1.1.3, istanbul-lib-instrument@^1.7.2:
+istanbul-lib-instrument@^1.1.3:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.8.0.tgz#66f6c9421cc9ec4704f76f2db084ba9078a2b532"
+ dependencies:
+ babel-generator "^6.18.0"
+ babel-template "^6.16.0"
+ babel-traverse "^6.18.0"
+ babel-types "^6.18.0"
+ babylon "^6.18.0"
+ istanbul-lib-coverage "^1.1.1"
+ semver "^5.3.0"
+
+istanbul-lib-instrument@^1.7.2:
version "1.7.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.7.2.tgz#6014b03d3470fb77638d5802508c255c06312e56"
dependencies:
@@ -3005,6 +3447,17 @@ js-tokens@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7"
+js-tokens@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
+
+js-yaml@^3.4.3:
+ version "3.9.1"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.9.1.tgz#08775cebdfdd359209f0d2acd383c8f86a6904a0"
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
js-yaml@^3.7.0:
version "3.8.4"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.4.tgz#520b4564f86573ba96662af85a8cafa7b4b5a6f6"
@@ -3023,10 +3476,6 @@ jsbn@~0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
-jschardet@^1.4.2:
- version "1.4.2"
- resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-1.4.2.tgz#2aa107f142af4121d145659d44f50830961e699a"
-
jsesc@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b"
@@ -3036,8 +3485,12 @@ jsesc@~0.5.0:
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
json-loader@^0.5.4:
- version "0.5.4"
- resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.4.tgz#8baa1365a632f58a3c46d20175fc6002c96e37de"
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/json-loader/-/json-loader-0.5.7.tgz#dca14a70235ff82f0ac9a3abeb60d337a365185d"
+
+json-schema-traverse@^0.3.0:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340"
json-schema@0.2.3:
version "0.2.3"
@@ -3057,7 +3510,7 @@ json3@3.3.2, json3@^3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1"
-json5@^0.5.0:
+json5@^0.5.0, json5@^0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821"
@@ -3067,6 +3520,12 @@ jsonfile@^2.1.0:
optionalDependencies:
graceful-fs "^4.1.6"
+jsonfile@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-3.0.1.tgz#a5ecc6f65f53f662c4415c7675a0331d0992ec66"
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
jsonify@~0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
@@ -3080,13 +3539,6 @@ jsprim@^1.2.2:
json-schema "0.2.3"
verror "1.3.6"
-karma-chrome-launcher@~2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.0.0.tgz#c2790c5a32b15577d0fff5a4d5a2703b3b439c25"
- dependencies:
- fs-access "^1.0.0"
- which "^1.2.1"
-
karma-cli@~1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/karma-cli/-/karma-cli-1.0.1.tgz#ae6c3c58a313a1d00b45164c455b9b86ce17f960"
@@ -3109,21 +3561,18 @@ karma-jasmine@^1.0.2, karma-jasmine@~1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-1.1.0.tgz#22e4c06bf9a182e5294d1f705e3733811b810acf"
-karma-sourcemap-loader@^0.3.7:
- version "0.3.7"
- resolved "https://registry.yarnpkg.com/karma-sourcemap-loader/-/karma-sourcemap-loader-0.3.7.tgz#91322c77f8f13d46fed062b042e1009d4c4505d8"
+karma-phantomjs-launcher@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/karma-phantomjs-launcher/-/karma-phantomjs-launcher-1.0.4.tgz#d23ca34801bda9863ad318e3bb4bd4062b13acd2"
dependencies:
- graceful-fs "^4.1.2"
+ lodash "^4.0.1"
+ phantomjs-prebuilt "^2.1.7"
-karma-webpack@^2.0.0:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/karma-webpack/-/karma-webpack-2.0.3.tgz#39cebf5ca2580139b27f9ae69b78816b9c82fae6"
+karma-source-map-support@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/karma-source-map-support/-/karma-source-map-support-1.2.0.tgz#1bf81e7bb4b089627ab352ec4179e117c406a540"
dependencies:
- async "~0.9.0"
- loader-utils "^0.2.5"
- lodash "^3.8.0"
- source-map "^0.1.41"
- webpack-dev-middleware "^1.0.11"
+ source-map-support "^0.4.1"
karma@~1.4.1:
version "1.4.1"
@@ -3157,18 +3606,38 @@ karma@~1.4.1:
tmp "0.0.28"
useragent "^2.1.10"
-kind-of@^3.0.2:
+kew@~0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/kew/-/kew-0.7.0.tgz#79d93d2d33363d6fdd2970b335d9141ad591d79b"
+
+kind-of@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-2.0.1.tgz#018ec7a4ce7e3a86cb9141be519d24c8faa981b5"
+ dependencies:
+ is-buffer "^1.0.2"
+
+kind-of@^3.0.2, kind-of@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
dependencies:
is-buffer "^1.1.5"
+klaw@^1.0.0:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/klaw/-/klaw-1.3.1.tgz#4088433b46b3b1ba259d78785d8e96f73ba02439"
+ optionalDependencies:
+ graceful-fs "^4.1.9"
+
latest-version@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15"
dependencies:
package-json "^4.0.0"
+lazy-cache@^0.2.3:
+ version "0.2.7"
+ resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-0.2.7.tgz#7feddf2dcb6edb77d11ef1d117ab5ffdf0ab1b65"
+
lazy-cache@^1.0.3:
version "1.0.4"
resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e"
@@ -3183,11 +3652,13 @@ lcid@^1.0.0:
dependencies:
invert-kv "^1.0.0"
-less-loader@^2.2.3:
- version "2.2.3"
- resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-2.2.3.tgz#b6d8f8139c8493df09d992a93a00734b08f84528"
+less-loader@^4.0.5:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-4.0.5.tgz#ae155a7406cac6acd293d785587fcff0f478c4dd"
dependencies:
- loader-utils "^0.2.5"
+ clone "^2.1.1"
+ loader-utils "^1.1.0"
+ pify "^2.3.0"
less@^2.7.2:
version "2.7.2"
@@ -3202,6 +3673,12 @@ less@^2.7.2:
request "^2.72.0"
source-map "^0.5.3"
+license-webpack-plugin@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/license-webpack-plugin/-/license-webpack-plugin-1.0.0.tgz#9515229075bacce8ec420cadf99a54a5f78cc7df"
+ dependencies:
+ ejs "^2.5.7"
+
load-json-file@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0"
@@ -3212,11 +3689,20 @@ load-json-file@^1.0.0:
pinkie-promise "^2.0.0"
strip-bom "^2.0.0"
+load-json-file@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8"
+ dependencies:
+ graceful-fs "^4.1.2"
+ parse-json "^2.2.0"
+ pify "^2.0.0"
+ strip-bom "^3.0.0"
+
loader-runner@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.3.0.tgz#f482aea82d543e07921700d5a46ef26fdac6b8a2"
-loader-utils@^0.2.15, loader-utils@^0.2.16, loader-utils@^0.2.5, loader-utils@^0.2.9, loader-utils@~0.2.2:
+loader-utils@^0.2.15, loader-utils@^0.2.16, loader-utils@~0.2.2:
version "0.2.17"
resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-0.2.17.tgz#f86e6374d43205a6e6c60e9196f17c0299bfb348"
dependencies:
@@ -3225,7 +3711,7 @@ loader-utils@^0.2.15, loader-utils@^0.2.16, loader-utils@^0.2.5, loader-utils@^0
json5 "^0.5.0"
object-assign "^4.0.1"
-loader-utils@^1.0.2:
+loader-utils@^1.0.1, loader-utils@^1.0.2, loader-utils@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.1.0.tgz#c98aef488bcceda2ffb5e2de646d6a754429f5cd"
dependencies:
@@ -3233,6 +3719,13 @@ loader-utils@^1.0.2:
emojis-list "^2.0.0"
json5 "^0.5.0"
+locate-path@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
+ dependencies:
+ p-locate "^2.0.0"
+ path-exists "^3.0.0"
+
lodash.assign@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7"
@@ -3253,6 +3746,10 @@ lodash.mergewith@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.0.tgz#150cf0a16791f5903b8891eab154609274bdea55"
+lodash.tail@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/lodash.tail/-/lodash.tail-4.1.1.tgz#d2333a36d9e7717c8ad2f7cacafec7c32b444664"
+
lodash.uniq@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
@@ -3261,14 +3758,10 @@ lodash@^3.8.0:
version "3.10.1"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6"
-lodash@^4.0.0, lodash@^4.11.1, lodash@^4.14.0, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.0:
+lodash@^4.0.0, lodash@^4.0.1, lodash@^4.11.1, lodash@^4.14.0, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.5.0, lodash@~4.17.4:
version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
-lodash@~4.16.4:
- version "4.16.6"
- resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.16.6.tgz#d22c9ac660288f3843e16ba7d2b5d06cca27d777"
-
log4js@^0.6.31:
version "0.6.38"
resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd"
@@ -3276,6 +3769,10 @@ log4js@^0.6.31:
readable-stream "~1.0.2"
semver "~4.3.3"
+loglevel@^1.4.1:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.4.1.tgz#95b383f91a3c2756fd4ab093667e4309161f2bcd"
+
longest@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097"
@@ -3305,20 +3802,27 @@ lru-cache@2.2.x:
version "2.2.4"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.2.4.tgz#6c658619becf14031d0d0b594b16042ce4dc063d"
-lru-cache@^4.0.0, lru-cache@^4.0.1:
+lru-cache@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e"
dependencies:
pseudomap "^1.0.1"
yallist "^2.0.0"
+lru-cache@^4.0.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55"
+ dependencies:
+ pseudomap "^1.0.2"
+ yallist "^2.1.2"
+
macaddress@^0.2.8:
version "0.2.8"
resolved "https://registry.yarnpkg.com/macaddress/-/macaddress-0.2.8.tgz#5904dc537c39ec6dbefeae902327135fa8511f12"
-magic-string@^0.19.0:
- version "0.19.1"
- resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.19.1.tgz#14d768013caf2ec8fdea16a49af82fc377e75201"
+magic-string@^0.22.3:
+ version "0.22.4"
+ resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.4.tgz#31039b4e40366395618c1d6cf8193c53917475ff"
dependencies:
vlq "^0.2.1"
@@ -3336,28 +3840,43 @@ map-obj@^1.0.0, map-obj@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
-matcher-collection@^1.0.0:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/matcher-collection/-/matcher-collection-1.0.4.tgz#2f66ae0869996f29e43d0b62c83dd1d43e581755"
- dependencies:
- minimatch "^3.0.2"
-
math-expression-evaluator@^1.2.14:
version "1.2.17"
resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz#de819fdbcd84dccd8fae59c6aeb79615b9d266ac"
+md5.js@^1.3.4:
+ version "1.3.4"
+ resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d"
+ dependencies:
+ hash-base "^3.0.0"
+ inherits "^2.0.1"
+
+md5@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/md5/-/md5-2.2.1.tgz#53ab38d5fe3c8891ba465329ea23fac0540126f9"
+ dependencies:
+ charenc "~0.0.1"
+ crypt "~0.0.1"
+ is-buffer "~1.1.1"
+
media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
-memory-fs@^0.4.0, memory-fs@~0.4.1:
+mem@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76"
+ dependencies:
+ mimic-fn "^1.0.0"
+
+memory-fs@^0.4.0, memory-fs@^0.4.1, memory-fs@~0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
dependencies:
errno "^0.1.3"
readable-stream "^2.0.1"
-meow@^3.7.0:
+meow@^3.3.0, meow@^3.7.0:
version "3.7.0"
resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb"
dependencies:
@@ -3405,7 +3924,11 @@ miller-rabin@^4.0.0:
bn.js "^4.0.0"
brorand "^1.0.1"
-"mime-db@>= 1.27.0 < 2", mime-db@~1.27.0:
+"mime-db@>= 1.29.0 < 2", mime-db@~1.30.0:
+ version "1.30.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01"
+
+mime-db@~1.27.0:
version "1.27.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.27.0.tgz#820f572296bbd20ec25ed55e5b5de869e5436eb1"
@@ -3415,14 +3938,24 @@ mime-types@^2.1.12, mime-types@~2.1.11, mime-types@~2.1.15, mime-types@~2.1.7:
dependencies:
mime-db "~1.27.0"
+mime-types@~2.1.16:
+ version "2.1.17"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a"
+ dependencies:
+ mime-db "~1.30.0"
+
mime@1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53"
-mime@1.3.x, mime@^1.2.11, mime@^1.3.4:
+mime@1.3.x, mime@^1.3.4:
version "1.3.6"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.6.tgz#591d84d3653a6b0b4a3b9df8de5aa8108e72e5e0"
+mime@^1.2.11:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.4.0.tgz#69e9e0db51d44f2a3b56e48b7817d7d137f1a343"
+
mimic-fn@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18"
@@ -3453,6 +3986,19 @@ minimist@~0.0.1:
version "0.0.10"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf"
+mixin-object@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/mixin-object/-/mixin-object-2.0.1.tgz#4fb949441dab182540f1fe035ba60e1947a5e57e"
+ dependencies:
+ for-in "^0.1.3"
+ is-extendable "^0.1.1"
+
+mkdirp@0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.0.tgz#1d73076a6df986cd9344e15e71fcc05a4c9abf12"
+ dependencies:
+ minimist "0.0.8"
+
mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
@@ -3481,14 +4027,25 @@ ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
-mute-stream@0.0.7:
- version "0.0.7"
- resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab"
+multicast-dns-service-types@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901"
-nan@^2.3.0, nan@^2.3.2:
+multicast-dns@^6.0.1:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.1.1.tgz#6e7de86a570872ab17058adea7160bbeca814dde"
+ dependencies:
+ dns-packet "^1.0.1"
+ thunky "^0.1.0"
+
+nan@^2.3.0:
version "2.6.2"
resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45"
+nan@^2.3.2:
+ version "2.7.0"
+ resolved "https://registry.yarnpkg.com/nan/-/nan-2.7.0.tgz#d95bf721ec877e08db276ed3fc6eb78f9083ad46"
+
ncname@1.0.x:
version "1.0.0"
resolved "https://registry.yarnpkg.com/ncname/-/ncname-1.0.0.tgz#5b57ad18b1ca092864ef62b0b1ed8194f383b71c"
@@ -3515,9 +4072,19 @@ no-case@^2.2.0:
dependencies:
lower-case "^1.1.1"
+node-dir@^0.1.10:
+ version "0.1.17"
+ resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5"
+ dependencies:
+ minimatch "^3.0.2"
+
+node-forge@0.6.33:
+ version "0.6.33"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.6.33.tgz#463811879f573d45155ad6a9f43dc296e8e85ebc"
+
node-gyp@^3.3.1:
- version "3.6.1"
- resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-3.6.1.tgz#19561067ff185464aded478212681f47fd578cbc"
+ version "3.6.2"
+ resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-3.6.2.tgz#9bfbe54562286284838e750eac05295853fa1c60"
dependencies:
fstream "^1.0.0"
glob "^7.0.3"
@@ -3616,8 +4183,8 @@ nopt@^4.0.1:
osenv "^0.1.4"
normalize-package-data@^2.3.2, normalize-package-data@^2.3.4:
- version "2.3.8"
- resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.8.tgz#d819eda2a9dedbd1ffa563ea4071d936782295bb"
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f"
dependencies:
hosted-git-info "^2.1.4"
is-builtin-module "^1.0.0"
@@ -3649,7 +4216,22 @@ npm-run-path@^1.0.0:
dependencies:
path-key "^1.0.0"
-"npmlog@0 || 1 || 2 || 3 || 4", npmlog@^4.0.0, npmlog@^4.0.2:
+npm-run-path@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
+ dependencies:
+ path-key "^2.0.0"
+
+"npmlog@0 || 1 || 2 || 3 || 4", npmlog@^4.0.0:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
+ dependencies:
+ are-we-there-yet "~1.1.2"
+ console-control-strings "~1.1.0"
+ gauge "~2.7.3"
+ set-blocking "~2.0.0"
+
+npmlog@^4.0.2:
version "4.1.0"
resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.0.tgz#dc59bee85f64f00ed424efb2af0783df25d1c0b5"
dependencies:
@@ -3664,10 +4246,6 @@ nth-check@~1.0.1:
dependencies:
boolbase "~1.0.0"
-null-check@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd"
-
num2fraction@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede"
@@ -3719,12 +4297,6 @@ once@^1.3.0, once@^1.3.3, once@^1.4.0:
dependencies:
wrappy "1"
-onetime@^2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4"
- dependencies:
- mimic-fn "^1.0.0"
-
opn@4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/opn/-/opn-4.0.2.tgz#7abc22e644dff63b0a96d5ab7f2790c0f01abc95"
@@ -3732,6 +4304,12 @@ opn@4.0.2:
object-assign "^4.0.1"
pinkie-promise "^2.0.0"
+opn@~5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/opn/-/opn-5.1.0.tgz#72ce2306a17dbea58ff1041853352b4a8fc77519"
+ dependencies:
+ is-wsl "^1.1.0"
+
optimist@^0.6.1, optimist@~0.6.0:
version "0.6.1"
resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686"
@@ -3759,7 +4337,7 @@ os-browserify@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.2.1.tgz#63fc4ccee5d2d7763d26bbf8601078e6c2e0044f"
-os-homedir@^1.0.0:
+os-homedir@^1.0.0, os-homedir@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
@@ -3769,6 +4347,14 @@ os-locale@^1.4.0:
dependencies:
lcid "^1.0.0"
+os-locale@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2"
+ dependencies:
+ execa "^0.7.0"
+ lcid "^1.0.0"
+ mem "^1.1.0"
+
os-tmpdir@^1.0.0, os-tmpdir@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@@ -3780,6 +4366,24 @@ osenv@0, osenv@^0.1.4:
os-homedir "^1.0.0"
os-tmpdir "^1.0.0"
+p-finally@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+
+p-limit@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc"
+
+p-locate@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
+ dependencies:
+ p-limit "^1.1.0"
+
+p-map@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.1.1.tgz#05f5e4ae97a068371bc2a5cc86bfbdbc19c4ae7a"
+
package-json@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/package-json/-/package-json-4.0.1.tgz#8869a0401253661c4c4ca3da6c2121ed555f5eed"
@@ -3856,6 +4460,10 @@ path-exists@^2.0.0:
dependencies:
pinkie-promise "^2.0.0"
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+
path-is-absolute@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
@@ -3868,6 +4476,10 @@ path-key@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-1.0.0.tgz#5d53d578019646c0d68800db4e146e6bdc2ac7af"
+path-key@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
+
path-parse@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1"
@@ -3884,9 +4496,15 @@ path-type@^1.0.0:
pify "^2.0.0"
pinkie-promise "^2.0.0"
+path-type@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73"
+ dependencies:
+ pify "^2.0.0"
+
pbkdf2@^3.0.3:
- version "3.0.12"
- resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.12.tgz#be36785c5067ea48d806ff923288c5f750b6b8a2"
+ version "3.0.13"
+ resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.0.13.tgz#c37d295531e786b1da3e3eadc840426accb0ae25"
dependencies:
create-hash "^1.1.2"
create-hmac "^1.1.4"
@@ -3894,14 +4512,36 @@ pbkdf2@^3.0.3:
safe-buffer "^5.0.1"
sha.js "^2.4.8"
+pend@~1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
+
performance-now@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5"
+phantomjs-prebuilt@^2.1.7:
+ version "2.1.15"
+ resolved "https://registry.yarnpkg.com/phantomjs-prebuilt/-/phantomjs-prebuilt-2.1.15.tgz#20f86e82d3349c505917527745b7a411e08b3903"
+ dependencies:
+ es6-promise "~4.0.3"
+ extract-zip "~1.6.5"
+ fs-extra "~1.0.0"
+ hasha "~2.2.0"
+ kew "~0.7.0"
+ progress "~1.1.8"
+ request "~2.81.0"
+ request-progress "~2.0.1"
+ which "~1.2.10"
+
pify@^2.0.0, pify@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
+pify@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
+
pinkie-promise@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
@@ -3981,12 +4621,37 @@ postcss-filter-plugins@^2.0.0:
postcss "^5.0.4"
uniqid "^4.0.0"
-postcss-loader@^0.13.0:
- version "0.13.0"
- resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-0.13.0.tgz#72fdaf0d29444df77d3751ce4e69dc40bc99ed85"
+postcss-load-config@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-1.2.0.tgz#539e9afc9ddc8620121ebf9d8c3673e0ce50d28a"
dependencies:
- loader-utils "^0.2.15"
- postcss "^5.2.0"
+ cosmiconfig "^2.1.0"
+ object-assign "^4.1.0"
+ postcss-load-options "^1.2.0"
+ postcss-load-plugins "^2.3.0"
+
+postcss-load-options@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-load-options/-/postcss-load-options-1.2.0.tgz#b098b1559ddac2df04bc0bb375f99a5cfe2b6d8c"
+ dependencies:
+ cosmiconfig "^2.1.0"
+ object-assign "^4.1.0"
+
+postcss-load-plugins@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/postcss-load-plugins/-/postcss-load-plugins-2.3.0.tgz#745768116599aca2f009fad426b00175049d8d92"
+ dependencies:
+ cosmiconfig "^2.1.1"
+ object-assign "^4.1.0"
+
+postcss-loader@^1.3.3:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-1.3.3.tgz#a621ea1fa29062a83972a46f54486771301916eb"
+ dependencies:
+ loader-utils "^1.0.2"
+ object-assign "^4.1.1"
+ postcss "^5.2.15"
+ postcss-load-config "^1.2.0"
postcss-merge-idents@^2.1.5:
version "2.1.7"
@@ -4168,7 +4833,7 @@ postcss-zindex@^2.0.1:
postcss "^5.0.4"
uniqs "^2.0.0"
-postcss@^5.0.0, postcss@^5.0.10, postcss@^5.0.11, postcss@^5.0.12, postcss@^5.0.13, postcss@^5.0.14, postcss@^5.0.16, postcss@^5.0.2, postcss@^5.0.4, postcss@^5.0.5, postcss@^5.0.6, postcss@^5.0.8, postcss@^5.2.0, postcss@^5.2.16:
+postcss@^5.0.0, postcss@^5.0.10, postcss@^5.0.11, postcss@^5.0.12, postcss@^5.0.13, postcss@^5.0.14, postcss@^5.0.16, postcss@^5.0.2, postcss@^5.0.4, postcss@^5.0.5, postcss@^5.0.6, postcss@^5.0.8, postcss@^5.2.15, postcss@^5.2.16:
version "5.2.17"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.17.tgz#cf4f597b864d65c8a492b2eabe9d706c879c388b"
<TRUNCATED>
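A note on the debug entries earlier in this lockfile diff: yarn.lock puts every requested range that resolved to the same version on a single header line, so when debug@* was split out to 3.0.1 while the pinned 2.x ranges stayed on 2.6.8, one header forked into two entries rather than duplicating anything. For reference, a complete entry as it appears in the diff reads:

debug@2, debug@2.6.8, debug@^2.2.0, debug@^2.6.3, debug@^2.6.6, debug@^2.6.8:
  version "2.6.8"
  resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc"
  dependencies:
    ms "2.0.0"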
[35/57] [abbrv] ambari git commit: AMBARI-21868. Implement host recovery - backend changes. (Eugene Chekanskiy via swagle)
Posted by lp...@apache.org.
AMBARI-21868. Implement host recovery - backend changes. (Eugene Chekanskiy via swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7a3bcd9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7a3bcd9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7a3bcd9
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: c7a3bcd9cd9b4c92990405c6826140dee9ddd46e
Parents: 75c8f5e
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Fri Sep 8 13:53:18 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Sep 8 13:53:18 2017 -0700
----------------------------------------------------------------------
.../resources/ClusterResourceDefinition.java | 1 +
.../AmbariManagementControllerImpl.java | 25 +++++++++++++-------
.../controller/DeleteIdentityHandler.java | 3 ++-
.../server/controller/KerberosHelper.java | 8 +++++--
.../server/controller/KerberosHelperImpl.java | 13 ++++++----
.../AbstractPrepareKerberosServerAction.java | 14 ++++++-----
.../kerberos/CreateKeytabFilesServerAction.java | 9 +++++--
.../kerberos/CreatePrincipalsServerAction.java | 3 ++-
.../kerberos/KerberosIdentityDataFile.java | 2 +-
.../KerberosIdentityDataFileWriter.java | 9 ++++---
.../PrepareDisableKerberosServerAction.java | 2 +-
.../PrepareEnableKerberosServerAction.java | 2 +-
.../PrepareKerberosIdentitiesServerAction.java | 3 ++-
.../upgrades/PreconfigureKerberosAction.java | 6 ++---
.../server/agent/TestHeartbeatHandler.java | 2 +-
...AbstractPrepareKerberosServerActionTest.java | 2 +-
.../kerberos/KerberosIdentityDataFileTest.java | 8 +++----
.../kerberos/KerberosServerActionTest.java | 2 +-
18 files changed, 72 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
index 8933dd3..9d0c169 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
@@ -87,6 +87,7 @@ public class ClusterResourceDefinition extends BaseResourceDefinition {
directives.add(KerberosHelper.DIRECTIVE_FORCE_TOGGLE_KERBEROS);
directives.add(KerberosHelper.DIRECTIVE_HOSTS);
directives.add(KerberosHelper.DIRECTIVE_COMPONENTS);
+ directives.add(KerberosHelper.DIRECTIVE_IGNORE_CONFIGS);
return directives;
}
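
Once registered here, the directive can be supplied on the cluster resource alongside the existing keytab-regeneration directives. A minimal client sketch, assuming the standard regeneration request shape; the host, credentials and cluster name are placeholders taken from the test configs, not part of the patch:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class RegenerateKeytabsExample {
  public static void main(String[] args) throws Exception {
    // PUT .../clusters/c1?regenerate_keytabs=all&ignore_config_updates=true
    // asks Ambari to regenerate all keytabs while skipping config updates.
    URL url = new URL("http://c6401.ambari.apache.org:8080/api/v1/clusters/c1"
        + "?regenerate_keytabs=all&ignore_config_updates=true");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setRequestProperty("X-Requested-By", "ambari");
    conn.setRequestProperty("Authorization", "Basic "
        + Base64.getEncoder().encodeToString(
            "admin:admin".getBytes(StandardCharsets.UTF_8)));
    conn.setDoOutput(true);
    try (OutputStream os = conn.getOutputStream()) {
      os.write("{\"Clusters\":{\"security_type\":\"KERBEROS\"}}"
          .getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode());
  }
}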
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 96280ea..34744eb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -2955,15 +2955,22 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
}
break;
case INIT:
- throw new AmbariException("Unsupported transition to INIT for"
- + " servicecomponenthost"
- + ", clusterName=" + cluster.getClusterName()
- + ", clusterId=" + cluster.getClusterId()
- + ", serviceName=" + scHost.getServiceName()
- + ", componentName=" + scHost.getServiceComponentName()
- + ", hostname=" + scHost.getHostName()
- + ", currentState=" + oldSchState
- + ", newDesiredState=" + newState);
+ if (oldSchState == State.INSTALLED ||
+ oldSchState == State.INSTALL_FAILED ||
+ oldSchState == State.INIT) {
+ scHost.setState(State.INIT);
+ continue;
+ } else {
+ throw new AmbariException("Unsupported transition to INIT for"
+ + " servicecomponenthost"
+ + ", clusterName=" + cluster.getClusterName()
+ + ", clusterId=" + cluster.getClusterId()
+ + ", serviceName=" + scHost.getServiceName()
+ + ", componentName=" + scHost.getServiceComponentName()
+ + ", hostname=" + scHost.getHostName()
+ + ", currentState=" + oldSchState
+ + ", newDesiredState=" + newState);
+ }
default:
throw new AmbariException("Unsupported state change operation"
+ ", newState=" + newState);
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
index a7b9d80..29f8e2a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/DeleteIdentityHandler.java
@@ -227,7 +227,8 @@ class DeleteIdentityHandler {
calculateConfig(kerberosDescriptor, serviceNames()),
new HashMap<>(),
false,
- new HashMap<>());
+ new HashMap<>(),
+ false);
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", actionLog.getStdOut(), actionLog.getStdErr());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index bb360b5..20c5708 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -59,6 +59,10 @@ public interface KerberosHelper {
*/
String DIRECTIVE_COMPONENTS = "regenerate_components";
/**
+ * directive used to indicate that configuration updates should be skipped while keytabs are regenerated
+ */
+ String DIRECTIVE_IGNORE_CONFIGS = "ignore_config_updates";
+ /**
* directive used to indicate that the enable Kerberos operation should proceed even if the
* cluster's security type is not changing
*/
@@ -591,6 +595,7 @@ public interface KerberosHelper {
* values
* @param configurations a Map of configurations to use a replacements for variables
* in identity fields
+ * @param ignoreHeadless true if headless (host-independent) principals should be skipped
* @return an integer indicating the number of identities added to the data file
* @throws java.io.IOException if an error occurs while writing a record to the data file
*/
@@ -598,9 +603,8 @@ public interface KerberosHelper {
Collection<KerberosIdentityDescriptor> identities,
Collection<String> identityFilter, String hostname, String serviceName,
String componentName, Map<String, Map<String, String>> kerberosConfigurations,
- Map<String, Map<String, String>> configurations)
+ Map<String, Map<String, String>> configurations, boolean ignoreHeadless)
throws IOException;
-
/**
* Calculates the map of configurations relative to the cluster and host.
* <p/>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 013a063..67b08fd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -265,10 +265,13 @@ public class KerberosHelperImpl implements KerberosHelper {
Set<String> hostFilter = parseHostFilter(requestProperties);
Map<String, Set<String>> serviceComponentFilter = parseComponentFilter(requestProperties);
+ boolean updateConfigurations = !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
+ || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));
+
if ("true".equalsIgnoreCase(value) || "all".equalsIgnoreCase(value)) {
- handler = new CreatePrincipalsAndKeytabsHandler(true, true, true);
+ handler = new CreatePrincipalsAndKeytabsHandler(true, updateConfigurations, true);
} else if ("missing".equalsIgnoreCase(value)) {
- handler = new CreatePrincipalsAndKeytabsHandler(false, true, true);
+ handler = new CreatePrincipalsAndKeytabsHandler(false, updateConfigurations, true);
}
if (handler != null) {
@@ -1482,7 +1485,7 @@ public class KerberosHelperImpl implements KerberosHelper {
Collection<KerberosIdentityDescriptor> identities,
Collection<String> identityFilter, String hostname, String serviceName,
String componentName, Map<String, Map<String, String>> kerberosConfigurations,
- Map<String, Map<String, String>> configurations)
+ Map<String, Map<String, String>> configurations, boolean ignoreHeadless)
throws IOException {
int identitiesAdded = 0;
@@ -1534,7 +1537,8 @@ public class KerberosHelperImpl implements KerberosHelper {
keytabFileOwnerAccess,
keytabFileGroupName,
keytabFileGroupAccess,
- (keytabIsCachable) ? "true" : "false");
+ (keytabIsCachable) ? "true" : "false",
+ (ignoreHeadless && principalDescriptor.getType() == KerberosPrincipalType.USER) ? "true" : "false");
}
// Add the principal-related configuration to the map of configurations
@@ -2189,6 +2193,7 @@ public class KerberosHelperImpl implements KerberosHelper {
keytabFileOwnerAccess,
keytabFileGroupName,
keytabFileGroupAccess,
+ "false",
"false");
hostsWithValidKerberosClient.add(hostname);
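
The directive parsing added above reduces to a single predicate; a self-contained sketch, with the map type simplified from the request properties used in the real code:

import java.util.HashMap;
import java.util.Map;

public class IgnoreConfigsDirectiveSketch {
  static final String DIRECTIVE_IGNORE_CONFIGS = "ignore_config_updates";

  // Configurations are updated unless the request explicitly carries
  // ignore_config_updates=true (case-insensitive), matching the hunk above.
  static boolean shouldUpdateConfigurations(Map<String, String> requestProperties) {
    return !requestProperties.containsKey(DIRECTIVE_IGNORE_CONFIGS)
        || !"true".equalsIgnoreCase(requestProperties.get(DIRECTIVE_IGNORE_CONFIGS));
  }

  public static void main(String[] args) {
    Map<String, String> props = new HashMap<>();
    System.out.println(shouldUpdateConfigurations(props)); // true: directive absent
    props.put(DIRECTIVE_IGNORE_CONFIGS, "true");
    System.out.println(shouldUpdateConfigurations(props)); // false: updates skipped
  }
}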
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index d6b8ffc..3db844a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -76,12 +76,13 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
Map<String, Map<String, String>> currentConfigurations,
Map<String, Map<String, String>> kerberosConfigurations,
boolean includeAmbariIdentity,
- Map<String, Set<String>> propertiesToBeIgnored) throws AmbariException {
+ Map<String, Set<String>> propertiesToBeIgnored,
+ boolean excludeHeadless) throws AmbariException {
List<Component> components = new ArrayList<>();
for (ServiceComponentHost each : schToProcess) {
components.add(Component.fromServiceComponentHost(each));
}
- processServiceComponents(cluster, kerberosDescriptor, components, identityFilter, dataDirectory, currentConfigurations, kerberosConfigurations, includeAmbariIdentity, propertiesToBeIgnored);
+ processServiceComponents(cluster, kerberosDescriptor, components, identityFilter, dataDirectory, currentConfigurations, kerberosConfigurations, includeAmbariIdentity, propertiesToBeIgnored, excludeHeadless);
}
protected void processServiceComponents(Cluster cluster, KerberosDescriptor kerberosDescriptor,
@@ -90,7 +91,8 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
Map<String, Map<String, String>> currentConfigurations,
Map<String, Map<String, String>> kerberosConfigurations,
boolean includeAmbariIdentity,
- Map<String, Set<String>> propertiesToBeIgnored) throws AmbariException {
+ Map<String, Set<String>> propertiesToBeIgnored,
+ boolean excludeHeadless) throws AmbariException {
actionLog.writeStdOut("Processing Kerberos identities and configurations");
@@ -141,7 +143,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
// Add service-level principals (and keytabs)
kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, serviceIdentities,
- identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+ identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
@@ -156,7 +158,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
// Add component-level principals (and keytabs)
kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
- identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+ identityFilter, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
@@ -177,7 +179,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(identity);
kerberosHelper.addIdentities(kerberosIdentityDataFileWriter, componentIdentities,
- identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations);
+ identityFilter, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations, excludeHeadless);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
index a23ab5d..4396a2b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreateKeytabFilesServerAction.java
@@ -217,9 +217,14 @@ public class CreateKeytabFilesServerAction extends KerberosServerAction {
return commandReport;
}
+ boolean regenerateKeytabs = "true".equalsIgnoreCase(getCommandParameterValue(getCommandParameters(), REGENERATE_ALL));
+ boolean onlyKeytabWrite = "true".equalsIgnoreCase(identityRecord.get(KerberosIdentityDataFileReader.ONLY_KEYTAB_WRITE));
+ boolean grabKeytabFromCache = regenerateKeytabs && onlyKeytabWrite;
+ // when grabKeytabFromCache is true, the keytab is fetched from the cache and sent to the agent;
+ // this is the case for headless (cached) keytabs during a regenerate-all request
if (password == null) {
- if (hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME) || kerberosPrincipalHostDAO
- .exists(evaluatedPrincipal, hostEntity.getHostId())) {
+ if (!grabKeytabFromCache && (hostName.equalsIgnoreCase(KerberosHelper.AMBARI_SERVER_HOST_NAME) || kerberosPrincipalHostDAO
+ .exists(evaluatedPrincipal, hostEntity.getHostId()))) {
// There is nothing to do for this since it must already exist and we don't want to
// regenerate the keytab
message = String.format("Skipping keytab file for %s, missing password indicates nothing to do", evaluatedPrincipal);
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
index 2fd5abe..069c821 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/CreatePrincipalsServerAction.java
@@ -131,7 +131,8 @@ public class CreatePrincipalsServerAction extends KerberosServerAction {
boolean regenerateKeytabs = "true".equalsIgnoreCase(getCommandParameterValue(getCommandParameters(), REGENERATE_ALL));
if (regenerateKeytabs) {
- processPrincipal = true;
+ // do not re-create cached identities that can be reused as-is (headless identities)
+ processPrincipal = "false".equals(identityRecord.get(KerberosIdentityDataFileReader.ONLY_KEYTAB_WRITE).toLowerCase());
} else {
KerberosPrincipalEntity kerberosPrincipalEntity = kerberosPrincipalDAO.find(evaluatedPrincipal);
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
index 81e345a..ddf3d1b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFile.java
@@ -36,6 +36,6 @@ public interface KerberosIdentityDataFile extends KerberosDataFile {
String KEYTAB_FILE_GROUP_NAME = "keytab_file_group_name";
String KEYTAB_FILE_GROUP_ACCESS = "keytab_file_group_access";
String KEYTAB_FILE_IS_CACHABLE = "keytab_file_is_cachable";
-
+ String ONLY_KEYTAB_WRITE = "only_keytab_write";
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
index f55c6f4..ea742bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileWriter.java
@@ -68,7 +68,8 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
String principal, String principalType,
String keytabFilePath, String keytabFileOwnerName,
String keytabFileOwnerAccess, String keytabFileGroupName,
- String keytabFileGroupAccess, String keytabFileCanCache)
+ String keytabFileGroupAccess, String keytabFileCanCache,
+ String onlyKeytabWrite)
throws IOException {
super.appendRecord(hostName,
serviceName,
@@ -80,7 +81,8 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
keytabFileOwnerAccess,
keytabFileGroupName,
keytabFileGroupAccess,
- keytabFileCanCache);
+ keytabFileCanCache,
+ onlyKeytabWrite);
}
@Override
@@ -95,6 +97,7 @@ public class KerberosIdentityDataFileWriter extends AbstractKerberosDataFileWrit
KEYTAB_FILE_OWNER_ACCESS,
KEYTAB_FILE_GROUP_NAME,
KEYTAB_FILE_GROUP_ACCESS,
- KEYTAB_FILE_IS_CACHABLE);
+ KEYTAB_FILE_IS_CACHABLE,
+ ONLY_KEYTAB_WRITE);
}
}
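
With this change every record in the identity data file carries a twelfth column. An illustrative sketch of the resulting header row; the first eight column names are assumed from context (this hunk does not show them), while the last four are the constants visible in KerberosIdentityDataFile above:

public class IdentityRecordHeaderSketch {
  public static void main(String[] args) {
    String[] header = {
        // NOTE: the first eight names are assumed; only the last four are
        // verbatim from the KerberosIdentityDataFile interface in this commit.
        "hostname", "service", "component", "principal", "principal_type",
        "keytab_file_path", "keytab_file_owner_name", "keytab_file_owner_access",
        "keytab_file_group_name", "keytab_file_group_access",
        "keytab_file_is_cachable", "only_keytab_write" // new column
    };
    System.out.println(String.join(",", header)); // shape of the CSV header row
  }
}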
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
index 4e63f4a..f56e946 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareDisableKerberosServerAction.java
@@ -108,7 +108,7 @@ public class PrepareDisableKerberosServerAction extends AbstractPrepareKerberosS
Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
- configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore);
+ configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore, false);
// Add auth-to-local configurations to the set of changes
Map<String, Set<String>> authToLocalProperties = kerberosHelper.translateConfigurationSpecifications(kerberosDescriptor.getAllAuthToLocalProperties());
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
index e13f033..3ec84fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareEnableKerberosServerAction.java
@@ -117,7 +117,7 @@ public class PrepareEnableKerberosServerAction extends PrepareKerberosIdentities
Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
- configurations, kerberosConfigurations, true, propertiesToIgnore);
+ configurations, kerberosConfigurations, true, propertiesToIgnore, false);
// Calculate the set of configurations to update and replace any variables
// using the previously calculated Map of configurations for the host.
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
index 00c82a5..49828cb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/PrepareKerberosIdentitiesServerAction.java
@@ -33,6 +33,7 @@ import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -94,7 +95,7 @@ public class PrepareKerberosIdentitiesServerAction extends AbstractPrepareKerber
Map<String, Map<String, String>> configurations = kerberosHelper.calculateConfigurations(cluster, null, kerberosDescriptor, false, false);
processServiceComponentHosts(cluster, kerberosDescriptor, schToProcess, identityFilter, dataDirectory,
- configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore);
+ configurations, kerberosConfigurations, includeAmbariIdentity, propertiesToIgnore, !CollectionUtils.isEmpty(getHostFilter()));
kerberosHelper.applyStackAdvisorUpdates(cluster, services, configurations, kerberosConfigurations,
propertiesToIgnore, propertiesToRemove, true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
index 697f1d1..30bc47f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosAction.java
@@ -310,7 +310,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
// Add service-level principals (and keytabs)
kerberosHelper.addIdentities(null, serviceIdentities,
- null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+ null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, false);
propertiesToIgnore = gatherPropertiesToIgnore(serviceIdentities, propertiesToIgnore);
KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
@@ -325,7 +325,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
// Add component-level principals (and keytabs)
kerberosHelper.addIdentities(null, componentIdentities,
- null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations);
+ null, hostName, serviceName, componentName, kerberosConfigurations, currentConfigurations, false);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
@@ -346,7 +346,7 @@ public class PreconfigureKerberosAction extends AbstractUpgradeServerAction {
List<KerberosIdentityDescriptor> componentIdentities = Collections.singletonList(identity);
kerberosHelper.addIdentities(null, componentIdentities,
- null, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations);
+ null, KerberosHelper.AMBARI_SERVER_HOST_NAME, "AMBARI", componentName, kerberosConfigurations, currentConfigurations, false);
propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index b4ff5c1..20ff949 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -1549,7 +1549,7 @@ public class TestHeartbeatHandler {
kerberosIdentityDataFileWriter.writeRecord("c6403.ambari.apache.org", "HDFS", "DATANODE",
"dn/_HOST@_REALM", "service",
"/etc/security/keytabs/dn.service.keytab",
- "hdfs", "r", "hadoop", "", "false");
+ "hdfs", "r", "hadoop", "", "false", "false");
kerberosIdentityDataFileWriter.close();
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
index 95e5513..8ff5ad2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
@@ -149,7 +149,7 @@ public class AbstractPrepareKerberosServerActionTest {
identityFilter,
"",
configurations, kerberosConfigurations,
- false, propertiesToIgnore);
+ false, propertiesToIgnore, false);
verify(kerberosHelper);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
index 323ba8e..cfe0fee 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosIdentityDataFileTest.java
@@ -54,7 +54,7 @@ public class KerberosIdentityDataFileTest {
"principal" + i, "principal_type" + i, "keytabFilePath" + i,
"keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
"keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
- "false");
+ "false", "false");
}
// Add some odd characters
@@ -62,7 +62,7 @@ public class KerberosIdentityDataFileTest {
"principal", "principal_type", "keytabFilePath",
"'keytabFileOwnerName'", "<keytabFileOwnerAccess>",
"\"keytabFileGroupName\"", "keytab,File,Group,Access",
- "false");
+ "false", "false");
writer.close();
Assert.assertTrue(writer.isClosed());
@@ -153,7 +153,7 @@ public class KerberosIdentityDataFileTest {
"principal", "principal_type", "keytabFilePath",
"keytabFileOwnerName", "keytabFileOwnerAccess",
"keytabFileGroupName", "keytabFileGroupAccess",
- "true");
+ "true", "false");
writer.close();
Assert.assertTrue(writer.isClosed());
@@ -179,7 +179,7 @@ public class KerberosIdentityDataFileTest {
"principal", "principal_type", "keytabFilePath",
"keytabFileOwnerName", "keytabFileOwnerAccess",
"keytabFileGroupName", "keytabFileGroupAccess",
- "true");
+ "true", "false");
writer.close();
Assert.assertTrue(writer.isClosed());
http://git-wip-us.apache.org/repos/asf/ambari/blob/c7a3bcd9/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
index f63e6b8..a43db4d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerActionTest.java
@@ -120,7 +120,7 @@ public class KerberosServerActionTest {
"principal|_HOST|_REALM" + i, "principal_type", "keytabFilePath" + i,
"keytabFileOwnerName" + i, "keytabFileOwnerAccess" + i,
"keytabFileGroupName" + i, "keytabFileGroupAccess" + i,
- "false");
+ "false", "false");
}
writer.close();
[04/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if
unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/ranger_kms_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/ranger_kms_default.json b/ambari-server/src/test/python/common-services/configs/ranger_kms_default.json
new file mode 100644
index 0000000..b6de743
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/ranger_kms_default.json
@@ -0,0 +1,802 @@
+{
+ "localComponents": [
+ "SECONDARY_NAMENODE",
+ "HDFS_CLIENT",
+ "DATANODE",
+ "NAMENODE",
+ "ZOOKEEPER_SERVER",
+ "ZOOKEEPER_CLIENT",
+ "RANGER_USERSYNC",
+ "RANGER_ADMIN",
+ "RANGER_TAGSYNC",
+ "RANGER_KMS_SERVER"
+ ],
+ "configuration_attributes": {
+ "ranger-hdfs-audit": {},
+ "ssl-client": {},
+ "ranger-admin-site": {},
+ "ranger-hdfs-policymgr-ssl": {},
+ "tagsync-application-properties": {},
+ "ranger-env": {},
+ "usersync-log4j": {},
+ "admin-properties": {},
+ "ranger-ugsync-site": {},
+ "hdfs-site": {
+ "final": {
+ "dfs.datanode.data.dir": "true",
+ "dfs.namenode.http-address": "true",
+ "dfs.datanode.failed.volumes.tolerated": "true",
+ "dfs.support.append": "true",
+ "dfs.namenode.name.dir": "true",
+ "dfs.webhdfs.enabled": "true"
+ }
+ },
+ "ranger-tagsync-site": {},
+ "zoo.cfg": {},
+ "hadoop-policy": {},
+ "hdfs-log4j": {},
+ "ranger-hdfs-plugin-properties": {},
+ "core-site": {
+ "final": {
+ "fs.defaultFS": "true"
+ }
+ },
+ "hadoop-env": {},
+ "zookeeper-log4j": {},
+ "ssl-server": {},
+ "ranger-site": {},
+ "admin-log4j": {},
+ "tagsync-log4j": {},
+ "ranger-hdfs-security": {},
+ "usersync-properties": {},
+ "zookeeper-env": {},
+ "cluster-env": {},
+ "dbks-site": {},
+ "kms-env": {},
+ "kms-log4j": {},
+ "kms-properties": {},
+ "kms-site": {},
+ "ranger-kms-security": {},
+ "ranger-kms-site": {},
+ "ranger-kms-policymgr-ssl": {},
+ "ranger-kms-audit": {}
+ },
+ "public_hostname": "c6401.ambari.apache.org",
+ "commandId": "9-1",
+ "hostname": "c6401.ambari.apache.org",
+ "kerberosCommandParams": [],
+ "serviceName": "RANGER_KMS",
+ "role": "RANGER_KMS_SERVER",
+ "forceRefreshConfigTagsBeforeExecution": [],
+ "requestId": 9,
+ "agentConfigParams": {
+ "agent": {
+ "parallel_execution": 0
+ }
+ },
+ "clusterName": "c1",
+ "commandType": "EXECUTION_COMMAND",
+ "taskId": 64,
+ "roleParams": {},
+ "configurationTags": {
+ "ranger-hdfs-audit": {
+ "tag": "version1466427664617"
+ },
+ "ssl-client": {
+ "tag": "version1"
+ },
+ "ranger-admin-site": {
+ "tag": "version1466427664621"
+ },
+ "ranger-hdfs-policymgr-ssl": {
+ "tag": "version1466427664617"
+ },
+ "tagsync-application-properties": {
+ "tag": "version1466427664621"
+ },
+ "ranger-env": {
+ "tag": "version1466427664621"
+ },
+ "usersync-log4j": {
+ "tag": "version1466427664621"
+ },
+ "admin-properties": {
+ "tag": "version1466427664621"
+ },
+ "ranger-ugsync-site": {
+ "tag": "version1466427664621"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "ranger-tagsync-site": {
+ "tag": "version1466427664621"
+ },
+ "zoo.cfg": {
+ "tag": "version1"
+ },
+ "hadoop-policy": {
+ "tag": "version1"
+ },
+ "hdfs-log4j": {
+ "tag": "version1"
+ },
+ "ranger-hdfs-plugin-properties": {
+ "tag": "version1466427664617"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hadoop-env": {
+ "tag": "version1"
+ },
+ "zookeeper-log4j": {
+ "tag": "version1"
+ },
+ "ssl-server": {
+ "tag": "version1"
+ },
+ "ranger-site": {
+ "tag": "version1466427664621"
+ },
+ "admin-log4j": {
+ "tag": "version1466427664621"
+ },
+ "tagsync-log4j": {
+ "tag": "version1466427664621"
+ },
+ "ranger-hdfs-security": {
+ "tag": "version1466427664617"
+ },
+ "usersync-properties": {
+ "tag": "version1466427664621"
+ },
+ "zookeeper-env": {
+ "tag": "version1"
+ },
+ "cluster-env": {
+ "tag": "version1"
+ },
+ "dbks-site": {
+ "tag": "version1"
+ },
+ "kms-env": {
+ "tag": "version1"
+ },
+ "kms-log4j": {
+ "tag": "version1"
+ },
+ "kms-properties": {
+ "tag": "version1"
+ },
+ "kms-site": {
+ "tag": "version1"
+ },
+ "ranger-kms-security": {
+ "tag": "version1"
+ },
+ "ranger-kms-site": {
+ "tag": "version1"
+ },
+ "ranger-kms-policymgr-ssl": {
+ "tag": "version1"
+ },
+ "ranger-kms-audit": {
+ "tag": "version1"
+ }
+ },
+ "roleCommand": "START",
+ "hostLevelParams": {
+ "agent_stack_retry_on_unavailability": "false",
+ "stack_name": "HDP",
+ "custom_mysql_jdbc_name": "mysql-connector-java.jar",
+ "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
+ "host_sys_prepped": "false",
+ "ambari_db_rca_username": "mapred",
+ "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
+ "agent_stack_retry_count": "5",
+ "stack_version": "2.5",
+ "jdk_name": "jdk-8u60-linux-x64.tar.gz",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "repository_version_id": "1",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "not_managed_hdfs_path_list": "[\"/tmp\"]",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "java_version": "8",
+ "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-776\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
+ "package_list": "[{\"name\":\"ranger_${stack_version}-admin\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"ranger_${stack_version}-usersync\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"ranger_${stack_version}-tagsync\",\"condition\":\"should_install_ranger_tagsync\",\"skipUpgrade\":false},{\"name\":\"ambari-logsearch-solr-client\",\"condition\":\"should_install_logsearch_solr_client\",\"skipUpgrade\":false}]",
+ "db_name": "ambari",
+ "group_list": "[\"ranger\",\"hadoop\",\"users\"]",
+ "agentCacheDir": "/var/lib/ambari-agent/cache",
+ "ambari_db_rca_password": "mapred",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "oracle_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//ojdbc6.jar",
+ "db_driver_filename": "mysql-connector-java.jar",
+ "user_list": "[\"zookeeper\",\"ambari-qa\",\"ranger\",\"hdfs\"]",
+ "clientsToUpdateConfigs": "[\"*\"]"
+ },
+ "commandParams": {
+ "service_package_folder": "common-services/RANGER/0.4.0/package",
+ "script": "scripts/ranger_usersync.py",
+ "hooks_folder": "HDP/2.0.6/hooks",
+ "version": "2.5.0.0-777",
+ "max_duration_for_retries": "0",
+ "command_retry_enabled": "false",
+ "command_timeout": "600",
+ "script_type": "PYTHON"
+ },
+ "forceRefreshConfigTags": [],
+ "stageId": 1,
+ "clusterHostInfo": {
+ "snamenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "ambari_server_use_ssl": [
+ "false"
+ ],
+ "all_ping_ports": [
+ "8670"
+ ],
+ "ranger_tagsync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_usersync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "slave_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "namenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "ambari_server_port": [
+ "8080"
+ ],
+ "ranger_admin_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_racks": [
+ "/default-rack"
+ ],
+ "all_ipv4_ips": [
+ "172.22.125.4"
+ ],
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_kms_server_hosts": [
+ "c6401.ambari.apache.org"
+ ]
+ },
+ "configurations": {
+ "ranger-hdfs-audit": {
+ "xasecure.audit.destination.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "xasecure.audit.destination.solr.urls": "",
+ "xasecure.audit.destination.solr.batch.filespool.dir": "/var/log/hadoop/hdfs/audit/solr/spool",
+ "xasecure.audit.destination.hdfs.batch.filespool.dir": "/var/log/hadoop/hdfs/audit/hdfs/spool",
+ "xasecure.audit.destination.hdfs": "true",
+ "xasecure.audit.destination.solr": "true",
+ "xasecure.audit.provider.summary.enabled": "false",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "xasecure.audit.is.enabled": "true"
+ },
+ "ssl-client": {
+ "ssl.client.truststore.reload.interval": "10000",
+ "ssl.client.keystore.password": "bigdata",
+ "ssl.client.truststore.type": "jks",
+ "ssl.client.keystore.location": "/etc/security/clientKeys/keystore.jks",
+ "ssl.client.truststore.location": "/etc/security/clientKeys/all.jks",
+ "ssl.client.truststore.password": "bigdata",
+ "ssl.client.keystore.type": "jks"
+ },
+ "ranger-admin-site": {
+ "ranger.admin.kerberos.cookie.domain": "",
+ "ranger.kms.service.user.hdfs": "hdfs",
+ "ranger.spnego.kerberos.principal": "",
+ "ranger.ldap.ad.url": "{{ranger_ug_ldap_url}}",
+ "ranger.plugins.hive.serviceuser": "hive",
+ "ranger.lookup.kerberos.keytab": "",
+ "ranger.plugins.kms.serviceuser": "kms",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "ranger.sso.browser.useragent": "Mozilla,chrome",
+ "ranger.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/ranger01",
+ "ranger.plugins.hbase.serviceuser": "hbase",
+ "ranger.plugins.hdfs.serviceuser": "hdfs",
+ "ranger.ldap.group.searchbase": "{{ranger_ug_ldap_group_searchbase}}",
+ "ranger.ldap.user.dnpattern": "uid={0},ou=users,dc=xasecure,dc=net",
+ "ranger.plugins.knox.serviceuser": "knox",
+ "ranger.ldap.base.dn": "dc=example,dc=com",
+ "ranger.sso.publicKey": "",
+ "ranger.admin.kerberos.cookie.path": "/",
+ "ranger.service.https.attrib.clientAuth": "want",
+ "ranger.jpa.jdbc.user": "{{ranger_db_user}}",
+ "ranger.ldap.ad.user.searchfilter": "(sAMAccountName={0})",
+ "ranger.ldap.group.roleattribute": "cn",
+ "ranger.plugins.kafka.serviceuser": "kafka",
+ "ranger.admin.kerberos.principal": "",
+ "ranger.ldap.ad.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.credential.provider.path": "/etc/ranger/admin/rangeradmin.jceks",
+ "ranger.ldap.referral": "ignore",
+ "ranger.service.http.port": "6080",
+ "ranger.ldap.user.searchfilter": "(uid={0})",
+ "ranger.plugins.atlas.serviceuser": "atlas",
+ "ranger.truststore.password": "changeit",
+ "ranger.ldap.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.password": "NONE",
+ "ranger.audit.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "ranger.lookup.kerberos.principal": "",
+ "ranger.service.https.port": "6182",
+ "ranger.plugins.storm.serviceuser": "storm",
+ "ranger.externalurl": "{{ranger_external_url}}",
+ "ranger.truststore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.kms.service.user.hive": "",
+ "ranger.https.attrib.keystore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.jpa.jdbc.dialect": "{{jdbc_dialect}}",
+ "ranger.service.host": "{{ranger_host}}",
+ "ranger.service.https.attrib.keystore.keyalias": "rangeradmin",
+ "ranger.service.https.attrib.keystore.pass": "xasecure",
+ "ranger.unixauth.remote.login.enabled": "true",
+ "ranger.jpa.jdbc.credential.alias": "rangeradmin",
+ "ranger.ldap.ad.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.username": "ranger_solr",
+ "ranger.sso.enabled": "false",
+ "ranger.audit.solr.urls": "",
+ "ranger.ldap.ad.domain": "",
+ "ranger.plugins.yarn.serviceuser": "yarn",
+ "ranger.audit.source.type": "solr",
+ "ranger.ldap.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.ldap.url": "{{ranger_ug_ldap_url}}",
+ "ranger.authentication.method": "UNIX",
+ "ranger.service.http.enabled": "true",
+ "ranger.ldap.group.searchfilter": "{{ranger_ug_ldap_group_searchfilter}}",
+ "ranger.ldap.ad.referral": "ignore",
+ "ranger.ldap.ad.base.dn": "dc=example,dc=com",
+ "ranger.jpa.jdbc.password": "_",
+ "ranger.spnego.kerberos.keytab": "",
+ "ranger.sso.providerurl": "",
+ "ranger.unixauth.service.hostname": "{{ugsync_host}}",
+ "ranger.admin.kerberos.keytab": "",
+ "ranger.admin.kerberos.token.valid.seconds": "30",
+ "ranger.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.unixauth.service.port": "5151"
+ },
+ "ranger-hdfs-policymgr-ssl": {
+ "xasecure.policymgr.clientssl.keystore": "/usr/hdp/current/hadoop-client/conf/ranger-plugin-keystore.jks",
+ "xasecure.policymgr.clientssl.truststore.password": "changeit",
+ "xasecure.policymgr.clientssl.keystore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.truststore": "/usr/hdp/current/hadoop-client/conf/ranger-plugin-truststore.jks",
+ "xasecure.policymgr.clientssl.truststore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.keystore.password": "myKeyFilePassword"
+ },
+ "tagsync-application-properties": {
+ "atlas.kafka.entities.group.id": "ranger_entities_consumer",
+ "atlas.kafka.zookeeper.connect": "c6401.ambari.apache.org:2181",
+ "atlas.kafka.bootstrap.servers": "localhost:6667"
+ },
+ "ranger-env": {
+ "ranger_solr_shards": "1",
+ "ranger_solr_config_set": "ranger_audits",
+ "ranger_user": "ranger",
+ "xml_configurations_supported": "true",
+ "ranger-atlas-plugin-enabled": "No",
+ "ranger-hbase-plugin-enabled": "No",
+ "ranger-yarn-plugin-enabled": "No",
+ "bind_anonymous": "false",
+ "ranger_admin_username": "amb_ranger_admin",
+ "admin_password": "admin",
+ "is_solrCloud_enabled": "true",
+ "ranger-storm-plugin-enabled": "No",
+ "ranger-hdfs-plugin-enabled": "No",
+ "ranger_group": "ranger",
+ "ranger-knox-plugin-enabled": "No",
+ "ranger_admin_log_dir": "/var/log/ranger/admin",
+ "ranger-kafka-plugin-enabled": "No",
+ "ranger_privelege_user_jdbc_url": "jdbc:mysql://c6401.ambari.apache.org:3306",
+ "ranger-hive-plugin-enabled": "No",
+ "xasecure.audit.destination.solr": "true",
+ "ranger_pid_dir": "/var/run/ranger",
+ "xasecure.audit.destination.hdfs": "true",
+ "admin_username": "admin",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "create_db_dbuser": "true",
+ "ranger_solr_collection_name": "ranger_audits",
+ "ranger_admin_password": "P1!qLEQwP24KVlWY",
+ "ranger_usersync_log_dir": "/var/log/ranger/usersync"
+ },
+ "usersync-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nlog4j.rootLogger = info,logFile\n\n# logFile\nlog4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.logFile.file=${logdir}/usersync.log\nl
og4j.appender.logFile.datePattern='.'yyyy-MM-dd\nlog4j.appender.logFile.layout=org.apache.log4j.PatternLayout\nlog4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n\n\n# console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.Target=System.out\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n"
+ },
+ "admin-properties": {
+ "db_user": "rangeradmin01",
+ "DB_FLAVOR": "MYSQL",
+ "db_password": "rangeradmin01",
+ "db_root_user": "root",
+ "policymgr_external_url": "http://c6401.ambari.apache.org:6080",
+ "db_name": "ranger01",
+ "db_host": "c6401.ambari.apache.org",
+ "db_root_password": "vagrant",
+ "SQL_CONNECTOR_JAR": "{{driver_curl_target}}"
+ },
+ "ranger-ugsync-site": {
+ "ranger.usersync.ldap.binddn": "",
+ "ranger.usersync.policymgr.username": "rangerusersync",
+ "ranger.usersync.policymanager.mockrun": "false",
+ "ranger.usersync.group.searchbase": "",
+ "ranger.usersync.ldap.bindalias": "testldapalias",
+ "ranger.usersync.truststore.file": "/usr/hdp/current/ranger-usersync/conf/mytruststore.jks",
+ "ranger.usersync.port": "5151",
+ "ranger.usersync.pagedresultssize": "500",
+ "ranger.usersync.group.memberattributename": "",
+ "ranger.usersync.kerberos.principal": "",
+ "ranger.usersync.source.impl.class": "org.apache.ranger.unixusersync.process.UnixUserGroupBuilder",
+ "ranger.usersync.ldap.referral": "ignore",
+ "ranger.usersync.group.searchfilter": "",
+ "ranger.usersync.ldap.user.objectclass": "person",
+ "ranger.usersync.logdir": "{{usersync_log_dir}}",
+ "ranger.usersync.ldap.user.searchfilter": "",
+ "ranger.usersync.ldap.groupname.caseconversion": "none",
+ "ranger.usersync.ldap.ldapbindpassword": "",
+ "ranger.usersync.unix.minUserId": "500",
+ "ranger.usersync.policymanager.maxrecordsperapicall": "1000",
+ "ranger.usersync.group.nameattribute": "",
+ "ranger.usersync.policymgr.alias": "ranger.usersync.policymgr.password",
+ "ranger.usersync.keystore.file": "/usr/hdp/current/ranger-usersync/conf/unixauthservice.jks",
+ "ranger.usersync.user.searchenabled": "false",
+ "ranger.usersync.group.usermapsyncenabled": "true",
+ "ranger.usersync.ldap.bindkeystore": "",
+ "ranger.usersync.ldap.user.groupnameattribute": "memberof, ismemberof",
+ "ranger.usersync.kerberos.keytab": "",
+ "ranger.usersync.passwordvalidator.path": "./native/credValidator.uexe",
+ "ranger.usersync.group.objectclass": "",
+ "ranger.usersync.ldap.user.searchscope": "sub",
+ "ranger.usersync.unix.password.file": "/etc/passwd",
+ "ranger.usersync.ldap.user.nameattribute": "",
+ "ranger.usersync.pagedresultsenabled": "true",
+ "ranger.usersync.policymanager.baseURL": "{{ranger_external_url}}",
+ "ranger.usersync.group.search.first.enabled": "false",
+ "ranger.usersync.group.searchenabled": "false",
+ "ranger.usersync.sink.impl.class": "org.apache.ranger.unixusersync.process.PolicyMgrUserGroupBuilder",
+ "ranger.usersync.ssl": "true",
+ "ranger.usersync.ldap.url": "",
+ "ranger.usersync.ldap.searchBase": "dc=hadoop,dc=apache,dc=org",
+ "ranger.usersync.policymgr.keystore": "/usr/hdp/current/ranger-usersync/conf/ugsync.jceks",
+ "ranger.usersync.ldap.user.searchbase": "",
+ "ranger.usersync.ldap.username.caseconversion": "none",
+ "ranger.usersync.credstore.filename": "/usr/hdp/current/ranger-usersync/conf/ugsync.jceks",
+ "ranger.usersync.keystore.password": "UnIx529p",
+ "ranger.usersync.unix.group.file": "/etc/group",
+ "ranger.usersync.filesource.file": "/tmp/usergroup.txt",
+ "ranger.usersync.group.searchscope": "",
+ "ranger.usersync.truststore.password": "changeit",
+ "ranger.usersync.enabled": "true",
+ "ranger.usersync.sleeptimeinmillisbetweensynccycle": "60000",
+ "ranger.usersync.filesource.text.delimiter": ","
+ },
+ "hdfs-site": {
+ "dfs.namenode.checkpoint.period": "21600",
+ "dfs.namenode.avoid.write.stale.datanode": "true",
+ "dfs.namenode.startup.delay.block.deletion.sec": "3600",
+ "dfs.namenode.checkpoint.txns": "1000000",
+ "dfs.content-summary.limit": "5000",
+ "dfs.support.append": "true",
+ "dfs.datanode.address": "0.0.0.0:50010",
+ "dfs.cluster.administrators": " hdfs",
+ "dfs.namenode.audit.log.async": "true",
+ "dfs.datanode.balance.bandwidthPerSec": "6250000",
+ "dfs.namenode.safemode.threshold-pct": "1",
+ "dfs.namenode.checkpoint.edits.dir": "${dfs.namenode.checkpoint.dir}",
+ "dfs.namenode.rpc-address": "c6401.ambari.apache.org:8020",
+ "dfs.permissions.enabled": "true",
+ "dfs.client.read.shortcircuit": "true",
+ "dfs.https.port": "50470",
+ "dfs.namenode.https-address": "c6401.ambari.apache.org:50470",
+ "nfs.file.dump.dir": "/tmp/.hdfs-nfs",
+ "dfs.blocksize": "134217728",
+ "dfs.blockreport.initialDelay": "120",
+ "dfs.journalnode.edits.dir": "/hadoop/hdfs/journalnode",
+ "dfs.namenode.fslock.fair": "false",
+ "dfs.datanode.max.transfer.threads": "4096",
+ "dfs.heartbeat.interval": "3",
+ "dfs.replication": "3",
+ "dfs.namenode.handler.count": "50",
+ "dfs.namenode.checkpoint.dir": "/grid/0/hadoop/hdfs/namesecondary",
+ "fs.permissions.umask-mode": "022",
+ "dfs.namenode.stale.datanode.interval": "30000",
+ "dfs.datanode.ipc.address": "0.0.0.0:8010",
+ "dfs.datanode.failed.volumes.tolerated": "0",
+ "dfs.datanode.data.dir": "/grid/0/hadoop/hdfs/data",
+ "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",
+ "dfs.webhdfs.enabled": "true",
+ "dfs.encrypt.data.transfer.cipher.suites": "AES/CTR/NoPadding",
+ "dfs.namenode.accesstime.precision": "0",
+ "dfs.datanode.https.address": "0.0.0.0:50475",
+ "dfs.namenode.write.stale.datanode.ratio": "1.0f",
+ "dfs.namenode.secondary.http-address": "c6401.ambari.apache.org:50090",
+ "nfs.exports.allowed.hosts": "* rw",
+ "dfs.datanode.http.address": "0.0.0.0:50075",
+ "dfs.datanode.du.reserved": "33011188224",
+ "dfs.client.read.shortcircuit.streams.cache.size": "4096",
+ "dfs.http.policy": "HTTP_ONLY",
+ "dfs.block.access.token.enable": "true",
+ "dfs.client.retry.policy.enabled": "false",
+ "dfs.namenode.name.dir.restore": "true",
+ "dfs.permissions.superusergroup": "hdfs",
+ "dfs.journalnode.https-address": "0.0.0.0:8481",
+ "dfs.journalnode.http-address": "0.0.0.0:8480",
+ "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+ "dfs.namenode.avoid.read.stale.datanode": "true",
+ "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
+ "dfs.datanode.data.dir.perm": "750",
+ "dfs.encryption.key.provider.uri": "",
+ "dfs.replication.max": "50",
+ "dfs.namenode.name.dir": "/grid/0/hadoop/hdfs/namenode"
+ },
+ "ranger-tagsync-site": {
+ "ranger.tagsync.atlas.to.ranger.service.mapping": "",
+ "ranger.tagsync.dest.ranger.ssl.config.filename": "/usr/hdp/current/ranger-tagsync/conf/mytruststore.jks",
+ "ranger.tagsync.source.file.check.interval.millis": "",
+ "ranger.tagsync.logdir": "/var/log/ranger/tagsync",
+ "ranger.tagsync.source.atlasrest.download.interval.millis": "",
+ "ranger.tagsync.keystore.filename": "/usr/hdp/current/ranger-tagsync/conf/rangertagsync.jceks",
+ "ranger.tagsync.source.atlasrest.endpoint": "",
+ "ranger.tagsync.dest.ranger.username": "rangertagsync",
+ "ranger.tagsync.kerberos.principal": "",
+ "ranger.tagsync.dest.ranger.endpoint": "{{ranger_external_url}}",
+ "ranger.tagsync.atlas.custom.resource.mappers": "",
+ "ranger.tagsync.kerberos.keytab": "",
+ "ranger.tagsync.source.atlas": "false",
+ "ranger.tagsync.source.atlasrest": "false",
+ "ranger.tagsync.source.file": "false",
+ "ranger.tagsync.source.file.filename": ""
+ },
+ "zoo.cfg": {
+ "clientPort": "2181",
+ "autopurge.purgeInterval": "24",
+ "syncLimit": "5",
+ "dataDir": "/grid/0/hadoop/zookeeper",
+ "initLimit": "10",
+ "tickTime": "2000",
+ "autopurge.snapRetainCount": "30"
+ },
+ "hadoop-policy": {
+ "security.job.client.protocol.acl": "*",
+ "security.job.task.protocol.acl": "*",
+ "security.datanode.protocol.acl": "*",
+ "security.namenode.protocol.acl": "*",
+ "security.client.datanode.protocol.acl": "*",
+ "security.inter.tracker.protocol.acl": "*",
+ "security.refresh.usertogroups.mappings.protocol.acl": "hadoop",
+ "security.client.protocol.acl": "*",
+ "security.refresh.policy.protocol.acl": "hadoop",
+ "security.admin.operations.protocol.acl": "hadoop",
+ "security.inter.datanode.protocol.acl": "*"
+ },
+ "hdfs-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n\n\n# Define some default values that can be overridden by system properties\n# To change daemon root logger use hadoop_root_logger in hadoop-env\nhadoop.root.logger=I
NFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n# Define the root logger to the system property \"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger}, EventCounter\n\n# Logging Threshold\nlog4j.threshhold=ALL\n\n#\n# Daily Rolling File Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n# Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n# Pattern format: Date LogLevel LoggerName LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\n# Debugging Pattern format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if you want to use this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.conso
le.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n#\n# TaskLog Appender\n#\n\n#Default values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\n\n#\n#Security audit appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nh
adoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n# hdfs audit logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\
nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# NameNode metrics logging.\n# The default is to retain two namenode-metrics.log files up to 64MB each.\n#\nnamenode.metrics.logger=INFO,NullAppender\nlog4j.logger.NameNodeMetricsLog=${namenode.metrics.logger}\nlog4j.additivity.NameNodeMetricsLog=false\nlog4j.appender.NNMETRICSRFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.NNMETRICSRFA.File=${hadoop.log.dir}/namenode-metrics.log\nlog4j.appender.NNMETRICSRFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.NNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n\nlog4j.appender.NNMETRICSRFA.MaxBackupIndex=1\nlog4j.appender.NNMETRICSRFA.MaxFileSize=64MB\n\n#\n# mapred audit logging\n#\nmapred.audit
.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# Rolling File Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n# Logfile size and and 30-day backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n# Custom Logging levels\n\n
hadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n# Jets3t library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n# Null Appender\n# Trap security logger on the hadoop client side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n# Event Counter Appender\n# Sends counts of logging messages at different severity levels to Hadoop Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n\n# Removes \"deprecated\" messages\nlog4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN\n\n#\n# HDFS block state change log from block manager\n#\n# Uncomment the following to suppress normal block state change\n# messages from BlockManager in NameNode.\n#log4j.logger.BlockStateChange=W
ARN"
+ },
+ "ranger-hdfs-plugin-properties": {
+ "hadoop.rpc.protection": "authentication",
+ "ranger-hdfs-plugin-enabled": "No",
+ "REPOSITORY_CONFIG_USERNAME": "hadoop",
+ "policy_user": "ambari-qa",
+ "common.name.for.certificate": "",
+ "REPOSITORY_CONFIG_PASSWORD": "hadoop"
+ },
+ "core-site": {
+ "hadoop.proxyuser.root.hosts": "*",
+ "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
+ "fs.trash.interval": "360",
+ "ha.failover-controller.active-standby-elector.zk.op.retries": "120",
+ "hadoop.http.authentication.simple.anonymous.allowed": "true",
+ "hadoop.security.authentication": "simple",
+ "hadoop.proxyuser.root.groups": "*",
+ "ipc.client.connection.maxidletime": "30000",
+ "hadoop.security.key.provider.path": "",
+ "mapreduce.jobtracker.webinterface.trusted": "false",
+ "hadoop.security.authorization": "false",
+ "net.topology.script.file.name": "/etc/hadoop/conf/topology_script.py",
+ "ipc.server.tcpnodelay": "true",
+ "ipc.client.connect.max.retries": "50",
+ "hadoop.security.auth_to_local": "DEFAULT",
+ "io.file.buffer.size": "131072",
+ "hadoop.proxyuser.hdfs.hosts": "*",
+ "hadoop.proxyuser.hdfs.groups": "*",
+ "ipc.client.idlethreshold": "8000",
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020",
+ "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec"
+ },
+ "hadoop-env": {
+ "keyserver_port": "",
+ "proxyuser_group": "users",
+ "hdfs_user_nproc_limit": "65536",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "hdfs_user_nofile_limit": "128000",
+ "hdfs_user": "hdfs",
+ "keyserver_host": " ",
+ "namenode_opt_maxnewsize": "128m",
+ "namenode_opt_maxpermsize": "256m",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hadoop Configuration Directory\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to H
ADOOP_OPTS when specified\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\n\n{% if java_version < 8 %}\nSHARED_HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{na
menode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT\"\nexport HADOOP_NAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\\\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtno
de_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\\\" ${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS\"\n\n{% else %}\nSHARED_HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_h
eapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT\"\nexport HADOOP_NAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\\\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\\\" ${HADOOP_SECONDARYNAMENO
DE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n{% endif %}\n\nHADOOP_NFS3_OPTS=\"-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\n\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The scheduling pr
iority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Add database libraries\nJAVA_JDBC_LIBS=\"\"\nif [ -d \"/usr/share/java\" ]; then\n for jarFile in `ls /usr/share/java | grep -E \"(mysql|ojdbc|postgresql|sqljdbc)\" 2>/dev/null`\n do\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\n done\nfi\n\n# Add libraries to the hadoop classpath - some may not need a colon as they already include it\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n# Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}\n\nexport HADOOP_OPTS=\"-Dhdp.version=$HDP_VERSION $HADOOP_OPTS\"\n\n{% if is_datanode_max_locked_memory_set %}\n# Fix temporary bug, when ulimit from conf files is not picked up, without full relogin. \n# Makes sense to fix only when runing DN as root \nif [ \"$command\" == \"datanode\" ] && [ \"$EUID\" -eq 0 ] && [ -n \"$HADOOP_SECURE_
DN_USER\" ]; then\n ulimit -l {{datanode_max_locked_memory}}\nfi\n{% endif %}",
+ "namenode_heapsize": "1024m",
+ "namenode_opt_newsize": "128m",
+ "nfsgateway_heapsize": "1024",
+ "dtnode_heapsize": "1024m",
+ "hadoop_root_logger": "INFO,RFA",
+ "hadoop_heapsize": "1024",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop",
+ "namenode_opt_permsize": "128m",
+ "hdfs_tmp_dir": "/tmp"
+ },
+ "zookeeper-log4j": {
+ "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n#\n# ZooKeeper Logging Configuration\n#\n\n# DEFAULT: console appender only\nlog4j.rootLogger=INFO, CONSOLE\n\n# Example with rolling log file\n#log4j.root
Logger=DEBUG, CONSOLE, ROLLINGFILE\n\n# Example with rolling log file and tracing\n#log4j.rootLogger=TRACE, CONSOLE, ROLLINGFILE, TRACEFILE\n\n#\n# Log INFO level and above messages to the console\n#\nlog4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\nlog4j.appender.CONSOLE.Threshold=INFO\nlog4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n\n\n#\n# Add ROLLINGFILE to rootLogger to get log file output\n# Log DEBUG level and above messages to a log file\nlog4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender\nlog4j.appender.ROLLINGFILE.Threshold=DEBUG\nlog4j.appender.ROLLINGFILE.File=zookeeper.log\n\n# Max log file size of 10MB\nlog4j.appender.ROLLINGFILE.MaxFileSize=10MB\n# uncomment the next line to limit number of backup files\n#log4j.appender.ROLLINGFILE.MaxBackupIndex=10\n\nlog4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.ROLLINGFILE.
layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n\n\n\n#\n# Add TRACEFILE to rootLogger to get log file output\n# Log DEBUG level and above messages to a log file\nlog4j.appender.TRACEFILE=org.apache.log4j.FileAppender\nlog4j.appender.TRACEFILE.Threshold=TRACE\nlog4j.appender.TRACEFILE.File=zookeeper_trace.log\n\nlog4j.appender.TRACEFILE.layout=org.apache.log4j.PatternLayout\n### Notice we are including log4j's NDC here (%x)\nlog4j.appender.TRACEFILE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L][%x] - %m%n"
+ },
+ "ssl-server": {
+ "ssl.server.keystore.location": "/etc/security/serverKeys/keystore.jks",
+ "ssl.server.keystore.keypassword": "bigdata",
+ "ssl.server.truststore.location": "/etc/security/serverKeys/all.jks",
+ "ssl.server.keystore.password": "bigdata",
+ "ssl.server.truststore.password": "bigdata",
+ "ssl.server.truststore.type": "jks",
+ "ssl.server.keystore.type": "jks",
+ "ssl.server.truststore.reload.interval": "10000"
+ },
+ "ranger-site": {},
+ "admin-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nlog4j.rootLogger = warn,xa_log_appender\n\n\n# xa_logger\nlog4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.xa_log_appender.
file=${logdir}/xa_portal.log\nlog4j.appender.xa_log_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.xa_log_appender.append=true\nlog4j.appender.xa_log_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.xa_log_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n\n# xa_log_appender : category and additivity\nlog4j.category.org.springframework=warn,xa_log_appender\nlog4j.additivity.org.springframework=false\n\nlog4j.category.org.apache.ranger=info,xa_log_appender\nlog4j.additivity.org.apache.ranger=false\n\nlog4j.category.xa=info,xa_log_appender\nlog4j.additivity.xa=false\n\n# perf_logger\nlog4j.appender.perf_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.perf_appender.file=${logdir}/ranger_admin_perf.log\nlog4j.appender.perf_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.perf_appender.append=true\nlog4j.appender.perf_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.perf_appender.layout.ConversionPattern=%d [%t] %m%n\n
\n\n# sql_appender\nlog4j.appender.sql_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.sql_appender.file=${logdir}/xa_portal_sql.log\nlog4j.appender.sql_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.sql_appender.append=true\nlog4j.appender.sql_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.sql_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n\n\n# sql_appender : category and additivity\nlog4j.category.org.hibernate.SQL=warn,sql_appender\nlog4j.additivity.org.hibernate.SQL=false\n\nlog4j.category.jdbc.sqlonly=fatal,sql_appender\nlog4j.additivity.jdbc.sqlonly=false\n\nlog4j.category.jdbc.sqltiming=warn,sql_appender\nlog4j.additivity.jdbc.sqltiming=false\n\nlog4j.category.jdbc.audit=fatal,sql_appender\nlog4j.additivity.jdbc.audit=false\n\nlog4j.category.jdbc.resultset=fatal,sql_appender\nlog4j.additivity.jdbc.resultset=false\n\nlog4j.category.jdbc.connection=fatal,sql_appender\nlog4j.additivity.jdbc.connection=false"
+ },
+ "tagsync-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nlog4j.rootLogger = info,logFile\n\n# logFile\nlog4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.logFile.file=${logdir}/tagsync.log\n
log4j.appender.logFile.datePattern='.'yyyy-MM-dd\nlog4j.appender.logFile.layout=org.apache.log4j.PatternLayout\nlog4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n\n\n# console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.Target=System.out\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n"
+ },
+ "ranger-hdfs-security": {
+ "ranger.plugin.hdfs.policy.source.impl": "org.apache.ranger.admin.client.RangerAdminRESTClient",
+ "ranger.plugin.hdfs.service.name": "{{repo_name}}",
+ "ranger.plugin.hdfs.policy.cache.dir": "/etc/ranger/{{repo_name}}/policycache",
+ "ranger.plugin.hdfs.policy.pollIntervalMs": "30000",
+ "ranger.plugin.hdfs.policy.rest.url": "{{policymgr_mgr_url}}",
+ "ranger.plugin.hdfs.policy.rest.ssl.config.file": "/etc/hadoop/conf/ranger-policymgr-ssl.xml",
+ "xasecure.add-hadoop-authorization": "true"
+ },
+ "usersync-properties": {},
+ "zookeeper-env": {
+ "zk_log_dir": "/var/log/zookeeper",
+ "content": "\nexport JAVA_HOME={{java64_home}}\nexport ZOOKEEPER_HOME={{zk_home}}\nexport ZOO_LOG_DIR={{zk_log_dir}}\nexport ZOOPIDFILE={{zk_pid_file}}\nexport SERVER_JVMFLAGS={{zk_server_heapsize}}\nexport JAVA=$JAVA_HOME/bin/java\nexport CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*\n\n{% if security_enabled %}\nexport SERVER_JVMFLAGS=\"$SERVER_JVMFLAGS -Djava.security.auth.login.config={{zk_server_jaas_file}}\"\nexport CLIENT_JVMFLAGS=\"$CLIENT_JVMFLAGS -Djava.security.auth.login.config={{zk_client_jaas_file}}\"\n{% endif %}",
+ "zk_server_heapsize": "1024m",
+ "zk_pid_dir": "/var/run/zookeeper",
+ "zk_user": "zookeeper"
+ },
+ "cluster-env": {
+ "security_enabled": "false",
+ "override_uid": "true",
+ "fetch_nonlocal_groups": "true",
+ "one_dir_per_partition": "true",
+ "repo_ubuntu_template": "{{package_type}} {{base_url}} {{components}}",
+ "ignore_groupsusers_create": "false",
+ "alerts_repeat_tolerance": "1",
+ "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
+ "kerberos_domain": "EXAMPLE.COM",
+ "manage_dirs_on_root": "true",
+ "recovery_lifetime_max_count": "1024",
+ "recovery_type": "AUTO_START",
+ "ignore_bad_mounts": "false",
+ "recovery_window_in_minutes": "60",
+ "user_group": "hadoop",
+ "stack_name": "HDP",
+ "stack_root": "{\"HDP\": \"/usr/hdp\"}",
+ "stack_tools": "{\n \"HDP\": { \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}\n}",
+ "recovery_retry_interval": "5",
+ "recovery_enabled": "true",
+ "recovery_max_count": "6",
+ "repo_suse_rhel_template": "[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0",
+ "managed_hdfs_resource_property_names": "",
+ "smokeuser": "ambari-qa"
+ },
+ "dbks-site": {
+ "ranger.ks.jpa.jdbc.credential.provider.path": "/etc/ranger/kms/rangerkms.jceks",
+ "ranger.ks.kerberos.keytab": "/etc/security/keytabs/rangerkms.service.keytab",
+ "ranger.ks.hsm.partition.password": "_",
+ "ranger.ks.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.ks.jpa.jdbc.credential.alias": "ranger.ks.jdbc.password",
+ "ranger.ks.kerberos.principal": "rangerkms12/_HOST@EXAMPLE.COM",
+ "ranger.db.encrypt.key.password": "_",
+ "ranger.ks.hsm.enabled": "false",
+ "ranger.ks.jpa.jdbc.password": "_",
+ "ranger.ks.masterkey.credential.alias": "ranger.ks.masterkey.password",
+ "ranger.ks.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/rangerkms01",
+ "hadoop.kms.blacklist.DECRYPT_EEK": "hdfs",
+ "ranger.ks.jdbc.sqlconnectorjar": "{{ews_lib_jar_path}}",
+ "ranger.ks.jpa.jdbc.user": "{{db_user}}",
+ "ranger.ks.hsm.partition.password.alias": "ranger.kms.hsm.partition.password",
+ "ranger.ks.hsm.type": "LunaProvider",
+ "ranger.ks.hsm.partition.name": "par19",
+ "ranger.ks.jpa.jdbc.dialect": "{{jdbc_dialect}}"
+ },
+ "kms-env": {
+ "kms_group": "kms",
+ "kms_log_dir": "/var/log/ranger/kms",
+ "hsm_partition_password": "",
+ "kms_user": "kms",
+ "create_db_user": "true",
+ "kms_port": "9292"
+ },
+ "kms-log4j": {
+ "content": "\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. See accompanying LICENSE file.\n#\n\n# If the Java System property 'kms.log.dir' is not defined at KMS start up time\n# Setup sets its value to '${kms.home}/logs'\n\nlog4j.appender.kms=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kms.DatePattern='.'yyyy-MM-dd\nlog4j.appender.kms.File=${kms.log.dir}/kms.log\nlog4j.appender.kms.Append=true\nlog4j.appender.kms.layout=org.apache.log4j.PatternLayout\nlog4j
.appender.kms.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n\n\nlog4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kms-audit.DatePattern='.'yyyy-MM-dd\nlog4j.appender.kms-audit.File=${kms.log.dir}/kms-audit.log\nlog4j.appender.kms-audit.Append=true\nlog4j.appender.kms-audit.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n\n\nlog4j.logger.kms-audit=INFO, kms-audit\nlog4j.additivity.kms-audit=false\n\nlog4j.rootLogger=ALL, kms\nlog4j.logger.org.apache.hadoop.conf=ERROR\nlog4j.logger.org.apache.hadoop=INFO\nlog4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF"
+ },
+ "kms-properties": {
+ "REPOSITORY_CONFIG_USERNAME": "keyadmin",
+ "db_user": "rangerkms01",
+ "DB_FLAVOR": "MYSQL",
+ "db_password": "rangerkms01",
+ "KMS_MASTER_KEY_PASSWD": "StrongPassword01",
+ "db_root_user": "root",
+ "db_name": "rangerkms01",
+ "db_host": "c6401.ambari.apache.org",
+ "db_root_password": "vagrant",
+ "SQL_CONNECTOR_JAR": "{{driver_curl_target}}",
+ "REPOSITORY_CONFIG_PASSWORD": "keyadmin"
+ },
+ "kms-site": {
+ "hadoop.kms.proxyuser.ranger.hosts": "*",
+ "hadoop.kms.authentication.type": "simple",
+ "hadoop.kms.proxyuser.ranger.groups": "*",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.path": "/hadoop-kms/hadoop-auth-signature-secret",
+ "hadoop.kms.security.authorization.manager": "org.apache.ranger.authorization.kms.authorizer.RangerKmsAuthorizer",
+ "hadoop.kms.authentication.kerberos.name.rules": "DEFAULT",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "hadoop.kms.current.key.cache.timeout.ms": "30000",
+ "hadoop.kms.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "hadoop.kms.audit.aggregation.window.ms": "10000",
+ "hadoop.kms.proxyuser.ranger.users": "*",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type": "kerberos",
+ "hadoop.kms.key.provider.uri": "dbks://http@localhost:9292/kms",
+ "hadoop.security.keystore.JavaKeyStoreProvider.password": "none",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
+ "hadoop.kms.authentication.signer.secret.provider": "random",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string": "#HOSTNAME#:#PORT#,...",
+ "hadoop.kms.cache.enable": "true",
+ "hadoop.kms.cache.timeout.ms": "600000",
+ "hadoop.kms.authentication.kerberos.principal": "*"
+ },
+ "ranger-kms-audit": {
+ "xasecure.audit.destination.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "xasecure.audit.destination.solr.urls": "",
+ "xasecure.audit.destination.solr.batch.filespool.dir": "/var/log/ranger/kms/audit/solr/spool",
+ "xasecure.audit.destination.hdfs.batch.filespool.dir": "/var/log/ranger/kms/audit/hdfs/spool",
+ "xasecure.audit.destination.hdfs": "true",
+ "xasecure.audit.destination.solr": "true",
+ "xasecure.audit.provider.summary.enabled": "false",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "xasecure.audit.is.enabled": "true"
+ },
+ "ranger-kms-policymgr-ssl": {
+ "xasecure.policymgr.clientssl.keystore": "/usr/hdp/current/ranger-kms/conf/ranger-plugin-keystore.jks",
+ "xasecure.policymgr.clientssl.truststore.password": "changeit",
+ "xasecure.policymgr.clientssl.keystore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.truststore": "/usr/hdp/current/ranger-kms/conf/ranger-plugin-truststore.jks",
+ "xasecure.policymgr.clientssl.truststore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.keystore.password": "myKeyFilePassword"
+ },
+ "ranger-kms-security": {
+ "ranger.plugin.kms.policy.pollIntervalMs": "30000",
+ "ranger.plugin.kms.service.name": "{{repo_name}}",
+ "ranger.plugin.kms.policy.source.impl": "org.apache.ranger.admin.client.RangerAdminRESTClient",
+ "ranger.plugin.kms.policy.rest.ssl.config.file": "/etc/ranger/kms/conf/ranger-policymgr-ssl.xml",
+ "ranger.plugin.kms.policy.cache.dir": "/etc/ranger/{{repo_name}}/policycache",
+ "ranger.plugin.kms.policy.rest.url": "{{policymgr_mgr_url}}"
+ },
+ "ranger-kms-site": {
+ "ranger.service.https.port": "9393",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "xa.webapp.dir": "./webapp",
+ "ranger.service.host": "{{kms_host}}",
+ "ranger.service.shutdown.port": "7085",
+ "ranger.contextName": "/kms",
+ "ranger.service.http.port": "{{kms_port}}"
+ }
+ }
+}
[43/57] [abbrv] ambari git commit: AMBARI-21834. UI changes for Moving Masters. (Ishan via Jaimin)
Posted by lp...@apache.org.
AMBARI-21834. UI changes for Moving Masters. (Ishan via Jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ca87e8dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ca87e8dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ca87e8dd
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: ca87e8ddef14c02b5fbabc26e5672a157a6dbaec
Parents: 92bd10d
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Sep 11 11:08:23 2017 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Sep 11 11:08:23 2017 -0700
----------------------------------------------------------------------
.../rangerAdmin/step4_controller.js | 2 +-
.../main/service/reassign/step4_controller.js | 8 +++++++-
.../main/service/reassign_controller.js | 3 ++-
ambari-web/app/messages.js | 4 ++--
.../wizard/wizardProgressPageController.js | 20 +++++++++++++-------
.../views/main/service/reassign/step1_view.js | 19 ++++++++++++++++++-
.../service/reassign/step4_controller_test.js | 12 +++++++-----
7 files changed, 50 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
index f4b4a59..cdfb450 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
@@ -29,7 +29,7 @@ App.RAHighAvailabilityWizardStep4Controller = App.HighAvailabilityProgressPageCo
tasksMessagesPrefix: 'admin.ra_highAvailability.wizard.step',
stopAllServices: function () {
- this.stopServices();
+ this.stopServices([], true, true);
},
installRangerAdmin: function () {
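For context, the hunk above adopts the extended stopServices signature introduced later in this
commit (wizardProgressPageController.js). A minimal sketch of the call; the new third argument
requests a cluster-wide stop, so the service list is ignored:
    // sketch only: mirrors the new call in the hunk above
    this.stopServices([], true, true);  // sets request context "Stop all services"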
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/controllers/main/service/reassign/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step4_controller.js b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
index 9a40283..c610b13 100644
--- a/ambari-web/app/controllers/main/service/reassign/step4_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
@@ -231,7 +231,13 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
* make server call to stop services
*/
stopRequiredServices: function () {
- this.stopServices(this.get('wizardController.relatedServicesMap')[this.get('content.reassign.component_name')], true);
+ var componentName = this.get('content.reassign.component_name');
+ var servicesToStop = this.get('wizardController.relatedServicesMap')[componentName];
+ if (this.get('content.componentsToStopAllServices').contains(componentName)) {
+ this.stopServices(servicesToStop, true, true);
+ } else {
+ this.stopServices(servicesToStop, true);
+ }
},
createHostComponents: function () {
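The branch above keys off componentsToStopAllServices, added to reassign_controller.js below.
A minimal sketch of the resulting dispatch, using values from this commit:
    // NAMENODE / SECONDARY_NAMENODE: stop the whole cluster
    this.stopServices(servicesToStop, true, true);
    // any other component, e.g. HISTORYSERVER: stop only its related services
    this.stopServices(['MAPREDUCE2', 'PIG', 'OOZIE'], true);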
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/controllers/main/service/reassign_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign_controller.js b/ambari-web/app/controllers/main/service/reassign_controller.js
index 5b3e951..1e11eaf 100644
--- a/ambari-web/app/controllers/main/service/reassign_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign_controller.js
@@ -65,6 +65,7 @@ App.ReassignMasterController = App.WizardController.extend({
hasCheckDBStep: false,
componentsWithCheckDBStep: ['HIVE_METASTORE', 'HIVE_SERVER', 'OOZIE_SERVER'],
componentsWithoutSecurityConfigs: ['MYSQL_SERVER'],
+ componentsToStopAllServices: ['NAMENODE', 'SECONDARY_NAMENODE'],
reassignComponentsInMM: [],
configs: null,
configsAttributes: null
@@ -167,9 +168,9 @@ App.ReassignMasterController = App.WizardController.extend({
* Used to define list of services to stop/start.
*/
relatedServicesMap: {
- 'JOBTRACKER': ['PIG', 'OOZIE'],
'RESOURCEMANAGER': ['YARN', 'MAPREDUCE2', 'TEZ', 'PIG', 'OOZIE', 'SLIDER', 'SPARK'],
'APP_TIMELINE_SERVER': ['YARN', 'MAPREDUCE2', 'TEZ', 'OOZIE', 'SLIDER', 'SPARK'],
+ 'HISTORYSERVER': ['MAPREDUCE2', 'PIG', 'OOZIE'],
'HIVE_SERVER': ['HIVE', 'FALCON', 'ATLAS', 'OOZIE'],
'HIVE_METASTORE': ['HIVE', 'PIG', 'FALCON', 'ATLAS', 'OOZIE'],
'WEBHCAT_SERVER': ['HIVE'],
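A minimal usage sketch of the map above; the lookup mirrors stopRequiredServices in
step4_controller.js, and the values come from this diff:
    // services the wizard stops/starts when moving the JobHistory Server
    var related = this.get('wizardController.relatedServicesMap')['HISTORYSERVER'];
    // -> ['MAPREDUCE2', 'PIG', 'OOZIE']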
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index a2b9960..bec2ca5 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2308,8 +2308,8 @@ Em.I18n.translations = {
'services.reassign.step1.message1': 'This wizard will walk you through moving {0}.<br/>',
'services.reassign.step1.message2': 'The process to reassign {0} involves a combination of <b>automated steps</b> (that will be handled by the wizard) and ' +
'<b>manual steps</b> (that you must perform in sequence as instructed by the wizard).<br/><br/>',
- 'services.reassign.step1.message3': '<br/><b>All required services will be restarted as part of the wizard. You should plan a cluster maintenance window and prepare ' +
- 'for cluster downtime when moving {0}.</b>',
+ 'services.reassign.step1.message3': '<br/><p class="alert">Following services will be restarted as part of the wizard: <b>{0}</b>. You should plan a cluster maintenance window and prepare ' +
+ 'for cluster downtime when moving {1}.</p>',
'services.reassign.step2.header':'Assign Master',
'services.reassign.step2.currentHost':'Current:',
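message3 now takes two placeholders: the formatted service list first, then the component
display name. A minimal sketch of how step1_view.js (below) fills them in; the rendered list
string is illustrative, since the exact output of stringUtils.getFormattedStringFromArray is
not shown here:
    // sketch: two-argument format for the reworked message
    Em.I18n.t('services.reassign.step1.message3')
        .format('HDFS, YARN and MapReduce2', 'NameNode');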
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/mixins/wizard/wizardProgressPageController.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/wizardProgressPageController.js b/ambari-web/app/mixins/wizard/wizardProgressPageController.js
index 9d9d000..3dcde74 100644
--- a/ambari-web/app/mixins/wizard/wizardProgressPageController.js
+++ b/ambari-web/app/mixins/wizard/wizardProgressPageController.js
@@ -428,30 +428,36 @@ App.wizardProgressPageControllerMixin = Em.Mixin.create(App.InstallComponent, {
* make server call to stop services
* if stopListedServicesFlag == false; stop all services excluding the services passed as parameters
* if stopListedServicesFlag == true; stop only services passed as parameters
- * if no parameters are passed; stop all services
- * @param services, stopListedServicesFlag
+ * if namenode or secondary namenode then stop all services
+ * @param services, stopListedServicesFlag, stopAllServices
* @returns {$.ajax}
*/
- stopServices: function (services, stopListedServicesFlag) {
+ stopServices: function (services, stopListedServicesFlag, stopAllServices) {
+ var stopAllServices = stopAllServices || false;
var stopListedServicesFlag = stopListedServicesFlag || false;
var data = {
'ServiceInfo': {
'state': 'INSTALLED'
}
};
- if (services && services.length) {
+ if (stopAllServices) {
+ data.context = "Stop all services";
+ } else {
+ if(!services || !services.length) {
+ services = App.Service.find().mapProperty('serviceName').filter(function (service) {
+ return service != 'HDFS';
+ });
+ }
var servicesList;
if (stopListedServicesFlag) {
servicesList = services.join(',');
- } else {
+ } else {
servicesList = App.Service.find().mapProperty("serviceName").filter(function (s) {
return !services.contains(s)
}).join(',');
}
data.context = "Stop required services";
data.urlParams = "ServiceInfo/service_name.in(" + servicesList + ")";
- } else {
- data.context = "Stop all services";
}
return App.ajax.send({
name: 'common.services.update',
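Taken together, the reworked stopServices has three modes; a minimal sketch of each
(service names are illustrative only):
    this.stopServices([], true, true);          // stop all services
    this.stopServices(['HIVE', 'OOZIE'], true); // stop only the listed services
    this.stopServices(['HDFS'], false);         // stop all except the listed ones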
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/app/views/main/service/reassign/step1_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/reassign/step1_view.js b/ambari-web/app/views/main/service/reassign/step1_view.js
index ddabf14..b7787c0 100644
--- a/ambari-web/app/views/main/service/reassign/step1_view.js
+++ b/ambari-web/app/views/main/service/reassign/step1_view.js
@@ -18,17 +18,34 @@
var App = require('app');
+var stringUtils = require('utils/string_utils');
App.ReassignMasterWizardStep1View = Em.View.extend({
message: function () {
+ var componentName = this.get('controller.content.reassign.component_name');
+ var listOfServices;
+
+ if(this.get('controller.content.componentsToStopAllServices').contains(componentName)) {
+ listOfServices = App.Service.find().mapProperty('serviceName');
+ } else {
+ listOfServices = this.get('controller.target.reassignMasterController.relatedServicesMap')[componentName];
+ if(!listOfServices || !listOfServices.length) {
+ listOfServices = App.Service.find().mapProperty('serviceName').filter(function (service) {
+ return service != 'HDFS';
+ });
+ }
+ }
+
var messages = [
Em.I18n.t('services.reassign.step1.message1').format(this.get('controller.content.reassign.display_name')),
- Em.I18n.t('services.reassign.step1.message3').format(this.get('controller.content.reassign.display_name'))
+ Em.I18n.t('services.reassign.step1.message3').format(stringUtils.getFormattedStringFromArray(listOfServices),
+ this.get('controller.content.reassign.display_name'))
];
if (this.get('controller.content.hasManualSteps')) {
messages.splice(1,0, Em.I18n.t('services.reassign.step1.message2').format(this.get('controller.content.reassign.display_name')));
}
+
return messages;
}.property('controller.content.reassign.display_name','controller.content.hasManualSteps'),
http://git-wip-us.apache.org/repos/asf/ambari/blob/ca87e8dd/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
index 0b00412..5193b7a 100644
--- a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
+++ b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
@@ -260,9 +260,10 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
controller.stopServices.restore();
});
it('stopServices is called with valid list of services', function() {
- controller.set('content.reassign.component_name', 'JOBTRACKER');
+ controller.set('content.reassign.component_name', 'HISTORYSERVER');
+ controller.set('content.componentsToStopAllServices', ['NAMENODE', 'SECONDARY_NAMENODE'])
controller.stopRequiredServices();
- expect(controller.stopServices.calledWith(['PIG', 'OOZIE'], true)).to.be.true;
+ expect(controller.stopServices.calledWith(['MAPREDUCE2', 'PIG', 'OOZIE'], true)).to.be.true;
});
});
@@ -355,7 +356,8 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
describe('#stopServices()', function () {
it('request is sent', function () {
- controller.stopServices();
+ var servicesToStop;
+ controller.stopServices(servicesToStop, true, true);
var args = testHelpers.findAjaxRequest('name', 'common.services.update');
expect(args).exists;
});
@@ -933,9 +935,9 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
controller.startServices.restore();
});
it("component has related services", function() {
- controller.set('content.reassign.component_name', 'JOBTRACKER');
+ controller.set('content.reassign.component_name', 'HISTORYSERVER');
controller.startRequiredServices();
- expect(controller.startServices.calledWith(false, ['PIG', 'OOZIE'], true)).to.be.true;
+ expect(controller.startServices.calledWith(false, ['MAPREDUCE2', 'PIG', 'OOZIE'], true)).to.be.true;
});
it("component does not have related services", function() {
controller.set('content.reassign.component_name', 'C1');
[18/57] [abbrv] ambari git commit: AMBARI-21905 Log Search UI: implement combo search filter. (ababiichuk)
Posted by lp...@apache.org.
AMBARI-21905 Log Search UI: implement combo search filter. (ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a10e3887
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a10e3887
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a10e3887
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: a10e3887c714e6bdeb83c21b2d8ecfbba438639e
Parents: ab06654
Author: ababiichuk <ab...@hortonworks.com>
Authored: Thu Sep 7 18:13:39 2017 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Thu Sep 7 18:13:39 2017 +0300
----------------------------------------------------------------------
.../ambari-logsearch-web/package.json | 1 +
.../ambari-logsearch-web/src/app/app.module.ts | 10 +-
...service-logs-histogram-query-params.class.ts | 2 +
.../dropdown-list/dropdown-list.component.less | 2 +-
.../filter-text-field.component.html | 21 ---
.../filter-text-field.component.less | 33 ----
.../filter-text-field.component.spec.ts | 82 ---------
.../filter-text-field.component.ts | 87 ---------
.../filters-panel/filters-panel.component.html | 10 +-
.../filters-panel/filters-panel.component.less | 16 +-
.../filters-panel.component.spec.ts | 22 ++-
.../filters-panel/filters-panel.component.ts | 37 +++-
.../search-box/search-box.component.html | 31 ++++
.../search-box/search-box.component.less | 118 +++++++++++++
.../search-box/search-box.component.spec.ts | 63 +++++++
.../search-box/search-box.component.ts | 176 +++++++++++++++++++
.../src/app/components/variables.less | 24 ++-
.../src/app/models/app-state.model.ts | 4 +-
.../src/app/services/filtering.service.ts | 35 +++-
.../src/app/services/logs-container.service.ts | 6 +-
.../src/app/services/utils.service.ts | 4 +
ambari-logsearch/ambari-logsearch-web/yarn.lock | 4 +
22 files changed, 533 insertions(+), 255 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/package.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/package.json b/ambari-logsearch/ambari-logsearch-web/package.json
index 96733eb..92c5043 100644
--- a/ambari-logsearch/ambari-logsearch-web/package.json
+++ b/ambari-logsearch/ambari-logsearch-web/package.json
@@ -33,6 +33,7 @@
"jquery": "^1.12.4",
"moment": "^2.18.1",
"moment-timezone": "^0.5.13",
+ "ng2-auto-complete": "^0.12.0",
"ngx-bootstrap": "^1.6.6",
"rxjs": "^5.1.0",
"zone.js": "^0.8.4"
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
index c08cb3a..c15ecbc 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
@@ -27,7 +27,10 @@ import {TranslateHttpLoader} from '@ngx-translate/http-loader';
import {StoreModule} from '@ngrx/store';
import {MomentModule} from 'angular2-moment';
import {MomentTimezoneModule} from 'angular-moment-timezone';
+import {Ng2AutoCompleteModule} from 'ng2-auto-complete';
+
import {environment} from '@envs/environment';
+
import {mockApiDataService} from '@app/services/mock-api-data.service'
import {HttpClientService} from '@app/services/http-client.service';
import {ComponentActionsService} from '@app/services/component-actions.service';
@@ -59,7 +62,6 @@ import {MainContainerComponent} from '@app/components/main-container/main-contai
import {FiltersPanelComponent} from '@app/components/filters-panel/filters-panel.component';
import {FilterDropdownComponent} from '@app/components/filter-dropdown/filter-dropdown.component';
import {DropdownListComponent} from '@app/components/dropdown-list/dropdown-list.component';
-import {FilterTextFieldComponent} from '@app/components/filter-text-field/filter-text-field.component';
import {FilterButtonComponent} from '@app/components/filter-button/filter-button.component';
import {AccordionPanelComponent} from '@app/components/accordion-panel/accordion-panel.component';
import {LogsListComponent} from '@app/components/logs-list/logs-list.component';
@@ -71,6 +73,7 @@ import {LogsContainerComponent} from '@app/components/logs-container/logs-contai
import {ModalComponent} from '@app/components/modal/modal.component';
import {TimeZonePickerComponent} from '@app/components/timezone-picker/timezone-picker.component';
import {NodeBarComponent} from '@app/components/node-bar/node-bar.component';
+import {SearchBoxComponent} from '@app/components/search-box/search-box.component';
import {TimeZoneAbbrPipe} from '@app/pipes/timezone-abbr.pipe';
@@ -104,7 +107,6 @@ export function getXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSR
FiltersPanelComponent,
DropdownListComponent,
FilterDropdownComponent,
- FilterTextFieldComponent,
FilterButtonComponent,
AccordionPanelComponent,
LogsListComponent,
@@ -116,6 +118,7 @@ export function getXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSR
ModalComponent,
TimeZonePickerComponent,
NodeBarComponent,
+ SearchBoxComponent,
TimeZoneAbbrPipe
],
imports: [
@@ -133,7 +136,8 @@ export function getXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSR
}),
StoreModule.provideStore(reducer),
MomentModule,
- MomentTimezoneModule
+ MomentTimezoneModule,
+ Ng2AutoCompleteModule
],
providers: [
HttpClientService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.class.ts
index 87e82f6..572af03 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.class.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-histogram-query-params.class.ts
@@ -66,4 +66,6 @@ export class ServiceLogsHistogramQueryParams extends QueryParams {
clusters?: string;
iMessage?: string;
level?: string;
+ includeQuery?: string;
+ excludeQuery?: string;
}
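The two optional fields above carry the combo search filter criteria into the histogram
request. A hypothetical sketch; the JSON shape of the values is an assumption, not taken
from this diff:
    // sketch: plain object matching the optional fields declared above
    var histogramParams = {
      clusters: 'cl1',
      level: 'ERROR,FATAL',
      includeQuery: JSON.stringify([{log_message: 'timeout'}]),   // assumed shape
      excludeQuery: JSON.stringify([{log_message: 'retry'}])      // assumed shape
    };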
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.less
index d47160f..6faa192 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.less
@@ -19,7 +19,7 @@
@import '../variables';
:host {
- max-height: 500px; // TODO get rid of magic number, base on actual design
+ max-height: @dropdown-max-height;
overflow-y: auto;
.list-item-label {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.html
deleted file mode 100644
index 3f00e8b..0000000
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.html
+++ /dev/null
@@ -1,21 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<div class="input-group">
- <span class="input-group-addon">{{label | translate}}</span>
- <input type="text" class="form-control" [(ngModel)]="instantValue" (ngModelChange)="updateInstantValue($event)">
-</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.less
deleted file mode 100644
index 1395959..0000000
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.less
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-@import '../variables';
-
-.input-group {
- border: @input-border;
- border-right-width: 0;
-}
-
-.input-group-addon {
- border: none;
- background-color: transparent;
- text-transform: uppercase;
-
- & + input {
- border: none;
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.spec.ts
deleted file mode 100644
index 71039ed..0000000
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.spec.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
-import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {FormsModule} from '@angular/forms';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
-import {StoreModule} from '@ngrx/store';
-import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
-import {FilteringService} from '@app/services/filtering.service';
-import {UtilsService} from '@app/services/utils.service';
-import {ComponentActionsService} from '@app/services/component-actions.service';
-
-import {FilterTextFieldComponent} from './filter-text-field.component';
-
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
-describe('FilterTextFieldComponent', () => {
- let component: FilterTextFieldComponent;
- let fixture: ComponentFixture<FilterTextFieldComponent>;
- const filtering = {
- filters: {
- f: {}
- }
- };
-
- beforeEach(async(() => {
- TestBed.configureTestingModule({
- declarations: [FilterTextFieldComponent],
- imports: [
- FormsModule,
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- }),
- StoreModule.provideStore({
- appSettings
- })
- ],
- providers: [
- AppSettingsService,
- {
- provide: FilteringService,
- useValue: filtering
- },
- UtilsService,
- ComponentActionsService
- ],
- schemas: [CUSTOM_ELEMENTS_SCHEMA]
- })
- .compileComponents();
- }));
-
- beforeEach(() => {
- fixture = TestBed.createComponent(FilterTextFieldComponent);
- component = fixture.componentInstance;
- fixture.detectChanges();
- });
-
- it('should create component', () => {
- expect(component).toBeTruthy();
- });
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.ts
deleted file mode 100644
index 2b6bfea..0000000
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-text-field/filter-text-field.component.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import {Component, Input, forwardRef} from '@angular/core';
-import {ControlValueAccessor, NG_VALUE_ACCESSOR, FormGroup} from '@angular/forms';
-import {Subject} from 'rxjs/Subject';
-import 'rxjs/add/operator/debounceTime';
-import {UtilsService} from '@app/services/utils.service';
-
-@Component({
- selector: 'filter-text-field',
- templateUrl: './filter-text-field.component.html',
- styleUrls: ['./filter-text-field.component.less'],
- providers: [
- {
- provide: NG_VALUE_ACCESSOR,
- useExisting: forwardRef(() => FilterTextFieldComponent),
- multi: true
- }
- ]
-})
-export class FilterTextFieldComponent implements ControlValueAccessor {
-
- constructor(private utils: UtilsService) {
- this.valueSubject.debounceTime(this.debounceInterval).subscribe(value => this.updateValue({
- value
- }));
- }
-
- @Input()
- label: string;
-
- private selectedValue: string;
-
- private onChange: (fn: any) => void;
-
- private readonly debounceInterval = 1500;
-
- instantValue: string;
-
- private valueSubject = new Subject<string>();
-
- get value(): any {
- return this.selectedValue;
- }
-
- set value(newValue: any) {
- this.selectedValue = newValue;
- this.onChange(newValue);
- }
-
- updateValue(options: any) {
- const value = options && options.value;
- if (this.utils.valueHasChanged(this.selectedValue, value)) {
- this.value = value;
- }
- }
-
- writeValue() {
- }
-
- registerOnChange(callback: any): void {
- this.onChange = callback;
- }
-
- registerOnTouched() {
- }
-
- updateInstantValue(value: string): void {
- this.valueSubject.next(value);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
index 6df6988..5820b82 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.html
@@ -18,12 +18,12 @@
<form class="col-md-12" [formGroup]="filtersForm">
<div class="form-inline filter-input-container col-md-8">
<filter-dropdown [label]="filters.clusters.label" formControlName="clusters" [options]="filters.clusters.options"
- [defaultLabel]="filters.clusters.defaultLabel" [isMultipleChoice]="true"></filter-dropdown>
- <filter-text-field [label]="filters.text.label"
- formControlName="text"></filter-text-field>
+ [defaultLabel]="filters.clusters.defaultLabel" [isMultipleChoice]="true"
+ class="filter-input"></filter-dropdown>
+ <search-box formControlName="query" [items]="searchBoxItems" class="filter-input"></search-box>
<filter-dropdown formControlName="timeRange" [options]="filters.timeRange.options"
- [defaultLabel]="filters.timeRange.defaultLabel"></filter-dropdown>
- <timezone-picker></timezone-picker>
+ [defaultLabel]="filters.timeRange.defaultLabel" class="filter-input"></filter-dropdown>
+ <timezone-picker class="filter-input"></timezone-picker>
<!--button class="btn btn-success" type="button">
<span class="fa fa-search"></span>
</button-->
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less
index 9ab09ef..6b2408d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.less
@@ -24,7 +24,8 @@
background-color: @filters-panel-background-color;
.filter-input-container {
- .flex-vertical-align;
+ display: flex;
+ align-items: flex-start;
justify-content: flex-start;
.btn-success {
@@ -32,7 +33,7 @@
border-bottom-left-radius: 0;
}
- filter-dropdown, dropdown-button, timezone-picker {
+ .filter-input {
border: @input-border;
&:not(:last-child) {
@@ -40,12 +41,19 @@
}
&:first-child {
- border-radius: @button-border-radius 0 0 @button-border-radius;
+ border-top-left-radius: @button-border-radius;
+ border-bottom-left-radius: @button-border-radius;
}
&:last-child {
- border-radius: 0 @button-border-radius @button-border-radius 0;
+ border-top-right-radius: @button-border-radius;
+ border-bottom-right-radius: @button-border-radius;
}
}
+
+ search-box.filter-input:not(:last-child) {
+ border-right-width: @input-border-width;
+ margin-right: -1 * (@input-border-width);
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
index b1cf990..2ced41e 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
@@ -26,9 +26,16 @@ import {AppSettingsService, appSettings} from '@app/services/storage/app-setting
import {ClustersService, clusters} from '@app/services/storage/clusters.service';
import {ComponentsService, components} from '@app/services/storage/components.service';
import {HostsService, hosts} from '@app/services/storage/hosts.service';
+import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
+import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
+import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service';
+import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
+import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
import {FilteringService} from '@app/services/filtering.service';
import {HttpClientService} from '@app/services/http-client.service';
import {UtilsService} from '@app/services/utils.service';
+import {LogsContainerService} from '@app/services/logs-container.service';
import {FiltersPanelComponent} from './filters-panel.component';
@@ -56,7 +63,13 @@ describe('FiltersPanelComponent', () => {
appSettings,
clusters,
components,
- hosts
+ hosts,
+ auditLogs,
+ serviceLogs,
+ auditLogsFields,
+ serviceLogsFields,
+ serviceLogsHistogramData,
+ appState
}),
TranslateModule.forRoot({
provide: TranslateLoader,
@@ -69,7 +82,14 @@ describe('FiltersPanelComponent', () => {
ClustersService,
ComponentsService,
HostsService,
+ AuditLogsService,
+ ServiceLogsService,
+ AuditLogsFieldsService,
+ ServiceLogsFieldsService,
+ ServiceLogsHistogramDataService,
+ AppStateService,
FilteringService,
+ LogsContainerService,
{
provide: HttpClientService,
useValue: httpClient
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
index e407021..644048f 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.ts
@@ -18,7 +18,10 @@
import {Component} from '@angular/core';
import {FormGroup} from '@angular/forms';
+import {TranslateService} from '@ngx-translate/core';
import {FilteringService} from '@app/services/filtering.service';
+import {LogsContainerService} from '@app/services/logs-container.service';
+import {AppStateService} from '@app/services/storage/app-state.service';
@Component({
selector: 'filters-panel',
@@ -27,12 +30,38 @@ import {FilteringService} from '@app/services/filtering.service';
})
export class FiltersPanelComponent {
- constructor(private filtering: FilteringService) {
- this.filtering.loadClusters();
- this.filtering.loadComponents();
- this.filtering.loadHosts();
+ constructor(private translate: TranslateService, private filtering: FilteringService, private logsContainer: LogsContainerService, private appState: AppStateService) {
+ appState.getParameter('activeLogsType').subscribe(value => {
+ this.logsType = value;
+ logsContainer.logsTypeMap[value].fieldsModel.getAll().subscribe(fields => {
+ if (fields.length) {
+ const items = fields.filter(field => this.excludedParameters.indexOf(field.name) === -1).map(field => {
+ return {
+ name: field.displayName || field.name,
+ value: field.name
+ };
+ }),
+ labelKeys = items.map(item => item.name);
+ translate.get(labelKeys).first().subscribe(translation => this.searchBoxItems = items.map(item => {
+ return {
+ name: translation[item.name],
+ value: item.value
+ };
+ }));
+ }
+ })
+ });
+ filtering.loadClusters();
+ filtering.loadComponents();
+ filtering.loadHosts();
}
+ private readonly excludedParameters = ['cluster', 'host', 'level', 'type', 'logtime'];
+
+ private logsType: string; // TODO implement setting the parameter depending on user's navigation
+
+ searchBoxItems: any[] = [];
+
get filters(): any {
return this.filtering.filters;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
new file mode 100644
index 0000000..64e15dc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.html
@@ -0,0 +1,31 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<label class="parameter-label" *ngFor="let parameter of parameters">
+ {{parameter.label | translate}}:
+ <span class="parameter-value">{{parameter.value}}</span>
+ <span class="remove-parameter" (click)="removeParameter($event, parameter.id)">×</span>
+</label>
+<span class="active-parameter-label" *ngIf="isActive && activeItem">{{activeItem.name | translate}}:</span>
+<div [ngClass]="{'search-item-container': true, 'active': isActive, 'value': isValueInput}">
+ <input #parameterInput auto-complete [(ngModel)]="currentValue" [source]="items" [list-formatter]="itemsListFormatter"
+ display-property-name="name" (valueChanged)="onParameterNameChange($event)"
+ class="search-item-input parameter-input form-control">
+ <input #valueInput type="text" [(ngModel)]="currentValue" class="search-item-input value-input form-control"
+ (keyup)="onParameterValueChange($event)">
+ <div class="search-item-text" [innerHTML]="currentValue"></div>
+</div>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
new file mode 100644
index 0000000..cccf5d5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.less
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@import '../variables';
+
+@inactive-input-width: 1px;
+@label-margin: 2px;
+.collapsed-form-control {
+ width: 0;
+ padding: 0;
+}
+
+:host {
+ display: flex;
+ flex-wrap: wrap;
+ justify-content: flex-start;
+ align-items: center;
+ width: 100%;
+ border: @input-border;
+ cursor: text;
+
+ .parameter-label {
+ // TODO implement actual styles
+ margin: @label-margin;
+ padding: @label-margin;
+ background-color: @main-background-color;
+ font-size: 0.8em;
+
+ .parameter-value {
+ font-weight: normal;
+ }
+
+ .remove-parameter {
+ cursor: pointer;
+ }
+ }
+
+ .active-parameter-label {
+ font-weight: bold;
+ margin: 0 @label-margin;
+ }
+
+ .search-item-container {
+ position: relative;
+ min-width: @inactive-input-width;
+ height: @input-height;
+
+ .search-item-input {
+ border: none;
+ box-shadow: none;
+ }
+
+ .parameter-input {
+ width: @inactive-input-width;
+ }
+
+ .value-input {
+ .collapsed-form-control;
+ }
+
+ .search-item-text {
+ visibility: hidden;
+ padding: 0 @input-padding;
+ }
+
+ &.active {
+ min-width: @dropdown-min-width;
+
+ .parameter-input {
+ width: 100%;
+ }
+
+ .value-input {
+ .collapsed-form-control;
+ }
+
+ &.value {
+ /deep/ .ng2-auto-complete-wrapper, .parameter-input {
+ display: none;
+ }
+
+ .value-input {
+ width: 100%;
+ }
+ }
+ }
+
+ /deep/ .ng2-auto-complete {
+ cursor: pointer;
+ .dropdown-list-default;
+
+ > ul {
+ border: none;
+
+ li {
+ border: none;
+ background-color: initial;
+ .dropdown-item-default;
+ }
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
new file mode 100644
index 0000000..2b3a957
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {NO_ERRORS_SCHEMA} from '@angular/core';
+import {async, ComponentFixture, TestBed} from '@angular/core/testing';
+import {Http} from '@angular/http';
+import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
+import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {UtilsService} from '@app/services/utils.service';
+
+import {SearchBoxComponent} from './search-box.component';
+
+export function HttpLoaderFactory(http: Http) {
+ return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
+}
+
+describe('SearchBoxComponent', () => {
+ let component: SearchBoxComponent;
+ let fixture: ComponentFixture<SearchBoxComponent>;
+
+ beforeEach(async(() => {
+ TestBed.configureTestingModule({
+ declarations: [SearchBoxComponent],
+ imports: [
+ TranslateModule.forRoot({
+ provide: TranslateLoader,
+ useFactory: HttpLoaderFactory,
+ deps: [Http]
+ })
+ ],
+ providers: [
+ UtilsService
+ ],
+ schemas: [NO_ERRORS_SCHEMA]
+ })
+ .compileComponents();
+ }));
+
+ beforeEach(() => {
+ fixture = TestBed.createComponent(SearchBoxComponent);
+ component = fixture.componentInstance;
+ fixture.detectChanges();
+ });
+
+ it('should create component', () => {
+ expect(component).toBeTruthy();
+ });
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
new file mode 100644
index 0000000..82c455e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.ts
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {Component, OnInit, OnDestroy, Input, ViewChild, ElementRef, forwardRef} from '@angular/core';
+import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms';
+import {UtilsService} from '@app/services/utils.service';
+
+@Component({
+ selector: 'search-box',
+ templateUrl: './search-box.component.html',
+ styleUrls: ['./search-box.component.less'],
+ providers: [
+ {
+ provide: NG_VALUE_ACCESSOR,
+ useExisting: forwardRef(() => SearchBoxComponent),
+ multi: true
+ }
+ ]
+})
+export class SearchBoxComponent implements OnInit, OnDestroy, ControlValueAccessor {
+
+ constructor(private element: ElementRef, private utils: UtilsService) {
+ this.rootElement = element.nativeElement;
+ this.rootElement.addEventListener('click', this.onRootClick);
+ this.rootElement.addEventListener('keydown', this.onRootKeyDown);
+ }
+
+ ngOnInit() {
+ this.parameterInput = this.parameterInputRef.nativeElement;
+ this.valueInput = this.valueInputRef.nativeElement;
+ this.parameterInput.addEventListener('focus', this.onParameterInputFocus);
+ this.parameterInput.addEventListener('blur', this.onParameterInputBlur);
+ this.valueInput.addEventListener('blur', this.onValueInputBlur);
+ }
+
+ ngOnDestroy() {
+ this.rootElement.removeEventListener('click', this.onRootClick);
+ this.rootElement.removeEventListener('keydown', this.onRootKeyDown);
+ this.parameterInput.removeEventListener('focus', this.onParameterInputFocus);
+ this.parameterInput.removeEventListener('blur', this.onParameterInputBlur);
+ this.valueInput.removeEventListener('blur', this.onValueInputBlur);
+ }
+
+ private currentId: number = 0;
+
+ isActive: boolean = false;
+
+ isParameterInput: boolean = false;
+
+ isValueInput: boolean = false;
+
+ currentValue: string;
+
+ @Input()
+ items: any[] = [];
+
+ @ViewChild('parameterInput')
+ parameterInputRef: ElementRef;
+
+ @ViewChild('valueInput')
+ valueInputRef: ElementRef;
+
+ rootElement: HTMLElement;
+
+ parameterInput: HTMLElement;
+
+ valueInput: HTMLElement;
+
+ activeItem?: any;
+
+ parameters: any[] = [];
+
+ private onChange: (fn: any) => void;
+
+ private onRootClick = (): void => {
+ if (!this.isActive) {
+ this.parameterInput.focus();
+ }
+ };
+
+ private onRootKeyDown = (event: KeyboardEvent): void => {
+ if (this.utils.isEnterPressed(event)) {
+ event.preventDefault();
+ }
+ };
+
+ private onParameterInputFocus = (): void => {
+ this.isActive = true;
+ this.isValueInput = false;
+ this.isParameterInput = true;
+ };
+
+ private onParameterInputBlur = (): void => {
+ if (!this.isValueInput) {
+ this.clear();
+ }
+ };
+
+ private onValueInputBlur = (): void => {
+ if (!this.isParameterInput) {
+ this.clear();
+ }
+ };
+
+ clear(): void {
+ this.isActive = false;
+ this.activeItem = null;
+ this.currentValue = null;
+ }
+
+ itemsListFormatter(item: any): string {
+ return item.name;
+ }
+
+ onParameterNameChange(item: any): void {
+ if (item) {
+ this.isParameterInput = false;
+ this.isValueInput = true;
+ this.activeItem = item;
+ this.currentValue = '';
+ setTimeout(() => this.valueInput.focus(), 0);
+ }
+ }
+
+ onParameterValueChange(event: KeyboardEvent): void {
+ if (this.utils.isEnterPressed(event) && this.currentValue) {
+ this.parameters.push({
+ id: this.currentId++,
+ name: this.activeItem.value,
+ label: this.activeItem.name,
+ value: this.currentValue,
+ isExclude: false
+ });
+ this.currentValue = '';
+ this.activeItem = null;
+ this.isValueInput = false;
+ this.updateValue();
+ }
+ }
+
+ removeParameter(event: MouseEvent, id: number): void {
+ this.parameters = this.parameters.filter(parameter => parameter.id !== id);
+ this.updateValue();
+ event.stopPropagation();
+ }
+
+ updateValue() {
+ this.onChange(this.parameters);
+ }
+
+ writeValue() {
+ }
+
+ registerOnChange(callback: any): void {
+ this.onChange = callback;
+ }
+
+ registerOnTouched() {
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
index f72183c..3f59a0d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
@@ -20,7 +20,8 @@
@navbar-background-color: #323544;
@h1-vertical-margin: 20px;
@button-border-radius: 4px;
-@input-border: 1px solid #CFD3D7;
+@input-border-width: 1px;
+@input-border: @input-border-width solid #CFD3D7;
@button-border-radius: 4px;
@input-group-addon-padding: 6px 0 6px 12px;
@block-margin-top: 20px;
@@ -33,6 +34,10 @@
@filters-panel-padding: 10px 0;
@list-header-background-color: #F2F2F2;
@checkbox-top: 4px;
+@dropdown-min-width: 200px;
+@dropdown-max-height: 500px; // TODO get rid of magic number, base on actual design
+@input-height: 34px;
+@input-padding: 10px;
@fatal-color: #830A0A;
@error-color: #E81D1D;
@@ -100,6 +105,21 @@
left: 0;
}
+.dropdown-list-default {
+ line-height: 1;
+ border-radius: 2px;
+ font-size: 14px;
+ min-width: @dropdown-min-width;
+ background: #FFF;
+ color: #666;
+ border: 1px solid #CFD3D7;
+ padding: 5px 0;
+ margin: 2px 0 0;
+ text-align: left;
+ list-style: none;
+ box-shadow: 0 6px 12px rgba(0, 0, 0, .175);
+}
+
.dropdown-item-default {
display: block;
padding: 3px 20px;
@@ -113,6 +133,6 @@
&:hover {
color: #262626;
text-decoration: none;
- background-color: #f5f5f5;
+ background-color: #F5F5F5;
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
index 2995002..28ae763 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
@@ -22,6 +22,7 @@ export interface AppState {
isLoginInProgress: boolean;
isAuditLogsSet: boolean;
isServiceLogsSet: boolean;
+ activeLogsType?: string;
}
export const initialState: AppState = {
@@ -29,5 +30,6 @@ export const initialState: AppState = {
isInitialLoading: false,
isLoginInProgress: false,
isAuditLogsSet: false,
- isServiceLogsSet: false
+ isServiceLogsSet: false,
+ activeLogsType: 'serviceLogs' // TODO implement setting the parameter depending on user's navigation
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/services/filtering.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/filtering.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/filtering.service.ts
index 5b9e90d..38c063e 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/filtering.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/filtering.service.ts
@@ -254,7 +254,8 @@ export class FilteringService {
},
page: {
defaultValue: 0
- }
+ },
+ query: {}
};
private filtersFormItems = Object.keys(this.filters).reduce((currentObject, key) => {
@@ -298,7 +299,7 @@ export class FilteringService {
});
}
- private getStartTime(value: any, current: string): string {
+ private getStartTime = (value: any, current: string): string => {
let time;
if (value) {
const endTime = moment(moment(current).valueOf());
@@ -317,9 +318,9 @@ export class FilteringService {
}
}
return time ? time.toISOString() : '';
- }
+ };
- private getEndTime(value: any): string {
+ private getEndTime = (value: any): string => {
let time;
if (value) {
switch (value.type) {
@@ -337,16 +338,32 @@ export class FilteringService {
}
}
return time ? time.toISOString() : '';
+ };
+
+ private getQuery(isExclude: boolean): (value: any[]) => string {
+ return (value: any[]): string => {
+ let parameters;
+ if (value && value.length) {
+ parameters = value.filter(item => item.isExclude === isExclude).map(parameter => {
+ return {
+ [parameter.name]: parameter.value.replace(/\s/g, '+')
+ };
+ });
+ }
+ return parameters && parameters.length ? JSON.stringify(parameters) : '';
+ }
}
readonly valueGetters = {
- end_time: this.getEndTime.bind(this),
- start_time: this.getStartTime.bind(this),
- to: this.getEndTime.bind(this),
- from: this.getStartTime.bind(this),
+ end_time: this.getEndTime,
+ start_time: this.getStartTime,
+ to: this.getEndTime,
+ from: this.getStartTime,
sortType: value => value && value.type,
sortBy: value => value && value.key,
- page: value => value == null ? value : value.toString()
+ page: value => value == null ? value : value.toString(),
+ includeQuery: this.getQuery(false),
+ excludeQuery: this.getQuery(true)
};
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
index 702deab..a90d099 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
@@ -50,7 +50,8 @@ export class LogsContainerService {
hosts: ['host_name'],
sorting: ['sortType', 'sortBy'],
pageSize: ['pageSize'],
- page: ['page']
+ page: ['page'],
+ query: ['includeQuery', 'excludeQuery']
};
private readonly histogramFilters = {
@@ -58,7 +59,8 @@ export class LogsContainerService {
text: ['iMessage'],
timeRange: ['to', 'from'],
components: ['mustBe'],
- levels: ['level']
+ levels: ['level'],
+ query: ['includeQuery', 'excludeQuery']
};
readonly logsTypeMap = {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
index 9f6cacd..0f90ba3 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/utils.service.ts
@@ -43,4 +43,8 @@ export class UtilsService {
return valuesArray.join(',');
}
+ isEnterPressed(event: KeyboardEvent): boolean {
+ return event.keyCode === 13;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a10e3887/ambari-logsearch/ambari-logsearch-web/yarn.lock
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/yarn.lock b/ambari-logsearch/ambari-logsearch-web/yarn.lock
index 291b489..4883a15 100644
--- a/ambari-logsearch/ambari-logsearch-web/yarn.lock
+++ b/ambari-logsearch/ambari-logsearch-web/yarn.lock
@@ -3499,6 +3499,10 @@ negotiator@0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
+ng2-auto-complete@^0.12.0:
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/ng2-auto-complete/-/ng2-auto-complete-0.12.0.tgz#9a78c39c5012404e7bc8365c03815ab7f68cea3d"
+
ngx-bootstrap@^1.6.6:
version "1.6.6"
resolved "https://registry.yarnpkg.com/ngx-bootstrap/-/ngx-bootstrap-1.6.6.tgz#0057141cfbdd7e8a50e81bda735fad8e95acb0dd"
[55/57] [abbrv] ambari git commit: AMBARI-21307 Added LDAP
configuration provider for loading and maintaining the LDAP configuration in
the application
Posted by lp...@apache.org.
AMBARI-21307 Added LDAP configuration provider for loading and maintaining the LDAP configuration in the application
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f71fac3d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f71fac3d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f71fac3d
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: f71fac3dcea9c66376e02667d32d4c1d16baaca9
Parents: 12294a0
Author: lpuskas <lp...@apache.org>
Authored: Tue Aug 29 14:55:09 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:02 2017 +0200
----------------------------------------------------------------------
.../services/ldap/LdapConfigurationService.java | 1 +
.../AmbariConfigurationResourceProvider.java | 35 +++++-
.../ambari/server/events/AmbariEvent.java | 11 +-
.../events/AmbariLdapConfigChangedEvent.java | 37 ++++++
.../server/ldap/LdapConfigurationFactory.java | 2 +-
.../apache/ambari/server/ldap/LdapModule.java | 3 +
.../AmbariLdapConfigurationProvider.java | 114 +++++++++++++++++++
.../server/ldap/service/AmbariLdapFacade.java | 7 +-
.../server/orm/dao/AmbariConfigurationDAO.java | 48 ++++++++
.../orm/entities/AmbariConfigurationEntity.java | 4 +-
.../DefaultLdapConfigurationServiceTest.java | 10 +-
11 files changed, 260 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
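The diffs that follow bind the new provider in LdapModule and keep its cached
instance fresh via the Ambari event bus. As a minimal sketch of the intended
consumption pattern, mirroring the injection added to AmbariLdapFacade below
(the consumer class and the configuration id are illustrative, not part of
this commit):

import javax.inject.Inject;
import javax.inject.Provider;

import org.apache.ambari.server.events.AmbariEvent;
import org.apache.ambari.server.events.AmbariLdapConfigChangedEvent;
import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;

public class LdapConfigConsumerSketch {

  // Inject the provider rather than the configuration itself, so each call
  // to get() observes the most recently loaded instance.
  @Inject
  private Provider<AmbariLdapConfiguration> ldapConfigurationProvider;

  @Inject
  private AmbariEventPublisher publisher;

  public void refreshAndRead() {
    // Publishing this event makes AmbariLdapConfigurationProvider reload the
    // entity with the given id via its @Subscribe handler (see below).
    publisher.publish(new AmbariLdapConfigChangedEvent(
        AmbariEvent.AmbariEventType.LDAP_CONFIG_CHANGED, 1L));

    // Subsequent reads reflect the persisted state.
    AmbariLdapConfiguration current = ldapConfigurationProvider.get();
  }
}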
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
index fc6bd41..1b8427b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
@@ -82,6 +82,7 @@ public class LdapConfigurationService extends AmbariConfigurationService {
@Produces(MediaType.APPLICATION_JSON)
public Response validateConfiguration(LdapConfigurationRequest ldapConfigurationRequest) {
+ // check if the user is authorized to perform the operation
authorize();
Set<String> groups = Sets.newHashSet();
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
index 2302d8b..4f4cc70 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
@@ -35,6 +35,9 @@ import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.controller.utilities.PredicateHelper;
+import org.apache.ambari.server.events.AmbariEvent;
+import org.apache.ambari.server.events.AmbariLdapConfigChangedEvent;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;
@@ -113,6 +116,10 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
@Inject
private AmbariConfigurationDAO ambariConfigurationDAO;
+ @Inject
+ private AmbariEventPublisher publisher;
+
+
private Gson gson;
@AssistedInject
@@ -142,7 +149,18 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
}
LOGGER.info("Persisting new ambari configuration: {} ", ambariConfigurationEntity);
- ambariConfigurationDAO.create(ambariConfigurationEntity);
+
+ try {
+ ambariConfigurationDAO.create(ambariConfigurationEntity);
+ } catch (Exception e) {
+ LOGGER.error("Failed to create resource", e);
+ throw new ResourceAlreadyExistsException(e.getMessage());
+ }
+
+ // todo filter by configuration type
+ // notify subscribers about the configuration changes
+ publisher.publish(new AmbariLdapConfigChangedEvent(AmbariEvent.AmbariEventType.LDAP_CONFIG_CHANGED,
+ ambariConfigurationEntity.getId()));
return getRequestStatus(null);
}
@@ -183,6 +201,10 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
}
+ // notify subscribers about the configuration changes
+ publisher.publish(new AmbariLdapConfigChangedEvent(AmbariEvent.AmbariEventType.LDAP_CONFIG_CHANGED, idFromRequest));
+
+
return getRequestStatus(null);
}
@@ -209,11 +231,15 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
persistedEntity.getConfigurationBaseEntity().setConfigurationAttributes(entityFromRequest.getConfigurationBaseEntity().getConfigurationAttributes());
- ambariConfigurationDAO.create(persistedEntity);
+ ambariConfigurationDAO.update(persistedEntity);
} catch (AmbariException e) {
throw new NoSuchParentResourceException(e.getMessage());
}
+ publisher.publish(new AmbariLdapConfigChangedEvent(AmbariEvent.AmbariEventType.LDAP_CONFIG_CHANGED,
+ persistedEntity.getId()));
+
+
return getRequestStatus(null);
}
@@ -251,6 +277,11 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
throw new AmbariException("There must be only one resource specified in the request");
}
+ // the configuration type must be set
+ if (getValueFromResourceProperties(ResourcePropertyId.TYPE, resourcePropertiesSet.iterator().next()) == null) {
+ throw new AmbariException("The configuration type must be set");
+ }
+
for (ResourcePropertyId resourcePropertyId : ResourcePropertyId.values()) {
Object requestValue = getValueFromResourceProperties(resourcePropertyId, resourcePropertiesSet.iterator().next());
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
index 9a5ee79..0f9ff52 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
@@ -140,7 +140,13 @@ public abstract class AmbariEvent {
/**
* Local user has been created.
*/
- USER_CREATED;
+ USER_CREATED,
+
+ /**
+ * LDAP config changed event.
+ */
+ LDAP_CONFIG_CHANGED;
+
}
/**
@@ -151,8 +157,7 @@ public abstract class AmbariEvent {
/**
* Constructor.
*
- * @param eventType
- * the type of event (not {@code null}).
+ * @param eventType the type of event (not {@code null}).
*/
public AmbariEvent(AmbariEventType eventType) {
m_eventType = eventType;
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariLdapConfigChangedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariLdapConfigChangedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariLdapConfigChangedEvent.java
new file mode 100644
index 0000000..48799d7
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariLdapConfigChangedEvent.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events;
+
+/**
+ * Event signaling the creation or modification of an LDAP configuration entry.
+ */
+public class AmbariLdapConfigChangedEvent extends AmbariEvent {
+
+ private Long configurationId;
+
+ /**
+ * Constructor.
+ *
+ * @param eventType the type of event (not {@code null}).
+ */
+ public AmbariLdapConfigChangedEvent(AmbariEventType eventType, Long configurationId) {
+ super(eventType);
+ this.configurationId = configurationId;
+ }
+
+ public Long getConfigurationId() {
+ return configurationId;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
index bcd6e39..57cdf6e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
@@ -18,4 +18,4 @@ import java.util.Map;
public interface LdapConfigurationFactory {
AmbariLdapConfiguration createLdapConfiguration(Map<String, Object> configuration);
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
index 3ae4587..81f2a44 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -15,6 +15,7 @@
package org.apache.ambari.server.ldap;
+import org.apache.ambari.server.ldap.service.AmbariLdapConfigurationProvider;
import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.ambari.server.ldap.service.LdapFacade;
@@ -35,6 +36,8 @@ public class LdapModule extends AbstractModule {
bind(LdapConfigurationService.class).to(DefaultLdapConfigurationService.class);
bind(LdapConnectionService.class).to(DefaultLdapConnectionService.class);
+ bind(AmbariLdapConfiguration.class).toProvider(AmbariLdapConfigurationProvider.class);
+
install(new FactoryModuleBuilder().build(LdapConfigurationFactory.class));
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapConfigurationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapConfigurationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapConfigurationProvider.java
new file mode 100644
index 0000000..7f3e8a9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapConfigurationProvider.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service;
+
+import java.util.Map;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.events.AmbariLdapConfigChangedEvent;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
+import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.eventbus.Subscribe;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * Provider implementation for LDAP configurations. It needs to be registered as a provider in the related Guice module
+ * and is responsible for managing LDAP configurations in the application.
+ * Whenever requested, this provider returns an AmbariLdapConfiguration which is always in sync with the persisted LDAP
+ * configuration resource.
+ *
+ * The provider receives notifications on CRUD operations related to the persisted resource and reloads the cached
+ * configuration instance accordingly.
+ */
+@Singleton
+public class AmbariLdapConfigurationProvider implements Provider<AmbariLdapConfiguration> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariLdapConfigurationProvider.class);
+ private AmbariLdapConfiguration instance;
+
+ @Inject
+ private AmbariEventPublisher publisher;
+
+ @Inject
+ private Provider<AmbariConfigurationDAO> ambariConfigurationDAOProvider;
+
+ private Gson gson = new GsonBuilder().create();
+
+ @Inject
+ public AmbariLdapConfigurationProvider() {
+ }
+
+ @Inject
+ void register() {
+ publisher.register(this);
+ }
+
+ @Override
+ public AmbariLdapConfiguration get() {
+ return instance != null ? instance : loadInstance(null);
+ }
+
+ /**
+ * Loads the AmbariLdapConfiguration from the database.
+ *
+ * @param configurationId the configuration id
+ * @return the AmbariLdapConfiguration instance
+ */
+ private AmbariLdapConfiguration loadInstance(Long configurationId) {
+ AmbariConfigurationEntity configEntity = null;
+
+ LOGGER.info("Loading LDAP configuration ...");
+ if (null != configurationId) {
+
+ LOGGER.debug("Reloading configuration based on the provied id: {}", configurationId);
+ configEntity = ambariConfigurationDAOProvider.get().findByPK(configurationId);
+
+ } else {
+
+ LOGGER.debug("Initial loading of the ldap configuration ...");
+ configEntity = ambariConfigurationDAOProvider.get().getLdapConfiguration();
+
+ }
+
+ if (configEntity != null) {
+ Set propertyMaps = gson.fromJson(configEntity.getConfigurationBaseEntity().getConfigurationData(), Set.class);
+ instance = new AmbariLdapConfiguration((Map<String, Object>) propertyMaps.iterator().next());
+ }
+
+ LOGGER.info("Loaded LDAP configuration instance: [ {} ]", instance);
+
+ return instance;
+ }
+
+ @Subscribe
+ public void ambariLdapConfigChanged(AmbariLdapConfigChangedEvent event) {
+ LOGGER.info("LDAP config changed event received: {}", event);
+ loadInstance(event.getConfigurationId());
+ LOGGER.info("Refreshed LDAP config instance.");
+ }
+
+
+}
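For reference, loadInstance() above expects the persisted configuration data
to deserialize into a set of property maps and consumes only the first one.
A minimal sketch of that JSON shape, with illustrative property names:

import java.util.Map;
import java.util.Set;

import com.google.gson.Gson;

public class LdapConfigJsonShapeSketch {
  public static void main(String[] args) {
    // Shape assumed by AmbariLdapConfigurationProvider.loadInstance():
    // an array of property maps, of which only the first entry is used.
    String configurationData =
        "[{\"ldap.server.host\": \"ldap.example.com\", \"ldap.server.port\": \"389\"}]";

    Set propertyMaps = new Gson().fromJson(configurationData, Set.class);
    Map<String, Object> properties =
        (Map<String, Object>) propertyMaps.iterator().next();
    System.out.println(properties); // {ldap.server.host=ldap.example.com, ...}
  }
}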
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
index 683ed43..90a5ba7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
@@ -19,6 +19,7 @@ import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
+import javax.inject.Provider;
import javax.inject.Singleton;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
@@ -54,6 +55,10 @@ public class AmbariLdapFacade implements LdapFacade {
@Inject
private LdapConnectionService ldapConnectionService;
+ //todo remove this, added for testing purposes only
+ @Inject
+ private Provider<AmbariLdapConfiguration> ambariLdapConfigurationProvider;
+
@Inject
public AmbariLdapFacade() {
}
@@ -75,7 +80,7 @@ public class AmbariLdapFacade implements LdapFacade {
@Override
public void detectAttributes(AmbariLdapConfiguration ambariLdapConfiguration) {
LOGGER.info("Detecting LDAP configuration attributes ...");
- throw new UnsupportedOperationException("Not yet implemented");
+ LOGGER.info("LDAP config: {}", ambariLdapConfigurationProvider.get());
}
@Override
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
index 5710a7f..83293ef 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
@@ -16,8 +16,13 @@ package org.apache.ambari.server.orm.dao;
import javax.inject.Inject;
import javax.inject.Singleton;
+import javax.persistence.EntityExistsException;
+import javax.persistence.EntityNotFoundException;
+import javax.persistence.TypedQuery;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.inject.persist.Transactional;
@@ -29,6 +34,8 @@ import com.google.inject.persist.Transactional;
@Singleton
public class AmbariConfigurationDAO extends CrudDAO<AmbariConfigurationEntity, Long> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariConfigurationDAO.class);
+
@Inject
public AmbariConfigurationDAO() {
super(AmbariConfigurationEntity.class);
@@ -36,6 +43,47 @@ public class AmbariConfigurationDAO extends CrudDAO<AmbariConfigurationEntity, L
@Transactional
public void create(AmbariConfigurationEntity entity) {
+ // make sure only one LDAP config entry exists
+ if ("ldap-configuration".equals(entity.getConfigurationBaseEntity().getType())) {
+ AmbariConfigurationEntity ldapConfigEntity = getLdapConfiguration();
+ if (ldapConfigEntity != null) {
+ LOGGER.error("Only one LDAP configuration entry can exist!");
+ throw new EntityExistsException("LDAP configuration entity already exists!");
+ }
+ }
super.create(entity);
}
+
+
+ @Transactional
+ public void update(AmbariConfigurationEntity entity) {
+ if (entity.getId() == null || findByPK(entity.getId()) == null) {
+ String msg = String.format("The entity with id [ %s ] is not found", entity.getId());
+ LOGGER.debug(msg);
+ throw new EntityNotFoundException(msg);
+ }
+
+ // updating the existing entity
+ super.merge(entity);
+ entityManagerProvider.get().flush();
+ }
+
+ /**
+ * Returns the LDAP configuration from the database.
+ *
+ * @return the configuration entity
+ */
+ @Transactional
+ public AmbariConfigurationEntity getLdapConfiguration() {
+ LOGGER.info("Looking up the LDAP configuration ....");
+ AmbariConfigurationEntity ldapConfigEntity = null;
+
+ TypedQuery<AmbariConfigurationEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AmbariConfigurationEntity.findByType", AmbariConfigurationEntity.class);
+ query.setParameter("typeName", "ldap-configuration");
+
+ ldapConfigEntity = daoUtils.selectSingle(query);
+ LOGGER.info("Returned entity: {} ", ldapConfigEntity);
+ return ldapConfigEntity;
+ }
}
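The guard added to create() above makes "ldap-configuration" effectively a
singleton row. A minimal sketch of the resulting contract (entity population
is omitted; the wrapper class is illustrative):

import javax.persistence.EntityExistsException;

import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;

public class SingleLdapEntrySketch {

  public void example(AmbariConfigurationDAO dao,
                      AmbariConfigurationEntity first,
                      AmbariConfigurationEntity second) {
    // Assume both entities carry a ConfigurationBaseEntity of type
    // "ldap-configuration" (how they are populated is omitted here).
    dao.create(first);
    try {
      dao.create(second);
    } catch (EntityExistsException expected) {
      // Only one LDAP configuration entry may exist at a time.
    }
  }
}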
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
index 34fa221..c9f4695 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
@@ -29,8 +29,8 @@ import javax.persistence.Table;
@Table(name = "ambari_configuration")
@NamedQueries({
@NamedQuery(
- name = "AmbariConfigurationEntity.findAll",
- query = "select ace from AmbariConfigurationEntity ace")
+ name = "AmbariConfigurationEntity.findByType",
+ query = "select ace from AmbariConfigurationEntity ace where ace.configurationBaseEntity.type = :typeName")
})
public class AmbariConfigurationEntity {
http://git-wip-us.apache.org/repos/asf/ambari/blob/f71fac3d/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
index 2b7448e..b5978a5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
@@ -28,6 +28,7 @@ import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -46,15 +47,18 @@ public class DefaultLdapConfigurationServiceTest {
// WHEN
LdapConnectionConfig config = new LdapConnectionConfig();
- config.setLdapHost("localhost");
+ config.setLdapHost("172.22.112.167");
config.setLdapPort(389);
LdapConnection connection = new LdapNetworkConnection(config);
// THEN
- connection.anonymousBind();
+ connection.bind("CN=Robert Levas,CN=Users,DC=HWQE,DC=HORTONWORKS,DC=COM", "Hadoop1234");
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, "person"),
+ FilterBuilder.equal("name", "User1 Levas")).toString();
- EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
+ EntryCursor cursor = connection.search("OU=levas,DC=hwqe,DC=hortonworks,DC=com", filter, SearchScope.SUBTREE);
for (Entry entry : cursor) {
assertNotNull(entry);
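For readers new to the Apache Directory LDAP API used in this test, FilterBuilder composes standard LDAP filter strings from typed predicates. A standalone sketch of the composition above (no directory server required):

    import org.apache.directory.api.ldap.model.constants.SchemaConstants;
    import org.apache.directory.ldap.client.api.search.FilterBuilder;

    public class FilterDemo {
      public static void main(String[] args) {
        // Same composition as in the test; prints (&(objectClass=person)(name=User1 Levas))
        String filter = FilterBuilder.and(
            FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, "person"),
            FilterBuilder.equal("name", "User1 Levas")).toString();
        System.out.println(filter);
      }
    }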
[48/57] [abbrv] ambari git commit: Added swagger annotations to the
new endpoint
Posted by lp...@apache.org.
Added swagger annotations to the new endpoint
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8ac1c824
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8ac1c824
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8ac1c824
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 8ac1c824bbeafe4c45a130421f80431a16a6cb9c
Parents: 0d3e842
Author: Balázs Bence Sári <bs...@hortonworks.com>
Authored: Thu Jul 6 18:36:18 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:00 2017 +0200
----------------------------------------------------------------------
.../AmbariConfigurationRequestSwagger.java | 48 +++++++++++++++
.../AmbariConfigurationResponseSwagger.java | 40 +++++++++++++
.../services/AmbariConfigurationService.java | 62 +++++++++++++++++---
3 files changed, 143 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ac1c824/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
new file mode 100644
index 0000000..d6714f9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.api.services;
+
+import java.util.Map;
+
+import org.apache.ambari.server.controller.ApiModel;
+import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;
+
+import io.swagger.annotations.ApiModelProperty;
+
+/**
+ * Request data model for {@link org.apache.ambari.server.api.services.AmbariConfigurationService}
+ */
+public interface AmbariConfigurationRequestSwagger extends ApiModel {
+
+ @ApiModelProperty(name = "AmbariConfiguration")
+ AmbariConfigurationRequestInfo getAmbariConfiguration();
+
+ interface AmbariConfigurationRequestInfo {
+ @ApiModelProperty
+ Long getId();
+
+ @ApiModelProperty
+ Map<String, Object> getData();
+
+ @ApiModelProperty
+ String getType();
+
+ @ApiModelProperty
+ Long getVersion();
+
+ @ApiModelProperty(name = "version_tag")
+ String getVersionTag();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ac1c824/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationResponseSwagger.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationResponseSwagger.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationResponseSwagger.java
new file mode 100644
index 0000000..c55ac1d
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationResponseSwagger.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.api.services;
+
+import java.util.Map;
+
+import org.apache.ambari.server.controller.ApiModel;
+
+import io.swagger.annotations.ApiModelProperty;
+
+/**
+ * Response data model for {@link org.apache.ambari.server.api.services.AmbariConfigurationService}
+ */
+public interface AmbariConfigurationResponseSwagger extends ApiModel {
+
+ @ApiModelProperty(name = "AmbariConfiguration")
+ AmbariConfigurationResponseInfo getAmbariConfigurationResponse();
+
+ interface AmbariConfigurationResponseInfo {
+ @ApiModelProperty
+ Long getId();
+
+ @ApiModelProperty
+ Map<String, Object> getData();
+
+ @ApiModelProperty
+ String getType();
+ }
+}
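Taken together, the two interfaces above describe a payload of the following shape; the values are illustrative (the data key follows the LDAP example used elsewhere in this branch), and the response model mirrors the request minus the version fields:

    {
      "AmbariConfiguration" : {
        "id" : 1,
        "type" : "ldap-configuration",
        "version" : 1,
        "version_tag" : "v1",
        "data" : { "authentication.ldap.primaryUrl" : "localhost:33389" }
      }
    }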
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ac1c824/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
index 0fa6e44..0c159b9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
@@ -70,9 +70,12 @@ import io.swagger.annotations.ApiResponses;
* </pre>
*/
@Path("/configurations/")
-@Api(value = "/configurations", description = "Endpoint for Ambari configuration related operations")
+@Api(value = "Ambari Configurations", description = "Endpoint for Ambari configuration related operations")
public class AmbariConfigurationService extends BaseService {
+ private static final String AMBARI_CONFIGURATION_REQUEST_TYPE =
+ "org.apache.ambari.server.api.services.AmbariConfigurationRequestSwagger";
+
/**
* Creates an ambari configuration resource.
*
@@ -83,9 +86,10 @@ public class AmbariConfigurationService extends BaseService {
*/
@POST
@Produces(MediaType.TEXT_PLAIN)
- @ApiOperation(value = "Creates an ambari configuration resource")
+ @ApiOperation(value = "Creates an ambari configuration resource",
+ nickname = "AmbariConfigurationService#createAmbariConfiguration")
@ApiImplicitParams({
- @ApiImplicitParam(dataType = "", paramType = PARAM_TYPE_BODY)
+ @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY)
})
@ApiResponses({
@ApiResponse(code = HttpStatus.SC_CREATED, message = MSG_SUCCESSFUL_OPERATION),
@@ -103,7 +107,26 @@ public class AmbariConfigurationService extends BaseService {
@GET
@Produces(MediaType.TEXT_PLAIN)
- @ApiOperation(value = "Retrieve ambari configuration resources")
+ @ApiOperation(value = "Retrieve all ambari configuration resources",
+ nickname = "AmbariConfigurationService#getAmbariConfigurations",
+ notes = "Returns all Ambari configurations.",
+ response = AmbariConfigurationResponseSwagger.class,
+ responseContainer = RESPONSE_CONTAINER_LIST)
+ @ApiImplicitParams({
+ @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION,
+ defaultValue = "AmbariConfiguration/data, AmbariConfiguration/id, AmbariConfiguration/type",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_SORT, value = QUERY_SORT_DESCRIPTION,
+ defaultValue = "AmbariConfiguration/id",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_PAGE_SIZE, value = QUERY_PAGE_SIZE_DESCRIPTION, defaultValue = DEFAULT_PAGE_SIZE, dataType = DATA_TYPE_INT, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_FROM, value = QUERY_FROM_DESCRIPTION, defaultValue = DEFAULT_FROM, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_TO, value = QUERY_TO_DESCRIPTION, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
+ })
+ @ApiResponses(value = {
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
+ })
public Response getAmbariConfigurations(String body, @Context HttpHeaders headers, @Context UriInfo uri) {
return handleRequest(headers, body, uri, Request.Type.GET, createResource(Resource.Type.AmbariConfiguration,
Collections.EMPTY_MAP));
@@ -112,7 +135,18 @@ public class AmbariConfigurationService extends BaseService {
@GET
@Path("{configurationId}")
@Produces(MediaType.TEXT_PLAIN)
- @ApiOperation(value = "Retrieve ambari configuration resource")
+ @ApiOperation(value = "Retrieve the details of an ambari configuration resource",
+ nickname = "AmbariConfigurationService#getAmbariConfiguration",
+ response = AmbariConfigurationResponseSwagger.class)
+ @ApiImplicitParams({
+ @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION, defaultValue = "AmbariConfiguration/*",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
+ })
+ @ApiResponses(value = {
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
+ })
public Response getAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri,
@PathParam("configurationId") String configurationId) {
return handleRequest(headers, body, uri, Request.Type.GET, createResource(Resource.Type.AmbariConfiguration,
@@ -121,7 +155,20 @@ public class AmbariConfigurationService extends BaseService {
@PUT
@Produces(MediaType.TEXT_PLAIN)
- @ApiOperation(value = "Update ambari configuration resources")
+ @ApiOperation(value = "Updates ambari configuration resources - Not implemented yet",
+ nickname = "AmbariConfigurationService#updateAmbariConfiguration")
+ @ApiImplicitParams({
+ @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY)
+ })
+ @ApiResponses({
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_ACCEPTED, message = MSG_REQUEST_ACCEPTED),
+ @ApiResponse(code = HttpStatus.SC_BAD_REQUEST, message = MSG_INVALID_ARGUMENTS),
+ @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
+ @ApiResponse(code = HttpStatus.SC_UNAUTHORIZED, message = MSG_NOT_AUTHENTICATED),
+ @ApiResponse(code = HttpStatus.SC_FORBIDDEN, message = MSG_PERMISSION_DENIED),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR),
+ })
public Response updateAmbariConfiguration() {
throw new UnsupportedOperationException("Not yet implemented");
}
@@ -129,7 +176,8 @@ public class AmbariConfigurationService extends BaseService {
@DELETE
@Path("{configurationId}")
@Produces(MediaType.TEXT_PLAIN)
- @ApiOperation(value = "Deletes an ambari configuration resource")
+ @ApiOperation(value = "Deletes an ambari configuration resource",
+ nickname = "AmbariConfigurationService#deleteAmbariConfiguration")
@ApiResponses({
@ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
@ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
[36/57] [abbrv] ambari git commit: AMBARI-21850. Keep Hive/LLAP log4j
properties file in sync. (Prasanth Jayachandran via Swapan Shridhar).
Posted by lp...@apache.org.
AMBARI-21850. Keep Hive/LLAP log4j properties file in sync. (Prasanth Jayachandran via Swapan Shridhar).
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/acde5028
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/acde5028
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/acde5028
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: acde502817ed185921a5b56a75af4fb814d3c353
Parents: c7a3bcd
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Sep 8 16:53:48 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Sep 8 16:54:09 2017 -0700
----------------------------------------------------------------------
.../HIVE/configuration/hive-exec-log4j2.xml | 6 +++---
.../services/HIVE/configuration/hive-log4j2.xml | 20 +++++++++++++++-----
.../HIVE/configuration/llap-cli-log4j2.xml | 6 +++---
.../HIVE/configuration/llap-daemon-log4j.xml | 9 +++++++--
4 files changed, 28 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/acde5028/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
index b96a468..3854640 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
@@ -60,14 +60,14 @@ appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
+appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
# simple file appender
-appender.FA.type = File
+appender.FA.type = RandomAccessFile
appender.FA.name = FA
appender.FA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
appender.FA.layout.type = PatternLayout
-appender.FA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
+appender.FA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
# list of all loggers
loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX
http://git-wip-us.apache.org/repos/asf/ambari/blob/acde5028/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
index 079bdce..7de3567 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
@@ -71,6 +71,7 @@ property.hive.log.level = {{hive_log_level}}
property.hive.root.logger = DRFA
property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
property.hive.log.file = hive.log
+property.hive.perflogger.log.level = INFO
# list of all appenders
appenders = console, DRFA
@@ -80,16 +81,16 @@ appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
+appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
# daily rolling file appender
-appender.DRFA.type = RollingFile
+appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
# Use %pid in the filePattern to append process-id@host-name to the filename if you want separate log files for different CLI session
-appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}_%i.gz
+appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
appender.DRFA.layout.type = PatternLayout
-appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
+appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
appender.DRFA.policies.type = Policies
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
@@ -100,7 +101,7 @@ appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{hive2_log_maxfilesize}}MB
# list of all loggers
-loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX
+loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp
logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
logger.NIOServerCnxn.level = WARN
@@ -117,6 +118,15 @@ logger.Datastore.level = ERROR
logger.JPOX.name = JPOX
logger.JPOX.level = ERROR
+logger.AmazonAws.name=com.amazonaws
+logger.AmazonAws.level = INFO
+
+logger.ApacheHttp.name=org.apache.http
+logger.ApacheHttp.level = INFO
+
+logger.PerfLogger.name = org.apache.hadoop.hive.ql.log.PerfLogger
+logger.PerfLogger.level = ${sys:hive.perflogger.log.level}
+
# root logger
rootLogger.level = ${sys:hive.log.level}
rootLogger.appenderRefs = root
http://git-wip-us.apache.org/repos/asf/ambari/blob/acde5028/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
index ed52447..f40cfa8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
@@ -86,7 +86,7 @@ appender.console.layout.pattern = %p %c{2}: %m%n
# llapstatusconsole appender
appender.llapstatusconsole.type = Console
appender.llapstatusconsole.name = llapstatusconsole
-appender.llapstatusconsole.target = SYSTEM_OUT
+appender.llapstatusconsole.target = SYSTEM_ERR
appender.llapstatusconsole.layout.type = PatternLayout
appender.llapstatusconsole.layout.pattern = %m%n
@@ -94,8 +94,8 @@ appender.llapstatusconsole.layout.pattern = %m%n
appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
-# Use %pid in the filePattern to append process-id@host-name to the filename if you want separate log files for different CLI session
-appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}_%i
+# Use %pid in the filePattern to append process-id@host-name to the filename if you want separate log files for different CLI sessions
+appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
appender.DRFA.policies.type = Policies
http://git-wip-us.apache.org/repos/asf/ambari/blob/acde5028/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
index 0b8e0ee..44d0175 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
@@ -112,7 +112,7 @@ appender.RFA.strategy.max = ${sys:llap.daemon.log.maxbackupindex}
appender.HISTORYAPPENDER.type = RollingRandomAccessFile
appender.HISTORYAPPENDER.name = HISTORYAPPENDER
appender.HISTORYAPPENDER.fileName = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}
-appender.HISTORYAPPENDER.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}_%d{yyyy-MM-dd}_%i.done
+appender.HISTORYAPPENDER.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}_%d{yyyy-MM-dd-HH}_%i.done
appender.HISTORYAPPENDER.layout.type = PatternLayout
appender.HISTORYAPPENDER.layout.pattern = %m%n
appender.HISTORYAPPENDER.policies.type = Policies
@@ -148,7 +148,12 @@ appender.query-routing.routes.route-mdc.file-mdc.app.layout.type = PatternLayout
appender.query-routing.routes.route-mdc.file-mdc.app.layout.pattern = %d{ISO8601} %5p [%t (%X{fragmentId})] %c{2}: %m%n
# list of all loggers
-loggers = PerfLogger, EncodedReader, NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, HistoryLogger, LlapIoImpl, LlapIoOrc, LlapIoCache, LlapIoLocking, TezSM, TezSS, TezHC
+loggers = PerfLogger, EncodedReader, NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, HistoryLogger, LlapIoImpl, LlapIoOrc, LlapIoCache, LlapIoLocking, TezSM, TezSS, TezHC, LlapDaemon
+
+logger.LlapDaemon.name = org.apache.hadoop.hive.llap.daemon.impl.LlapDaemon
+logger.LlapDaemon.level = INFO
+
+# quiet the Tez loggers that emit debug-level detail at INFO
logger.TezSM.name = org.apache.tez.runtime.library.common.shuffle.impl.ShuffleManager.fetch
logger.TezSM.level = WARN
[41/57] [abbrv] ambari git commit: AMBARI-21926. Failed to call stack
advisor on oozie config change (echekanskiy)
Posted by lp...@apache.org.
AMBARI-21926. Failed to call stack advisor on oozie config change (echekanskiy)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26b9f4f1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26b9f4f1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26b9f4f1
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 26b9f4f13c141434ab51a945ebc50b56b82e41b9
Parents: 4f23c1e
Author: Eugene Chekanskiy <ec...@apache.org>
Authored: Mon Sep 11 17:50:19 2017 +0300
Committer: Eugene Chekanskiy <ec...@apache.org>
Committed: Mon Sep 11 17:50:19 2017 +0300
----------------------------------------------------------------------
.../src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/26b9f4f1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index bd60bed..70cb7a2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -313,7 +313,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
]
self.updateMountProperties("hdfs-site", hdfs_mount_properties, configurations, services, hosts)
-
+ dataDirs = []
if configurations and "hdfs-site" in configurations and \
"dfs.datanode.data.dir" in configurations["hdfs-site"]["properties"] and \
configurations["hdfs-site"]["properties"]["dfs.datanode.data.dir"] is not None:
[08/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if
unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/680f1148
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/680f1148
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/680f1148
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 680f1148e8e0dac064b7b77f9fba77d7d5a3c448
Parents: 202eaed
Author: Toader, Sebastian <st...@hortonworks.com>
Authored: Wed Sep 6 15:07:49 2017 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Wed Sep 6 15:07:49 2017 +0200
----------------------------------------------------------------------
.../functions/setup_ranger_plugin_xml.py | 1 +
.../0.12.0.2.0/package/scripts/params_linux.py | 2 +
.../2.1.0.3.0/package/scripts/params_linux.py | 2 +
.../RANGER/0.4.0/package/scripts/params.py | 2 +
.../RANGER/1.0.0.3.0/package/scripts/params.py | 2 +
.../0.5.0.2.3/package/scripts/params.py | 3 +
.../1.0.0.3.0/package/scripts/params.py | 3 +
.../1.4.4.2.0/package/scripts/params_linux.py | 2 +
.../1.4.4.3.0/package/scripts/params_linux.py | 2 +
.../custom_actions/scripts/check_host.py | 1 +
.../HIVE/test_jdbc_driver_config.py | 66 ++
.../RANGER/test_db_flavor_config.py | 63 ++
.../RANGER_KMS/test_db_flavor_config.py | 63 ++
.../SQOOP/test_jdbc_driver_config.py | 63 ++
.../common-services/configs/hive_default.json | 650 ++++++++++++++
.../configs/hive_unsupported_jdbc_type.json | 650 ++++++++++++++
.../configs/ranger_admin_default.json | 386 ++++++++
.../ranger_admin_unsupported_db_flavor.json | 386 ++++++++
.../configs/ranger_kms_default.json | 802 +++++++++++++++++
.../ranger_kms_unsupported_db_flavor.json | 802 +++++++++++++++++
.../common-services/configs/sqoop_default.json | 879 +++++++++++++++++++
.../configs/sqoop_unsupported_jdbc_driver.json | 879 +++++++++++++++++++
.../test/python/custom_actions/TestCheckHost.py | 33 +
23 files changed, 5742 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
index c80c577..485c1a6 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
@@ -284,6 +284,7 @@ def get_audit_configs(config):
previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
+ else: raise Fail(format("'{xa_audit_db_flavor}' db flavor not supported."))
return jdbc_jar_name, previous_jdbc_jar_name, audit_jdbc_url, jdbc_driver
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 39c06f2..9ba1f99 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -49,6 +49,7 @@ from resource_management.libraries.functions.get_architecture import get_archite
from resource_management.libraries.functions.version import get_major_version
from resource_management.core.utils import PasswordString
+from resource_management.core.exceptions import Fail
from resource_management.core.shell import checked_call
from ambari_commons.credential_store_helper import get_password_from_credential_store
@@ -288,6 +289,7 @@ elif hive_jdbc_driver == "sap.jdbc4.sqlanywhere.IDriver":
jdbc_jar_name = default("/hostLevelParams/custom_sqlanywhere_jdbc_name", None)
hive_previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
sqla_db_used = True
+else: raise Fail(format("JDBC driver '{hive_jdbc_driver}' not supported."))
default_mysql_jar_name = "mysql-connector-java.jar"
default_mysql_target = format("{hive_lib}/{default_mysql_jar_name}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
index 90d9067..f6b676b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
@@ -49,6 +49,7 @@ from resource_management.libraries.functions.get_architecture import get_archite
from resource_management.core.utils import PasswordString
from resource_management.core.shell import checked_call
+from resource_management.core.exceptions import Fail
from ambari_commons.credential_store_helper import get_password_from_credential_store
# Default log4j version; put config files under /etc/hive/conf
@@ -286,6 +287,7 @@ elif hive_jdbc_driver == "sap.jdbc4.sqlanywhere.IDriver":
jdbc_jar_name = default("/hostLevelParams/custom_sqlanywhere_jdbc_name", None)
hive_previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
sqla_db_used = True
+else: raise Fail(format("JDBC driver '{hive_jdbc_driver}' not supported."))
default_mysql_jar_name = "mysql-connector-java.jar"
default_mysql_target = format("{hive_lib}/{default_mysql_jar_name}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index d0f0974..5731e6c 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -29,6 +29,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.get_bare_principal import get_bare_principal
+from resource_management.core.exceptions import Fail
# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
@@ -199,6 +200,7 @@ elif db_flavor.lower() == 'sqla':
previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
audit_jdbc_url = format('jdbc:sqlanywhere:database={ranger_auditdb_name};host={db_host}') if stack_supports_ranger_audit_db else None
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLAnywherePlatform"
+else: raise Fail(format("'{db_flavor}' db flavor not supported."))
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
index 24f459c..b88f1a4 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
@@ -29,6 +29,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.get_bare_principal import get_bare_principal
+from resource_management.core.exceptions import Fail
# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
@@ -197,6 +198,7 @@ elif db_flavor.lower() == 'sqla':
previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
audit_jdbc_url = format('jdbc:sqlanywhere:database={ranger_auditdb_name};host={db_host}') if stack_supports_ranger_audit_db else None
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLAnywherePlatform"
+else: raise Fail(format("'{db_flavor}' db flavor not supported."))
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 466646b..56003ad 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -32,6 +32,7 @@ from resource_management.libraries.functions.setup_ranger_plugin_xml import gene
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import get_kinit_path
+from resource_management.core.exceptions import Fail
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
@@ -163,6 +164,7 @@ elif db_flavor == 'sqla':
db_jdbc_url = format('jdbc:sqlanywhere:database={db_name};host={db_host}')
db_jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLAnywherePlatform"
+else: raise Fail(format("'{db_flavor}' db flavor not supported."))
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
@@ -210,6 +212,7 @@ if has_ranger_admin:
xa_previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
+ else: raise Fail(format("'{xa_audit_db_flavor}' db flavor not supported."))
downloaded_connector_path = format("{tmp_dir}/{jdbc_jar}") if stack_supports_ranger_audit_db else None
driver_source = format("{jdk_location}/{jdbc_jar}") if stack_supports_ranger_audit_db else None
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/params.py
index 003eee1..da8eb8c 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/params.py
@@ -32,6 +32,7 @@ from resource_management.libraries.functions.setup_ranger_plugin_xml import gene
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import get_kinit_path
+from resource_management.core.exceptions import Fail
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
@@ -162,6 +163,7 @@ elif db_flavor == 'sqla':
db_jdbc_url = format('jdbc:sqlanywhere:database={db_name};host={db_host}')
db_jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
jdbc_dialect = "org.eclipse.persistence.platform.database.SQLAnywherePlatform"
+else: raise Fail(format("'{db_flavor}' db flavor not supported."))
downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
@@ -209,6 +211,7 @@ if has_ranger_admin:
xa_previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_sqlanywhere_jdbc_name", None)
audit_jdbc_url = format('jdbc:sqlanywhere:database={xa_audit_db_name};host={xa_db_host}')
jdbc_driver = "sap.jdbc4.sqlanywhere.IDriver"
+ else: raise Fail(format("'{xa_audit_db_flavor}' db flavor not supported."))
downloaded_connector_path = format("{tmp_dir}/{jdbc_jar}") if stack_supports_ranger_audit_db else None
driver_source = format("{jdk_location}/{jdbc_jar}") if stack_supports_ranger_audit_db else None
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
index c1138b3..400c87c 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
@@ -28,6 +28,7 @@ from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.expect import expect
from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
+from resource_management.core.exceptions import Fail
# a map of the Ambari role to the component name
@@ -117,6 +118,7 @@ if "jdbc_drivers" in config['configurations']['sqoop-env']:
jdbc_name = default("/hostLevelParams/custom_hsqldb_jdbc_name", None)
previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_hsqldb_jdbc_name", None)
jdbc_driver_name = "hsqldb"
+ else: raise Fail(format("JDBC driver '{driver_name}' not supported."))
else:
continue
sqoop_jdbc_drivers_dict.append(jdbc_name)
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
index c1138b3..400c87c 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
@@ -28,6 +28,7 @@ from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.expect import expect
from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
+from resource_management.core.exceptions import Fail
# a map of the Ambari role to the component name
@@ -117,6 +118,7 @@ if "jdbc_drivers" in config['configurations']['sqoop-env']:
jdbc_name = default("/hostLevelParams/custom_hsqldb_jdbc_name", None)
previous_jdbc_jar_name = default("/hostLevelParams/previous_custom_hsqldb_jdbc_name", None)
jdbc_driver_name = "hsqldb"
+ else: raise Fail(format("JDBC driver '{driver_name}' not supported."))
else:
continue
sqoop_jdbc_drivers_dict.append(jdbc_name)
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
index a485415..3ca2909 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
@@ -319,6 +319,7 @@ class CheckHost(Script):
jdbc_url = jdk_location + jdbc_driver_sqla_name
jdbc_driver_class = JDBC_DRIVER_CLASS_SQLA
jdbc_name = jdbc_driver_sqla_name
+ else: no_jdbc_error_message = format("'{db_name}' database type not supported.")
if no_jdbc_error_message:
Logger.warning(no_jdbc_error_message)
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/HIVE/test_jdbc_driver_config.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/HIVE/test_jdbc_driver_config.py b/ambari-server/src/test/python/common-services/HIVE/test_jdbc_driver_config.py
new file mode 100644
index 0000000..e4d81b1
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/HIVE/test_jdbc_driver_config.py
@@ -0,0 +1,66 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management.core.exceptions import Fail
+from stacks.utils.RMFTestCase import RMFTestCase
+
+import unittest
+
+class TestJdbcDriverConfig(RMFTestCase):
+ STACK_VERSION = "2.6"
+ CONFIG_DIR = os.path.join(os.path.dirname(__file__), '../configs')
+
+ def test_jdbc_type_0_12_0_2_0(self):
+ self.executeScript("HIVE/0.12.0.2.0/package/scripts/hive_server.py",
+ classname="HiveServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "hive_default.json"))
+
+ def test_unsupported_jdbc_type_throws_error_0_12_0_2_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("HIVE/0.12.0.2.0/package/scripts/hive_server.py",
+ classname="HiveServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "hive_unsupported_jdbc_type.json"))
+
+ def test_jdbc_type_2_1_0_3_0(self):
+ self.executeScript("HIVE/2.1.0.3.0/package/scripts/hive_server.py",
+ classname="HiveServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "hive_default.json"))
+
+ def test_unsupported_jdbc_type_throws_error_2_1_0_3_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("HIVE/2.1.0.3.0/package/scripts/hive_server.py",
+ classname="HiveServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "hive_unsupported_jdbc_type.json"))
+
+
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/RANGER/test_db_flavor_config.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/RANGER/test_db_flavor_config.py b/ambari-server/src/test/python/common-services/RANGER/test_db_flavor_config.py
new file mode 100644
index 0000000..568e3fd
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/RANGER/test_db_flavor_config.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management.core.exceptions import Fail
+from stacks.utils.RMFTestCase import RMFTestCase
+
+import unittest
+
+class TestDbFlavorConfig(RMFTestCase):
+ STACK_VERSION = "2.6"
+ CONFIG_DIR = os.path.join(os.path.dirname(__file__), '../configs')
+
+
+ def test_db_flavor_0_4_0(self):
+ self.executeScript("RANGER/0.4.0/package/scripts/ranger_admin.py",
+ classname="RangerAdmin",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_admin_default.json"))
+
+ def test_unsupported_db_flavor_0_4_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("RANGER/0.4.0/package/scripts/ranger_admin.py",
+ classname="RangerAdmin",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_admin_unsupported_db_flavor.json"))
+
+ def test_db_flavor_1_0_0_3_0(self):
+ self.executeScript("RANGER/1.0.0.3.0/package/scripts/ranger_admin.py",
+ classname="RangerAdmin",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_admin_default.json"))
+
+ def test_unsupported_db_flavor_1_0_0_3_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("RANGER/1.0.0.3.0/package/scripts/ranger_admin.py",
+ classname="RangerAdmin",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_admin_unsupported_db_flavor.json"))
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/RANGER_KMS/test_db_flavor_config.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/RANGER_KMS/test_db_flavor_config.py b/ambari-server/src/test/python/common-services/RANGER_KMS/test_db_flavor_config.py
new file mode 100644
index 0000000..48654ee
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/RANGER_KMS/test_db_flavor_config.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management.core.exceptions import Fail
+from stacks.utils.RMFTestCase import RMFTestCase
+
+import unittest
+
+class TestDbFlavorConfig(RMFTestCase):
+ STACK_VERSION = "2.6"
+ CONFIG_DIR = os.path.join(os.path.dirname(__file__), '../configs')
+
+
+ def test_db_flavor_0_5_0_2_3(self):
+ self.executeScript("RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py",
+ classname="KmsServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_kms_default.json"))
+
+ def test_unsupported_db_flavor_0_5_0_2_3(self):
+ with self.assertRaises(Fail):
+ self.executeScript("RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py",
+ classname="KmsServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_kms_unsupported_db_flavor.json"))
+
+ def test_db_flavor_1_0_0_3_0(self):
+ self.executeScript("RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py",
+ classname="KmsServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_kms_default.json"))
+
+ def test_unsupported_db_flavor_1_0_0_3_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py",
+ classname="KmsServer",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "ranger_kms_unsupported_db_flavor.json"))
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/SQOOP/test_jdbc_driver_config.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/SQOOP/test_jdbc_driver_config.py b/ambari-server/src/test/python/common-services/SQOOP/test_jdbc_driver_config.py
new file mode 100644
index 0000000..7bb809a
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/SQOOP/test_jdbc_driver_config.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management.core.exceptions import Fail
+from stacks.utils.RMFTestCase import RMFTestCase
+
+import unittest
+
+class TestJdbcDriverConfig(RMFTestCase):
+ STACK_VERSION = "2.6"
+ CONFIG_DIR = os.path.join(os.path.dirname(__file__), '../configs')
+
+
+ def test_jdbc_driver_1_4_4_2_0(self):
+ self.executeScript("SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py",
+ classname="SqoopClient",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "sqoop_default.json"))
+
+ def test_unsupported_jdbc_driver_1_4_4_2_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py",
+ classname="SqoopClient",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "sqoop_unsupported_jdbc_driver.json"))
+
+ def test_jdbc_driver_1_4_4_3_0(self):
+ self.executeScript("SQOOP/1.4.4.3.0/package/scripts/sqoop_client.py",
+ classname="SqoopClient",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "sqoop_default.json"))
+
+ def test_unsupported_jdbc_driver_1_4_4_3_0(self):
+ with self.assertRaises(Fail):
+ self.executeScript("SQOOP/1.4.4.3.0/package/scripts/sqoop_client.py",
+ classname="SqoopClient",
+ command="configure",
+ target=RMFTestCase.TARGET_COMMON_SERVICES,
+ stack_version=self.STACK_VERSION,
+ config_file=os.path.join(self.CONFIG_DIR, "sqoop_unsupported_jdbc_driver.json"))
\ No newline at end of file
[50/57] [abbrv] ambari git commit: AMBARI-21545 Stack Advisor support
for LDAP configuration (benyoka)
Posted by lp...@apache.org.
AMBARI-21545 Stack Advisor support for LDAP configuration (benyoka)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/341be710
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/341be710
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/341be710
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 341be7104478763871b057226a98490512101a71
Parents: ff9b378
Author: Balazs Bence Sari <be...@apache.org>
Authored: Tue Aug 8 20:17:14 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:01 2017 +0200
----------------------------------------------------------------------
.../services/AmbariConfigurationService.java | 4 +-
.../stackadvisor/StackAdvisorRequest.java | 12 ++
.../commands/StackAdvisorCommand.java | 54 +++++
.../commands/StackAdvisorCommandTest.java | 212 +++++++++++++++++++
.../StackAdvisorResourceProviderTest.java | 97 ++++-----
5 files changed, 324 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/341be710/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
index 0632361..927e518 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
@@ -56,9 +56,9 @@ import io.swagger.annotations.ApiResponses;
* "data": [
* {
* "authentication.ldap.primaryUrl": "localhost:33389"
- "authentication.ldap.secondaryUrl": "localhost:333"
+ * "authentication.ldap.secondaryUrl": "localhost:333"
* "authentication.ldap.baseDn": "dc=ambari,dc=apache,dc=org"
- * // ......
+ * // ......
* ]
* }
* </pre>
http://git-wip-us.apache.org/repos/asf/ambari/blob/341be710/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
index 3a2b488..cd26c56 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
@@ -31,6 +31,8 @@ import org.apache.ambari.server.api.services.stackadvisor.recommendations.Recomm
import org.apache.ambari.server.state.ChangedConfigInfo;
import org.apache.commons.lang.StringUtils;
+import com.google.common.base.Preconditions;
+
/**
* Stack advisor request.
*/
@@ -48,6 +50,7 @@ public class StackAdvisorRequest {
private List<ChangedConfigInfo> changedConfigurations = new LinkedList<>();
private Set<RecommendationResponse.ConfigGroup> configGroups;
private Map<String, String> userContext = new HashMap<>();
+ private Map<String, Object> ldapConfig = new HashMap<>();
public String getStackName() {
return stackName;
@@ -93,6 +96,8 @@ public class StackAdvisorRequest {
return configurations;
}
+ public Map<String, Object> getLdapConfig() { return ldapConfig; }
+
public List<ChangedConfigInfo> getChangedConfigurations() {
return changedConfigurations;
}
@@ -189,6 +194,13 @@ public class StackAdvisorRequest {
return this;
}
+ public StackAdvisorRequestBuilder withLdapConfig(Map<String, Object> ldapConfig) {
+ Preconditions.checkNotNull(ldapConfig);
+ this.instance.ldapConfig = ldapConfig;
+ return this;
+ }
+
+
public StackAdvisorRequest build() {
return this.instance;
}
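For reference, the new withLdapConfig() method slots into the existing fluent builder. A minimal sketch of a caller attaching LDAP properties to a request (the property keys and stack coordinates below are illustrative, mirroring the test values further down, not mandated by the builder):

    Map<String, Object> ldapConfig = new HashMap<>();
    ldapConfig.put("authentication.ldap.primaryUrl", "localhost:33389");
    ldapConfig.put("authentication.ldap.baseDn", "dc=ambari,dc=apache,dc=org");

    StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
        .forStack("HDP", "2.6")         // stack name/version are placeholders
        .withLdapConfig(ldapConfig)     // Preconditions.checkNotNull: a null map fails fast with NPE
        .build();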
http://git-wip-us.apache.org/repos/asf/ambari/blob/341be710/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
index 356754d..2dc45de 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
@@ -84,6 +84,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
+ ",services/configurations/dependencies/StackConfigurationDependency/dependency_name"
+ ",services/configurations/dependencies/StackConfigurationDependency/dependency_type,services/configurations/StackConfigurations/type"
+ "&services/StackServices/service_name.in(%s)";
+ private static final String GET_LDAP_CONFIG_URI = "/api/v1/configurations?AmbariConfiguration/type=ldap&fields=AmbariConfiguration/*";
private static final String SERVICES_PROPERTY = "services";
private static final String SERVICES_COMPONENTS_PROPERTY = "components";
private static final String CONFIG_GROUPS_PROPERTY = "config-groups";
@@ -95,6 +96,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
private static final String CHANGED_CONFIGURATIONS_PROPERTY = "changed-configurations";
private static final String USER_CONTEXT_PROPERTY = "user-context";
private static final String AMBARI_SERVER_CONFIGURATIONS_PROPERTY = "ambari-server-properties";
+ protected static final String LDAP_CONFIGURATION_PROPERTY = "ldap-configuration";
private File recommendationsDir;
private String recommendationsArtifactsLifetime;
@@ -160,6 +162,7 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
populateConfigurations(root, request);
populateConfigGroups(root, request);
populateAmbariServerInfo(root);
+ populateLdapConfiguration(root);
data.servicesJSON = mapper.writeValueAsString(root);
} catch (Exception e) {
// should not happen
@@ -171,6 +174,52 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
return data;
}
+ /**
+ * Retrieves the LDAP configuration if it exists and adds it to services.json
+ * @param root The JSON document that will become services.json when passed to the stack advisor engine
+ * @throws StackAdvisorException
+ * @throws IOException
+ */
+ protected void populateLdapConfiguration(ObjectNode root) throws StackAdvisorException, IOException {
+ Response response = handleRequest(null, null, new LocalUriInfo(GET_LDAP_CONFIG_URI), Request.Type.GET,
+ createConfigResource());
+
+ if (response.getStatus() != Status.OK.getStatusCode()) {
+ String message = String.format(
+ "Error occured during retrieving ldap configuration, status=%s, response=%s",
+ response.getStatus(), (String) response.getEntity());
+ LOG.warn(message);
+ throw new StackAdvisorException(message);
+ }
+
+ String ldapConfigJSON = (String) response.getEntity();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("LDAP configuration: {}", ldapConfigJSON);
+ }
+
+ JsonNode ldapConfigRoot = mapper.readTree(ldapConfigJSON);
+ ArrayNode ldapConfigs = ((ArrayNode)ldapConfigRoot.get("items"));
+ int numConfigs = ldapConfigs.size();
+ // Zero or one config may exist
+ switch (numConfigs) {
+ case 0:
+ LOG.debug("No LDAP config is stored in the DB");
+ break;
+ case 1:
+ ArrayNode ldapConfigData = (ArrayNode)ldapConfigs.get(0).get("AmbariConfiguration").get("data");
+ if (ldapConfigData.size() == 0) {
+ throw new StackAdvisorException("No configuration data for LDAP configuration.");
+ }
+ if (ldapConfigData.size() > 1) {
+ throw new StackAdvisorException("Ambigous configuration data for LDAP configuration.");
+ }
+ root.put(LDAP_CONFIGURATION_PROPERTY, ldapConfigData.get(0));
+ break;
+ default:
+ throw new StackAdvisorException(String.format("Multiple (%s) LDAP configs were found in the DB.", numConfigs));
+ }
+ }
+
protected void populateAmbariServerInfo(ObjectNode root) throws StackAdvisorException {
Map<String, String> serverProperties = metaInfo.getAmbariServerProperties();
@@ -437,6 +486,11 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
return createResource(Resource.Type.Host, mapIds);
}
+ protected ResourceInstance createConfigResource() {
+ return createResource(Resource.Type.AmbariConfiguration, new HashMap<>());
+ }
+
+
private ResourceInstance createStackVersionResource(String stackName, String stackVersion) {
Map<Resource.Type, String> mapIds = new HashMap<>();
mapIds.put(Resource.Type.Stack, stackName);
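To make the contract of populateLdapConfiguration() concrete: per the parsing logic above, the endpoint is expected to return an "items" array holding at most one AmbariConfiguration, whose "data" array must hold exactly one properties object. A hedged round-trip sketch using the same (codehaus) Jackson types; values are illustrative and the error branches of the switch are omitted:

    ObjectMapper mapper = new ObjectMapper();
    // Shape returned by GET /api/v1/configurations?AmbariConfiguration/type=ldap&fields=AmbariConfiguration/*
    String wirePayload = "{\"items\":[{\"AmbariConfiguration\":"
        + "{\"data\":[{\"authentication.ldap.primaryUrl\":\"localhost:33389\"}]}}]}";

    ObjectNode root = (ObjectNode) mapper.readTree("{}");
    JsonNode data = mapper.readTree(wirePayload)
        .get("items").get(0).get("AmbariConfiguration").get("data");
    root.put("ldap-configuration", data.get(0)); // the happy-path addition to services.json
    // root now serializes as:
    // {"ldap-configuration":{"authentication.ldap.primaryUrl":"localhost:33389"}}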
http://git-wip-us.apache.org/repos/asf/ambari/blob/341be710/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
index eaa4716..959db15 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.api.services.stackadvisor.commands;
+import static org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.LDAP_CONFIGURATION_PROPERTY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@@ -33,12 +34,21 @@ import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import org.apache.ambari.server.api.resources.ResourceInstance;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.api.services.Request;
+import org.apache.ambari.server.api.services.ResultStatus;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
@@ -50,6 +60,7 @@ import org.apache.ambari.server.state.ServiceInfo;
import org.apache.commons.io.FileUtils;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;
import org.junit.After;
@@ -59,6 +70,8 @@ import org.junit.rules.TemporaryFolder;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import com.google.common.collect.Lists;
+
/**
* StackAdvisorCommand unit tests.
*/
@@ -265,6 +278,197 @@ public class StackAdvisorCommandTest {
assertEquals(0, stackVersions.size());
}
+ @Test
+ public void testPopulateLdapConfig() throws Exception {
+ File recommendationsDir = temp.newFolder("recommendationDir");
+ String recommendationsArtifactsLifetime = "1w";
+ int requestId = 0;
+ StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+ AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+ doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
+ TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
+ ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo));
+
+ StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion").build();
+
+ Map<String, Object> ldapConfigData = map(
+ "authentication.ldap.primaryUrl", "localhost:33389",
+ "authentication.ldap.secondaryUrl", "localhost:333",
+ "authentication.ldap.baseDn", "c=ambari,dc=apache,dc=org"
+ );
+
+ Map<String, Object> storedLdapConfigResult = map(
+ "items",
+ list(
+ map(
+ "AmbariConfiguration",
+ map(
+ "data", list(ldapConfigData)
+ )
+ )
+ )
+ );
+
+ Response response =
+ Response.status(ResultStatus.STATUS.OK.getStatus()).entity(jsonString(storedLdapConfigResult)).build();
+
+ doReturn(response).when(command).handleRequest(any(), any(), any(), any(), any(), any());
+
+ JsonNode servicesRootNode = json("{}");
+ command.populateLdapConfiguration((ObjectNode)servicesRootNode);
+
+ JsonNode expectedLdapConfig = json(
+ map(LDAP_CONFIGURATION_PROPERTY, ldapConfigData)
+ );
+
+ assertEquals(expectedLdapConfig, servicesRootNode);
+ }
+
+ @Test
+ public void testPopulateLdapConfig_NoConfigs() throws Exception {
+ File recommendationsDir = temp.newFolder("recommendationDir");
+ String recommendationsArtifactsLifetime = "1w";
+ int requestId = 0;
+ StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+ AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+ doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
+ TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
+ ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo));
+
+ StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion").build();
+
+ Map<String, Object> storedLdapConfigResult = map(
+ "items", list()
+ );
+
+ Response response =
+ Response.status(ResultStatus.STATUS.OK.getStatus()).entity(jsonString(storedLdapConfigResult)).build();
+
+ doReturn(response).when(command).handleRequest(any(), any(), any(), any(), any(), any());
+
+ JsonNode servicesRootNode = json("{}");
+ command.populateLdapConfiguration((ObjectNode)servicesRootNode);
+
+ JsonNode expectedLdapConfig = json("{}");
+
+ assertEquals(expectedLdapConfig, servicesRootNode);
+ }
+
+ /**
+ * An ambiguous LDAP config that has two items in its data[] array should result in an exception
+ */
+ @Test(expected = StackAdvisorException.class)
+ public void testPopulateLdapConfig_multipleConfigs() throws Exception {
+ File recommendationsDir = temp.newFolder("recommendationDir");
+ String recommendationsArtifactsLifetime = "1w";
+ int requestId = 0;
+ StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+ AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+ doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
+ TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
+ ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo));
+
+ StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion").build();
+
+ Map<String, Object> ldapConfigData = map(
+ "authentication.ldap.primaryUrl", "localhost:33389",
+ "authentication.ldap.secondaryUrl", "localhost:333",
+ "authentication.ldap.baseDn", "c=ambari,dc=apache,dc=org"
+ );
+
+ Map<String, Object> storedLdapConfigResult = map(
+ "items",
+ list(
+ map(
+ "AmbariConfiguration",
+ map(
+ "data",
+ list(ldapConfigData, ldapConfigData)
+ )
+ )
+ )
+ );
+
+ Response response =
+ Response.status(ResultStatus.STATUS.OK.getStatus()).entity(jsonString(storedLdapConfigResult)).build();
+
+ doReturn(response).when(command).handleRequest(any(), any(), any(), any(), any(), any());
+
+ JsonNode servicesRootNode = json("{}");
+ command.populateLdapConfiguration((ObjectNode)servicesRootNode);
+ }
+
+ /**
+ * If multiple Ambari configurations are stored with the 'ldap-config' type, an
+ * exception should be thrown
+ */
+ @Test(expected = StackAdvisorException.class)
+ public void testPopulateLdapConfig_multipleResults() throws Exception {
+ File recommendationsDir = temp.newFolder("recommendationDir");
+ String recommendationsArtifactsLifetime = "1w";
+ int requestId = 0;
+ StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+ AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+ doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
+ TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
+ ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo));
+
+ StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
+ .build();
+
+ Map<String, Object> ldapConfig = map(
+ "AmbariConfiguration",
+ map(
+ "data",
+ list(
+ map(
+ "authentication.ldap.primaryUrl", "localhost:33389",
+ "authentication.ldap.secondaryUrl", "localhost:333",
+ "authentication.ldap.baseDn", "c=ambari,dc=apache,dc=org"
+ )
+ )
+ )
+ );
+
+ Map<String, Object> storedLdapConfigResult = map(
+ "items",
+ list(ldapConfig, ldapConfig)
+ );
+
+ Response response =
+ Response.status(ResultStatus.STATUS.OK.getStatus()).entity(jsonString(storedLdapConfigResult)).build();
+
+ doReturn(response).when(command).handleRequest(any(), any(), any(), any(), any(), any());
+
+ JsonNode servicesRootNode = json("{}");
+ command.populateLdapConfiguration((ObjectNode)servicesRootNode);
+ }
+
+ private static String jsonString(Object obj) throws IOException {
+ return new ObjectMapper().writeValueAsString(obj);
+ }
+
+ private static JsonNode json(Object obj) throws IOException {
+ return new ObjectMapper().convertValue(obj, JsonNode.class);
+ }
+
+ private static JsonNode json(String jsonString) throws IOException {
+ return new ObjectMapper().readTree(jsonString);
+ }
+
+ private static List<Object> list(Object... items) {
+ return Lists.newArrayList(items);
+ }
+
+ private static Map<String, Object> map(Object... keysAndValues) {
+ Map<String, Object> map = new HashMap<>();
+ Iterator<Object> iterator = Arrays.asList(keysAndValues).iterator();
+ while (iterator.hasNext()) {
+ map.put(iterator.next().toString(), iterator.next());
+ }
+ return map;
+ }
+
class TestStackAdvisorCommand extends StackAdvisorCommand<TestResource> {
public TestStackAdvisorCommand(File recommendationsDir, String recommendationsArtifactsLifetime, ServiceInfo.ServiceAdvisorType serviceAdvisorType,
int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
@@ -290,6 +494,14 @@ public class StackAdvisorCommandTest {
protected TestResource updateResponse(StackAdvisorRequest request, TestResource response) {
return response;
}
+
+ // Overridden to ensure visibility in tests
+ @Override
+ public javax.ws.rs.core.Response handleRequest(HttpHeaders headers, String body,
+ UriInfo uriInfo, Request.Type requestType,
+ MediaType mediaType, ResourceInstance resource) {
+ return super.handleRequest(headers, body, uriInfo, requestType, mediaType, resource);
+ }
}
public static class TestResource extends StackAdvisorResponse {
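A note on the varargs helpers introduced above: map() consumes its arguments pairwise (key, value, key, value, ...), so it composes with list() to build the nested structures these LDAP tests need; an odd-length argument list fails at the second iterator.next() with NoSuchElementException. A small illustration with hypothetical values:

    Map<String, Object> payload = map(
        "items", list(
            map("AmbariConfiguration",
                map("data", list(map("user.dn", "dc=example,dc=org"))))));
    // jsonString(payload) ->
    // {"items":[{"AmbariConfiguration":{"data":[{"user.dn":"dc=example,dc=org"}]}}]}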
http://git-wip-us.apache.org/repos/asf/ambari/blob/341be710/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
index ab60948..05232ea 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
@@ -27,43 +27,35 @@ import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
-import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.LinkedHashSet;
-import java.util.List;
import java.util.Map;
import java.util.Set;
+import javax.annotation.Nonnull;
+
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.junit.Assert;
+import org.junit.Before;
import org.junit.Test;
+import com.google.common.collect.Lists;
+
public class StackAdvisorResourceProviderTest {
+ private RecommendationResourceProvider provider;
+
@Test
public void testCalculateConfigurations() throws Exception {
-
- Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
- Set<String> propertyIds = Collections.emptySet();
- AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
- RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
- keyPropertyIds, ambariManagementController);
-
- Request request = mock(Request.class);
- Set<Map<String, Object>> propertiesSet = new HashSet<>();
- Map<String, Object> propertiesMap = new HashMap<>();
- propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", "string");
- List<Object> array = new ArrayList<>();
- array.add("array1");
- array.add("array2");
- propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", array);
- propertiesSet.add(propertiesMap);
-
- doReturn(propertiesSet).when(request).getProperties();
+ Request request = createMockRequest(
+ CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", "string",
+ CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", Lists.newArrayList("array1", "array2"));
Map<String, Map<String, Map<String, String>>> calculatedConfigurations = provider.calculateConfigurations(request);
@@ -79,27 +71,37 @@ public class StackAdvisorResourceProviderTest {
assertEquals("[array1, array2]", properties.get("array_prop"));
}
- @Test
- public void testReadUserContext() throws Exception {
-
+ @Nonnull
+ private RecommendationResourceProvider createRecommendationResourceProvider() {
Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
Set<String> propertyIds = Collections.emptySet();
AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
- RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
- keyPropertyIds, ambariManagementController);
+ return new RecommendationResourceProvider(propertyIds,
+ keyPropertyIds, ambariManagementController);
+ }
+ @Nonnull
+ private Request createMockRequest(Object... propertyKeysAndValues) {
Request request = mock(Request.class);
Set<Map<String, Object>> propertiesSet = new HashSet<>();
Map<String, Object> propertiesMap = new HashMap<>();
- propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", "string");
- List<Object> array = new ArrayList<>();
- array.add("array1");
- array.add("array2");
- propertiesMap.put(USER_CONTEXT_OPERATION_PROPERTY, "op1");
- propertiesMap.put(USER_CONTEXT_OPERATION_DETAILS_PROPERTY, "op_det");
+ Iterator<Object> it = Arrays.asList(propertyKeysAndValues).iterator();
+ while(it.hasNext()) {
+ String key = (String)it.next();
+ Object value = it.next();
+ propertiesMap.put(key, value);
+ }
propertiesSet.add(propertiesMap);
-
doReturn(propertiesSet).when(request).getProperties();
+ return request;
+ }
+
+ @Test
+ public void testReadUserContext() throws Exception {
+ Request request = createMockRequest(
+ CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", "string",
+ USER_CONTEXT_OPERATION_PROPERTY, "op1",
+ USER_CONTEXT_OPERATION_DETAILS_PROPERTY, "op_det");
Map<String, String> userContext = provider.readUserContext(request);
@@ -111,24 +113,9 @@ public class StackAdvisorResourceProviderTest {
@Test
public void testCalculateConfigurationsWithNullPropertyValues() throws Exception {
-
- Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
- Set<String> propertyIds = Collections.emptySet();
- AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
- RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
- keyPropertyIds, ambariManagementController);
-
- Request request = mock(Request.class);
- Set<Map<String, Object>> propertiesSet = new HashSet<>();
- Map<String, Object> propertiesMap = new HashMap<>();
- propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", null); //null value means no value specified for the property
- List<Object> array = new ArrayList<>();
- array.add("array1");
- array.add("array2");
- propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", array);
- propertiesSet.add(propertiesMap);
-
- doReturn(propertiesSet).when(request).getProperties();
+ Request request = createMockRequest(
+ CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", null,
+ CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", Lists.newArrayList("array1", "array2"));
Map<String, Map<String, Map<String, String>>> calculatedConfigurations = provider.calculateConfigurations(request);
@@ -142,19 +129,18 @@ public class StackAdvisorResourceProviderTest {
assertEquals("[array1, array2]", properties.get("array_prop"));
-
// config properties with null values should be ignored
assertFalse(properties.containsKey("string_prop"));
-
}
+
@Test
public void testStackAdvisorWithEmptyHosts() {
Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
Set<String> propertyIds = Collections.emptySet();
AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
- keyPropertyIds, ambariManagementController);
+ keyPropertyIds, ambariManagementController);
Request request = mock(Request.class);
Set<Map<String, Object>> propertiesSet = new HashSet<>();
@@ -170,4 +156,9 @@ public class StackAdvisorResourceProviderTest {
} catch (Exception e) {
}
}
+
+ @Before
+ public void init() {
+ provider = createRecommendationResourceProvider();
+ }
}
[07/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/hive_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/hive_default.json b/ambari-server/src/test/python/common-services/configs/hive_default.json
new file mode 100644
index 0000000..2cd0d11
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/hive_default.json
@@ -0,0 +1,650 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "agent_stack_retry_count": "5",
+ "agent_stack_retry_on_unavailability": "false",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.6",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "db_name": "ambari",
+ "custom_mysql_jdbc_name": "mysql-connector-java.jar"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "SLIDER",
+ "role": "SLIDER",
+ "commandParams": {
+ "version": "2.5.0.0-1235",
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2"
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "hive-env" : {
+ "hcat_pid_dir": "/var/run/webhcat",
+ "hcat_user": "hcat",
+ "hive_ambari_database": "MySQL",
+ "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+ "hive_metastore_port": "9083",
+ "webhcat_user": "hcat",
+ "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n if [ -z \"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n else\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n fi\n fi\n\n# The heap size of the jvm stared by hive shell script can be controlled via:\n\nexport HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger heap size may be required when running queries over large number of files or partitions.\n# By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration Directory can
be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing extra ibraries required for hive compilation/execution can be controlled by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" ]; then\n export HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n export HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport METASTORE_PORT={{hive_metastore_port}}",
+ "hive_database_name": "hive",
+ "hive_database_type": "mysql",
+ "hive_pid_dir": "/var/run/hive",
+ "hive_log_dir": "/var/log/hive",
+ "hive_user": "hive",
+ "hcat_log_dir": "/var/log/webhcat",
+ "hive_database": "New MySQL Database",
+ "hive_security_authorization": "None"
+ },
+ "hive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.authentication": "NOSASL",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true",
+ "hive.exec.scratchdir" : "/custompath/tmp/hive"
+ },
+ "slider-client": {
+ "slider.yarn.queue": "default"
+ },
+ "sqoop-site": {
+ "atlas.cluster.name": "c1",
+ "sqoop.job.data.publish.class": "org.apache.atlas.sqoop.hook.SqoopHook"
+ },
+ "mahout-env": {
+ "mahout_user": "mahout"
+ },
+ "hbase-env": {
+ "hbase_user": "hbase"
+ },
+ "yarn-env": {
+ "yarn_user": "yarn"
+ },
+ "mahout-log4j": {
+ "content": "\n #\n #\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing,\n # software distributed under the License is distributed on an\n # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n # KIND, either express or implied. See the License for the\n # specific language governing permissions a
nd limitations\n # under the License.\n #\n #\n #\n\n # Set everything to be logged to the console\n log4j.rootCategory=WARN, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n # Settings to quiet third party logs that are too verbose\n log4j.logger.org.eclipse.jetty=WARN\n log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN\n log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN"
+ },
+ "hadoop-env": {
+ "hdfs_user": "hdfs",
+ "hdfs_tmp_dir": "/tmp"
+ },
+ "core-site": {
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
+ },
+ "hdfs-site": {
+ "a": "b"
+ },
+ "yarn-site": {
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.address": "c6401.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "webhcat-site": {
+ "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
+ "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+ "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+ "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+ "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
+ },
+ "slider-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "slider-env": {
+ "content": "envproperties\nline2"
+ },
+ "gateway-site": {
+ "java.security.auth.login.config": "/etc/knox/conf/krb5JAASLogin.conf",
+ "gateway.hadoop.kerberos.secured": "false",
+ "gateway.gateway.conf.dir": "deployments",
+ "gateway.path": "gateway",
+ "sun.security.krb5.debug": "true",
+ "java.security.krb5.conf": "/etc/knox/conf/krb5.conf",
+ "gateway.port": "8443"
+ },
+
+ "users-ldif": {
+ "content": "\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the Lice
nse.\n\n version: 1\n\n # Please replace with site specific values\n dn: dc=hadoop,dc=apache,dc=org\n objectclass: organization\n objectclass: dcObject\n o: Hadoop\n dc: hadoop\n\n # Entry for a sample people container\n # Please replace with site specific values\n dn: ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: people\n\n # Entry for a sample end user\n # Please replace with site specific values\n dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Guest\n sn: User\n uid: guest\n userPassword:guest-password\n\n # entry for sample user admin\n dn:
uid=admin,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Admin\n sn: Admin\n uid: admin\n userPassword:admin-password\n\n # entry for sample user sam\n dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: sam\n sn: sam\n uid: sam\n userPassword:sam-password\n\n # entry for sample user tom\n dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: tom\n sn: tom\n uid: tom\n userPasswor
d:tom-password\n\n # create FIRST Level groups branch\n dn: ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: groups\n description: generic groups branch\n\n # create the analyst group under groups\n dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: analyst\n description:analyst group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n\n\n # create the scientist group under groups\n dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: scientist\n description: scientist group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org"
+ },
+
+ "topology": {
+ "content": "\n <topology>\n\n <gateway>\n\n <provider>\n <role>authentication</role>\n <name>ShiroProvider</name>\n <enabled>true</enabled>\n <param>\n <name>sessionTimeout</name>\n <value>30</value>\n </param>\n <param>\n <name>main.ldapRealm</name>\n <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n </param>\n <param>\n <name>main.ldapRealm.userDnTemplate</name>\n <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n </param>\n <param>\n <name>main.ldapRealm.contextFactory.url</name>\n <value>ldap://{{knox_host_name}}:33389</value>\n
</param>\n <param>\n <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n <value>simple</value>\n </param>\n <param>\n <name>urls./**</name>\n <value>authcBasic</value>\n </param>\n </provider>\n\n <provider>\n <role>identity-assertion</role>\n <name>Default</name>\n <enabled>true</enabled>\n </provider>\n\n </gateway>\n\n <service>\n <role>NAMENODE</role>\n <url>hdfs://{{namenode_host}}:{{namenode_rpc_port}}</url>\n </service>\n\n <service>\n <role>JOBTRACKER</role>\n <url>rpc://{{rm_host}}:{{jt_rpc_port}}</url>\n </service>\n\n <service>\n <role>WEBHDFS</role
>\n <url>http://{{namenode_host}}:{{namenode_http_port}}/webhdfs</url>\n </service>\n\n <service>\n <role>WEBHCAT</role>\n <url>http://{{webhcat_server_host}}:{{templeton_port}}/templeton</url>\n </service>\n\n <service>\n <role>OOZIE</role>\n <url>http://{{oozie_server_host}}:{{oozie_server_port}}/oozie</url>\n </service>\n\n <service>\n <role>WEBHBASE</role>\n <url>http://{{hbase_master_host}}:{{hbase_master_port}}</url>\n </service>\n\n <service>\n <role>HIVE</role>\n <url>http://{{hive_server_host}}:{{hive_http_port}}/{{hive_http_path}}</url>\n </service>\n\n <service>\n <role>RESOURCEMANAGER</role>\n <url>http://{{rm_host}}:{{rm_port}}/ws</url>\n </service>\n </topology>"
+ },
+
+ "ldap-log4j": {
+ "content": "\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License.\n #testing\n\n app.log.dir=${launcher.dir
}/../logs\n app.log.file=${launcher.name}.log\n\n log4j.rootLogger=ERROR, drfa\n log4j.logger.org.apache.directory.server.ldap.LdapServer=INFO\n log4j.logger.org.apache.directory=WARN\n\n log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n log4j.appender.drfa.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n"
+ },
+
+ "gateway-log4j": {
+ "content": "\n\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License.\n\n app.log.dir=${launcher.dir}/../logs\n app.log.file=${launcher.name}.log\n
app.audit.file=${launcher.name}-audit.log\n\n log4j.rootLogger=ERROR, drfa\n\n log4j.logger.org.apache.hadoop.gateway=INFO\n #log4j.logger.org.apache.hadoop.gateway=DEBUG\n\n #log4j.logger.org.eclipse.jetty=DEBUG\n #log4j.logger.org.apache.shiro=DEBUG\n #log4j.logger.org.apache.http=DEBUG\n #log4j.logger.org.apache.http.client=DEBUG\n #log4j.logger.org.apache.http.headers=DEBUG\n #log4j.logger.org.apache.http.wire=DEBUG\n\n log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n log4j.appender.drfa.layout.ConversionPattern=%
d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n log4j.logger.audit=INFO, auditfile\n log4j.appender.auditfile=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.auditfile.File=${app.log.dir}/${app.audit.file}\n log4j.appender.auditfile.Append = true\n log4j.appender.auditfile.DatePattern = '.'yyyy-MM-dd\n log4j.appender.auditfile.layout = org.apache.hadoop.gateway.audit.log4j.layout.AuditLayout"
+ },
+ "knox-env": {
+ "knox_master_secret": "sa",
+ "knox_group": "knox",
+ "knox_pid_dir": "/var/run/knox",
+ "knox_user": "knox"
+ },
+ "kafka-env": {
+ "content": "\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin",
+ "kafka_user": "kafka",
+ "kafka_log_dir": "/var/log/kafka",
+ "kafka_pid_dir": "/var/run/kafka"
+ },
+ "kafka-log4j": {
+ "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\nkafka.logs.dir=logs\n\nlog4j.rootLogger=INFO, stdout\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache.log4j
.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log\nlog4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log\nlog4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-reques
t.log\nlog4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log\nlog4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log\nlog4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\n# Turn on all our debugging info\n#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender\n#log4j.logge
r.kafka.client.ClientUtils=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender\n#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG\nlog4j.logger.kafka=INFO, kafkaAppender\nlog4j.logger.kafka.network.RequestChannel$=WARN, requestAppender\nlog4j.additivity.kafka.network.RequestChannel$=false\n\n#log4j.logger.kafka.network.Processor=TRACE, requestAppender\n#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender\n#log4j.additivity.kafka.server.KafkaApis=false\nlog4j.logger.kafka.request.logger=WARN, requestAppender\nlog4j.additivity.kafka.request.logger=false\n\nlog4j.logger.kafka.controller=TRACE, controllerAppender\nlog4j.additivity.kafka.controller=false\n\nlog4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender\nlog4j.additivity.kafka.log.LogCleaner=false\n\nlog4j.logger.state.change.logger=TRACE, stateChangeAppender\nlog4j.additivity.state.change.logger=false"
+ },
+ "kafka-broker": {
+ "log.segment.bytes": "1073741824",
+ "socket.send.buffer.bytes": "102400",
+ "num.network.threads": "3",
+ "log.flush.scheduler.interval.ms": "3000",
+ "kafka.ganglia.metrics.host": "localhost",
+ "zookeeper.session.timeout.ms": "6000",
+ "replica.lag.time.max.ms": "10000",
+ "num.io.threads": "8",
+ "kafka.ganglia.metrics.group": "kafka",
+ "replica.lag.max.messages": "4000",
+ "port": "6667",
+ "log.retention.bytes": "-1",
+ "fetch.purgatory.purge.interval.requests": "10000",
+ "producer.purgatory.purge.interval.requests": "10000",
+ "default.replication.factor": "1",
+ "replica.high.watermark.checkpoint.interval.ms": "5000",
+ "zookeeper.connect": "c6402.ambari.apache.org:2181",
+ "controlled.shutdown.retry.backoff.ms": "5000",
+ "num.partitions": "1",
+ "log.flush.interval.messages": "10000",
+ "replica.fetch.min.bytes": "1",
+ "queued.max.requests": "500",
+ "controlled.shutdown.max.retries": "3",
+ "replica.fetch.wait.max.ms": "500",
+ "controlled.shutdown.enable": "false",
+ "log.roll.hours": "168",
+ "log.cleanup.interval.mins": "10",
+ "replica.socket.receive.buffer.bytes": "65536",
+ "zookeeper.connection.timeout.ms": "6000",
+ "replica.fetch.max.bytes": "1048576",
+ "num.replica.fetchers": "1",
+ "socket.request.max.bytes": "104857600",
+ "message.max.bytes": "1000000",
+ "zookeeper.sync.time.ms": "2000",
+ "socket.receive.buffer.bytes": "102400",
+ "controller.message.queue.size": "10",
+ "log.flush.interval.ms": "3000",
+ "log.dirs": "/tmp/log/dir",
+ "controller.socket.timeout.ms": "30000",
+ "replica.socket.timeout.ms": "30000",
+ "auto.create.topics.enable": "true",
+ "log.index.size.max.bytes": "10485760",
+ "kafka.ganglia.metrics.port": "8649",
+ "log.index.interval.bytes": "4096",
+ "log.retention.hours": "168"
+ },
+ "spark-defaults": {
+ "spark.yarn.applicationMaster.waitTries": "10",
+ "spark.history.kerberos.keytab": "none",
+ "spark.yarn.preserve.staging.files": "false",
+ "spark.yarn.submit.file.replication": "3",
+ "spark.history.kerberos.principal": "none",
+ "spark.yarn.driver.memoryOverhead": "384",
+ "spark.yarn.queue": "default",
+ "spark.yarn.containerLauncherMaxThreads": "25",
+ "spark.yarn.scheduler.heartbeat.interval-ms": "5000",
+ "spark.history.ui.port": "18080",
+ "spark.yarn.max.executor.failures": "3",
+ "spark.driver.extraJavaOptions": "",
+ "spark.history.provider": "org.apache.spark.deploy.yarn.history.YarnHistoryProvider",
+ "spark.yarn.am.extraJavaOptions": "",
+ "spark.yarn.executor.memoryOverhead": "384"
+ },
+ "spark-javaopts-properties": {
+ "content": " "
+ },
+ "spark-log4j-properties": {
+ "content": "\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO"
+ },
+ "spark-env": {
+ "content": "\n#!/usr/bin/env bash\n\n# This file is sourced when running various Spark programs.\n# Copy it as spark-env.sh and edit that to configure Spark for your site.\n\n# Options read in YARN client mode\n#SPARK_EXECUTOR_INSTANCES=\"2\" #Number of workers to start (Default: 2)\n#SPARK_EXECUTOR_CORES=\"1\" #Number of cores for the workers (Default: 1).\n#SPARK_EXECUTOR_MEMORY=\"1G\" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\n#SPARK_DRIVER_MEMORY=\"512 Mb\" #Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\n#SPARK_YARN_APP_NAME=\"spark\" #The name of your application (Default: Spark)\n#SPARK_YARN_QUEUE=\"~@~Xdefault~@~Y\" #The hadoop queue to use for allocation requests (Default: @~Xdefault~@~Y)\n#SPARK_YARN_DIST_FILES=\"\" #Comma separated list of files to be distributed with the job.\n#SPARK_YARN_DIST_ARCHIVES=\"\" #Comma separated list of archives to be distributed with the job.\n\n# Generic options for the daemons used in the standalone deploy mode\n\n# Alt
ernate conf dir. (Default: ${SPARK_HOME}/conf)\nexport SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf\n\n# Where log files are stored.(Default:${SPARK_HOME}/logs)\n#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs\nexport SPARK_LOG_DIR={{spark_log_dir}}\n\n# Where the pid file is stored. (Default: /tmp)\nexport SPARK_PID_DIR={{spark_pid_dir}}\n\n# A string representing this instance of spark.(Default: $USER)\nSPARK_IDENT_STRING=$USER\n\n# The scheduling priority for daemons. (Default: 0)\nSPARK_NICENESS=0\n\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\nif [ -d \"/etc/tez/conf/\" ]; then\n export TEZ_CONF_DIR=/etc/tez/conf\nelse\n export TEZ_CONF_DIR=\nfi",
+ "spark_pid_dir": "/var/run/spark",
+ "spark_log_dir": "/var/log/spark",
+ "spark_group": "spark",
+ "spark_user": "spark"
+ },
+ "spark2-env": {
+ "content": "\n#!/usr/bin/env bash\n\n# This file is sourced when running various Spark programs.\n# Copy it as spark-env.sh and edit that to configure Spark for your site.\n\n# Options read in YARN client mode\n#SPARK_EXECUTOR_INSTANCES=\"2\" #Number of workers to start (Default: 2)\n#SPARK_EXECUTOR_CORES=\"1\" #Number of cores for the workers (Default: 1).\n#SPARK_EXECUTOR_MEMORY=\"1G\" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\n#SPARK_DRIVER_MEMORY=\"512 Mb\" #Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\n#SPARK_YARN_APP_NAME=\"spark\" #The name of your application (Default: Spark)\n#SPARK_YARN_QUEUE=\"~@~Xdefault~@~Y\" #The hadoop queue to use for allocation requests (Default: @~Xdefault~@~Y)\n#SPARK_YARN_DIST_FILES=\"\" #Comma separated list of files to be distributed with the job.\n#SPARK_YARN_DIST_ARCHIVES=\"\" #Comma separated list of archives to be distributed with the job.\n\n# Generic options for the daemons used in the standalone deploy mode\n\n# Alt
ernate conf dir. (Default: ${SPARK_HOME}/conf)\nexport SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf\n\n# Where log files are stored.(Default:${SPARK_HOME}/logs)\n#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs\nexport SPARK_LOG_DIR={{spark_log_dir}}\n\n# Where the pid file is stored. (Default: /tmp)\nexport SPARK_PID_DIR={{spark_pid_dir}}\n\n# A string representing this instance of spark.(Default: $USER)\nSPARK_IDENT_STRING=$USER\n\n# The scheduling priority for daemons. (Default: 0)\nSPARK_NICENESS=0\n\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\nif [ -d \"/etc/tez/conf/\" ]; then\n export TEZ_CONF_DIR=/etc/tez/conf\nelse\n export TEZ_CONF_DIR=\nfi",
+ "spark_pid_dir": "/var/run/spark",
+ "spark_log_dir": "/var/log/spark",
+ "spark_group": "spark",
+ "spark_user": "spark"
+ },
+ "spark-metrics-properties": {
+ "content": "\n# syntax: [instance].sink|source.[name].[options]=[value]\n\n# This file configures Spark's internal metrics system. The metrics system is\n# divided into instances which correspond to internal components.\n# Each instance can be configured to report its metrics to one or more sinks.\n# Accepted values for [instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and \"applications\". A wild card \"*\" can be used as an instance name, in\n# which case all instances will inherit the supplied property.\n#\n# Within an instance, a \"source\" specifies a particular set of grouped metrics.\n# there are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will\n# collect a Spark component's internal state. Each instance is paired with a\n# Spark source that is added automatically.\n# 2. Common sources, like JvmSource, which will collect low level state.\n# These can be added through configuration options and are then
loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The sink|source field specifies whether the property relates to a sink or\n# source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The [options] field is the specific property of this source or sink. The\n# source or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a new sink, set the \"class\" option to a fully qualified class\n# name (see examples below).\n# 2. Some sinks involve a polling period. The minimum allowed polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by more specific properties.\n# For example, master.sink.console.period takes precedence over\n# *.sink.console.period.\n# 4. A metrics specific configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you want to\
n# customize metrics system. You can also put the file in ${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet is added by default as a sink in master, worker and client\n# driver, you can send http request \"/metrics/json\" to get a snapshot of all the\n# registered metrics in json format. For master, requests \"/metrics/master/json\" and\n# \"/metrics/applications/json\" can be sent seperately to get metrics snapshot of\n# instance master and applications. MetricsServlet may not be configured by self.\n#\n\n## List of available sinks and their properties.\n\n# org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n\n# org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: Default: Description:\n# host N
ONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode ('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# path VARIES* Path prefix from the web server root\n# sample false Whether to show entire set of samples for histograms ('false' or 'true')\n#\n# * Default path is /metrics/json for all instances except the master. The master has two paths:\n# /metrics/aplications/json # App information\n# /metrics/master/json # Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll period\n# prefix EMPTY STRING Prefix to prepend to metric name\n\n##
Examples\n# Enable JmxSink for all instances by class name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable ConsoleSink for all instances by class name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# Polling period for ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# Master instance overlap polling period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance overlap polling period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable jvm source for instance master, worker, driver and executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSo
urce\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
+ },
+ "spark-metrics-properties": {
+ "content": "\n# syntax: [instance].sink|source.[name].[options]=[value]\n\n# This file configures Spark's internal metrics system. The metrics system is\n# divided into instances which correspond to internal components.\n# Each instance can be configured to report its metrics to one or more sinks.\n# Accepted values for [instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and \"applications\". A wild card \"*\" can be used as an instance name, in\n# which case all instances will inherit the supplied property.\n#\n# Within an instance, a \"source\" specifies a particular set of grouped metrics.\n# there are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will\n# collect a Spark component's internal state. Each instance is paired with a\n# Spark source that is added automatically.\n# 2. Common sources, like JvmSource, which will collect low level state.\n# These can be added through configuration options and are then
loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The sink|source field specifies whether the property relates to a sink or\n# source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The [options] field is the specific property of this source or sink. The\n# source or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a new sink, set the \"class\" option to a fully qualified class\n# name (see examples below).\n# 2. Some sinks involve a polling period. The minimum allowed polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by more specific properties.\n# For example, master.sink.console.period takes precedence over\n# *.sink.console.period.\n# 4. A metrics specific configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you want to\
n# customize metrics system. You can also put the file in ${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet is added by default as a sink in master, worker and client\n# driver, you can send http request \"/metrics/json\" to get a snapshot of all the\n# registered metrics in json format. For master, requests \"/metrics/master/json\" and\n# \"/metrics/applications/json\" can be sent separately to get metrics snapshot of\n# instance master and applications. MetricsServlet cannot be configured by itself.\n#\n\n## List of available sinks and their properties.\n\n# org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n\n# org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: Default: Description:\n# host N
ONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode ('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# path VARIES* Path prefix from the web server root\n# sample false Whether to show entire set of samples for histograms ('false' or 'true')\n#\n# * Default path is /metrics/json for all instances except the master. The master has two paths:\n# /metrics/applications/json # App information\n# /metrics/master/json # Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll period\n# prefix EMPTY STRING Prefix to prepend to metric name\n\n##
Examples\n# Enable JmxSink for all instances by class name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable ConsoleSink for all instances by class name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# Polling period for ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# Master instance overlap polling period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance overlap polling period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable jvm source for instance master, worker, driver and executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSo
urce\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
+ },
+ "livy-log4j-properties": {
+ "content": "\n # Set everything to be logged to the console\n log4j.rootCategory=INFO, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n log4j.logger.org.eclipse.jetty=WARN"
+ },
+ "livy2-log4j-properties": {
+ "content": "\n # Set everything to be logged to the console\n log4j.rootCategory=INFO, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n log4j.logger.org.eclipse.jetty=WARN"
+ },
+ "livy-conf": {
+ "livy.server.port": "8998",
+ "livy.server.csrf_protection.enabled": "true",
+ "livy.environment": "production",
+ "livy.impersonation.enabled": "true",
+ "livy.server.session.timeout": "3600000"
+ },
+ "livy2-conf": {
+ "livy.server.port": "8999",
+ "livy.server.csrf_protection.enabled": "true",
+ "livy.environment": "production",
+ "livy.impersonation.enabled": "true",
+ "livy.server.session.timeout": "3600000"
+ },
+ "livy-spark-blacklist": {
+ "content": "\n #\n # Configuration override / blacklist. Defines a list of properties that users are not allowed\n # to override when starting Spark sessions.\n #\n # This file takes a list of property names (one per line). Empty lines and lines starting with \"#\"\n # are ignored.\n #"
+ },
+ "livy2-spark-blacklist": {
+ "content": "\n #\n # Configuration override / blacklist. Defines a list of properties that users are not allowed\n # to override when starting Spark sessions.\n #\n # This file takes a list of property names (one per line). Empty lines and lines starting with \"#\"\n # are ignored.\n #"
+ },
+ "livy-env": {
+ "livy_group": "livy",
+ "spark_home": "/usr/hdp/current/spark-client",
+ "content": "\n #!/usr/bin/env bash\n\n # - SPARK_HOME Spark which you would like to use in livy\n # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.\n # - LIVY_LOG_DIR Where log files are stored. (Default: ${LIVY_HOME}/logs)\n # - LIVY_PID_DIR Where the pid file is stored. (Default: /tmp)\n # - LIVY_SERVER_JAVA_OPTS Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)\n export SPARK_HOME=/usr/hdp/current/spark-client\n export HADOOP_CONF_DIR=/etc/hadoop/conf\n export LIVY_LOG_DIR={{livy_log_dir}}\n export LIVY_PID_DIR={{livy_pid_dir}}\n export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"",
+ "livy_pid_dir": "/var/run/livy",
+ "livy_log_dir": "/var/log/livy",
+ "livy_user": "livy"
+ },
+ "livy2-env": {
+ "livy2_group": "livy",
+ "spark_home": "/usr/hdp/current/spark2-client",
+ "content": "\n #!/usr/bin/env bash\n\n # - SPARK_HOME Spark which you would like to use in livy\n # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.\n # - LIVY_LOG_DIR Where log files are stored. (Default: ${LIVY_HOME}/logs)\n # - LIVY_PID_DIR Where the pid file is stored. (Default: /tmp)\n # - LIVY_SERVER_JAVA_OPTS Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)\n export SPARK_HOME=/usr/hdp/current/spark2-client\n export HADOOP_CONF_DIR=/etc/hadoop/conf\n export LIVY_LOG_DIR={{livy_log_dir}}\n export LIVY_PID_DIR={{livy_pid_dir}}\n export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"",
+ "livy2_pid_dir": "/var/run/livy2",
+ "livy2_log_dir": "/var/log/livy2",
+ "livy2_user": "livy"
+ },
+ "infra-solr-env": {
+ "infra_solr_znode": "/infra-solr",
+ "infra_solr_user": "solr",
+ "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
+ },
+ "infra-solr-client-log4j" : {
+ "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
+ "content" : "content"
+ },
+ "application-properties": {
+ "atlas.cluster.name" : "c2",
+ "atlas.rest.address": "http://c6401.ambari.apache.org:21000",
+ "atlas.graph.storage.backend": "berkeleyje",
+ "atlas.graph.storage.directory": "data/berkley",
+ "atlas.graph.index.search.backend": "solr5",
+ "atlas.graph.index.search.directory": "data/es",
+ "atlas.graph.index.search.elasticsearch.client-only": false,
+ "atlas.graph.index.search.elasticsearch.local-mode": true,
+ "atlas.lineage.hive.table.type.name": "Table",
+ "atlas.lineage.hive.column.type.name": "Column",
+ "atlas.lineage.hive.table.column.name": "columns",
+ "atlas.lineage.hive.process.type.name": "LoadProcess",
+ "atlas.lineage.hive.process.inputs.name": "inputTables",
+ "atlas.lineage.hive.process.outputs.name": "outputTables",
+ "atlas.enableTLS": false,
+ "atlas.authentication.method": "simple",
+ "atlas.authentication.principal": "atlas",
+ "atlas.authentication.keytab": "/etc/security/keytabs/atlas.service.keytab",
+ "atlas.http.authentication.enabled": false,
+ "atlas.http.authentication.type": "simple",
+ "atlas.http.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
+ "atlas.http.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "atlas.http.authentication.kerberos.name.rules": "DEFAULT",
+ "atlas.server.http.port" : "21000",
+ "atlas.notification.embedded" : false,
+ "atlas.kafka.bootstrap.servers" : "c6401.ambari.apache.org:6667",
+ "atlas.kafka.data" : "/usr/hdp/current/atlas-server/data/kafka",
+ "atlas.kafka.entities.group.id" : "entities",
+ "atlas.kafka.hook.group.id" : "atlas",
+ "atlas.kafka.zookeeper.connect" : "c6401.ambari.apache.org:2181"
+ },
+ "atlas-env": {
+ "content": "# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java64_home}}\n# any additional java opts you want to set. This will apply to both client and server operations\nexport METADATA_OPTS={{metadata_opts}}\n# metadata configuration directory\nexport METADATA_CONF={{conf_dir}}\n# Where log files are stored. Defatult is logs directory under the base install location\nexport METADATA_LOG_DIR={{log_dir}}\n# additional classpath entries\nexport METADATACPPATH={{metadata_classpath}}\n# data dir\nexport METADATA_DATA_DIR={{data_dir}}\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\nexport METADATA_EXPANDED_WEBAPP_DIR={{expanded_war_dir}}",
+ "metadata_user": "atlas",
+ "metadata_port": 21000,
+ "metadata_pid_dir": "/var/run/atlas",
+ "metadata_log_dir": "/var/log/atlas",
+ "metadata_data_dir": "/var/lib/atlas/data",
+ "metadata_expanded_war_dir": "/var/lib/atlas/server/webapp"
+ },
+ "atlas-log4j": {
+ "content": "<property><name>content</name><description>Custom log4j.properties</description><value></value></property>",
+ "atlas_log_level": "debug",
+ "audit_log_level": "OFF"
+ },
+ "atlas-solrconfig": {
+ "content": "<property><name>content</name><description>Custom solrconfig properties</description><value></value></property>"
+ },
+ "zeppelin-env": {
+ "zeppelin.server.kerberos.keytab": "",
+ "shiro_ini_content": "\n[users]\n# List of users with their password allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / ...) check the shiro doc at http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 = password4, role2\n\n# Sample LDAP configuration, for user Authentication, currently tested for single Realm\n[main]\n#ldapRealm = org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = SIMPLE\n#sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager = $sessionManager\n# 86,400,000 milliseconds = 24 hour\n#securityManager.sessionManager.globalSessionTimeout = 86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enforce security, comment the line below and uncomment the next one\n/api/version = anon\n/** = anon\n#/** = authc",
+ "zeppelin.spark.jar.dir": "/apps/zeppelin",
+ "zeppelin.executor.mem": "512m",
+ "zeppelin_pid_dir": "/var/run/zeppelin",
+ "zeppelin.executor.instances": "2",
+ "log4j_properties_content": "\nlog4j.rootLogger = INFO, dailyfile\nlog4j.appender.stdout = org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold = INFO\nlog4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = ${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n",
+ "zeppelin.server.kerberos.principal": "",
+ "zeppelin_user": "zeppelin",
+ "zeppelin_env_content": "\n# Spark master url. eg. spark://master_addr:7077. Leave empty if you want to use local mode\nexport MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files are stored. PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# The pid files are stored. /tmp by default.\nexport ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} -Dspark.executor.memory={{executor_mem}} -Dspark.executor.instances={{executor_instances}} -Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default -Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter process jvm mem options. Defualt = ZEPPELIN_MEM\n# export ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = ZEPPELIN_JA
VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook from list when this value set to \"true\". default \"false\"\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For example bucket/user/notebook/2A94M5J1Z/note.json\n# export ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. $USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark interpreter configuration ####\n\n## Use provided spark installation ##\n## defining SPARK_HOME makes Zeppelin run spark interpreter process using spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin embedded Spark libraries\n
export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without SPARK_HOME defined, Zeppelin still able to run spark interpreter process using embedded spark binaries.\n## however, it is not encouraged when you can define SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located in configuration directory in HADOOP_CONF_DIR.\nexport HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and above)\n# To configure pyspark, you need to set spark distribution's path to 'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to the python command. must be the same path on the driver(Zeppelin) and all workers.\n# export PYSPARK_PYTHON\n\nexport PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of SQLContext if set true. true by default.\n# export ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number of SparkSQL result to display. 1000 by default.\n# export ZEPPELIN_SPARK_MAXRESULT",
+ "zeppelin_log_dir": "/var/log/zeppelin",
+ "zeppelin_group": "zeppelin"
+ },
+"zeppelin-config": {
+ "zeppelin.server.port": "9995",
+ "zeppelin.server.ssl.port": "9995",
+ "zeppelin.ssl.truststore.password": "change me",
+ "zeppelin.interpreters": "org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+ "zeppelin.interpreter.group.order": "spark,angular,jdbc,livy,md,sh",
+ "zeppelin.ssl.truststore.path": "conf/truststore",
+ "zeppelin.notebook.dir": "notebook",
+ "zeppelin.ssl.keystore.password": "change me",
+ "zeppelin.ssl.keystore.path": "conf/keystore",
+ "zeppelin.server.addr": "0.0.0.0",
+ "zeppelin.ssl.client.auth": "false",
+ "zeppelin.notebook.homescreen": " ",
+ "zeppelin.interpreter.dir": "interpreter",
+ "zeppelin.ssl.keystore.type": "JKS",
+ "zeppelin.notebook.s3.user": "user",
+ "zeppelin.ssl.key.manager.password": "change me",
+ "zeppelin.anonymous.allowed": "true",
+ "zeppelin.ssl.truststore.type": "JKS",
+ "zeppelin.ssl": "false",
+ "zeppelin.notebook.storage": "org.apache.zeppelin.notebook.repo.VFSNotebookRepo",
+ "zeppelin.websocket.max.text.message.size": "1024000",
+ "zeppelin.interpreter.connect.timeout": "30000",
+ "zeppelin.notebook.s3.bucket": "zeppelin",
+ "zeppelin.notebook.homescreen.hide": "false",
+ "zeppelin.server.allowed.origins": "*"
+ },
+ "zoo.cfg": {
+ "clientPort": "2181"
+ },
+ "ranger-hbase-plugin-properties": {
+ "ranger-hbase-plugin-enabled":"yes"
+ },
+ "ranger-hive-plugin-properties": {
+ "ranger-hive-plugin-enabled":"yes"
+ },
+ "ranger-env": {
+ "xml_configurations_supported" : "true"
+ },
+ "tagsync-application-properties": {
+ "atlas.kafka.hook.group.id": "atlas",
+ "atlas.kafka.zookeeper.connect": "os-mv-31-dev-4.novalocal:2181",
+ "atlas.kafka.acks": "1",
+ "atlas.kafka.entities.group.id": "ranger_entities_consumer",
+ "atlas.kafka.data": "/usr/hdp/current/atlas-server/data/kafka",
+ "atlas.kafka.bootstrap.servers": "localhost:2181",
+ "atlas.notification.embedded": "false"
+ },
+ "ranger-tagsync-site": {
+ "ranger.tagsync.sink.impl.class": "org.apache.ranger.tagsync.sink.tagadmin.TagAdminRESTSink",
+ "ranger.tagsync.atlasrestsource.endpoint": "",
+ "ranger.tagsync.tagadmin.rest.ssl.config.file": "/usr/hdp/current/ranger-tagsync/conf/mytruststore.jks",
+ "ranger.tagsync.logdir": "/var/log/ranger/tagsync",
+ "ranger.tagsync.filesource.filename": "/usr/hdp/current/ranger-tagsync/conf/etc/ranger/data/tags.json",
+ "ranger.tagsync.enabled": "true",
+ "ranger.tagsync.tagadmin.rest.url": "{{ranger_external_url}}",
+ "ranger.tagsync.atlasrestsource.download.interval": "",
+ "ranger.tagsync.filesource.modtime.check.interval": "60000",
+ "ranger.tagsync.tagadmin.password": "rangertagsync",
+ "ranger.tagsync.source.impl.class": "file",
+ "ranger.tagsync.source.atlas.custom.resource.mappers": "",
+ "ranger.tagsync.tagadmin.alias": "tagsync.tagadmin",
+ "ranger.tagsync.tagadmin.keystore": "/usr/hdp/current/ranger-tagsync/conf/rangertagsync.jceks",
+ "ranger.tagsync.atlas.to.service.mapping": ""
+ },
+ "druid-env": {
+ "druid_log_dir" : "/var/log/druid",
+ "druid_pid_dir" : "/var/run/druid",
+ "content" : "#!/bin/bash\n # Set DRUID specific environment variables here.\n# The java implementation to use\nexport JAVA_HOME={{java8_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport DRUID_PID_DIR={{druid_pid_dir}}\nexport DRUID_LOG_DIR={{druid_log_dir}}\nexport DRUID_CONF_DIR={{druid_conf_dir}}\nexport DRUID_LIB_DIR={{druid_home}}/lib",
+ "druid.coordinator.jvm.heap.memory" : 1024,
+ "druid.coordinator.jvm.direct.memory": 2048,
+ "druid.coordinator.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.broker.jvm.heap.memory" : 1024,
+ "druid.broker.jvm.direct.memory": 2048,
+ "druid.broker.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.middlemanager.jvm.heap.memory" : 1024,
+ "druid.middlemanager.jvm.direct.memory": 2048,
+ "druid.middlemanager.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.historical.jvm.heap.memory" : 1024,
+ "druid.historical.jvm.direct.memory": 2048,
+ "druid.historical.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.overlord.jvm.heap.memory" : 1024,
+ "druid.overlord.jvm.direct.memory": 2048,
+ "druid.overlord.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.router.jvm.heap.memory" : 1024,
+ "druid.router.jvm.direct.memory": 2048,
+ "druid.router.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid_user": "druid"
+ },
+ "druid-common" : {
+ "druid.metadata.storage.type" : "mysql",
+ "druid.metadata.storage.connector.connectURI" : "jdbc:mysql://my-db-host:3306/druid?createDatabaseIfNotExist=true",
+ "druid.metadata.storage.connector.user" : "druid",
+ "druid.metadata.storage.connector.password" : "diurd",
+ "druid.storage.type" : "hdfs",
+ "druid.storage.storageDirectory" : "/user/druid/data",
+ "druid.indexer.logs.type": "hdfs",
+ "druid.indexer.logs.directory": "/user/druid/logs",
+ "druid.extensions.pullList": "[\"custom-druid-extension\"]",
+ "druid.extensions.repositoryList": "[\"http://custom-mvn-repo/public/release\"]",
+ "druid.extensions.loadList": "[\"mysql-metadata-storage\", \"druid-datasketches\"]",
+ "druid.security.extensions.loadList": "[\"druid-kerberos\"]"
+ },
+ "druid-historical" : {
+ "druid.segmentCache.infoDir" : "/apps/druid/segmentCache/info_dir",
+ "druid.segmentCache.locations" :"[{\"path\":\"/apps/druid/segmentCache\",\"maxSize\":300000000000}]"
+ },
+ "druid-coordinator" : {
+ "druid.service" : "druid/coordinator"
+ },
+ "druid-overlord" : {
+ "druid.service" : "druid/overlord"
+ },
+ "druid-broker" : {
+ "druid.service" : "druid/broker"
+ },
+ "druid-middlemanager" : {
+ "druid.service" : "druid/middlemanager",
+ "druid.indexer.task.hadoopWorkingPath" : "/tmp/druid-indexing",
+ "druid.indexer.task.baseTaskDir" : "/apps/druid/tasks"
+ },
+ "druid-router" : {
+ "druid.service" : "druid/router"
+ },
+ "druid-log4j" : {
+ "content" : "<![CDATA[<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n ~ Licensed to the Apache Software Foundation (ASF) under one\n ~ or more contributor license agreements. See the NOTICE file\n ~ distributed with this work for additional information\n ~ regarding copyright ownership. The ASF licenses this file\n ~ to you under the Apache License, Version 2.0 (the\n ~ \"License\"); you may not use this file except in compliance\n ~ with the License. You may obtain a copy of the License at\n ~\n ~ http://www.apache.org/licenses/LICENSE-2.0\n ~\n ~ Unless required by applicable law or agreed to in writing, software\n ~ distributed under the License is distributed on an \"AS IS\" BASIS,\n ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n ~ See the License for the specific language governing permissions and\n ~ limitations under the License.\n -->\n <Configuration>\n <Appenders>\n <Console name=\"Console\
" target=\"SYSTEM_OUT\">\n <PatternLayout pattern=\"%d{ISO8601} %p [%t] %c - %m%n\"/>\n </Console>\n </Appenders>\n <Loggers>\n <Logger name=\"com.metamx\" level=\"{{metamx_log_level}}\"/>\n <Logger name=\"io.druid\" level=\"{{druid_log_level}}\"/>\n <Root level=\"{{root_log_level}}\">\n <AppenderRef ref=\"Console\"/>\n </Root>\n </Loggers>\n </Configuration>\n ]]>\n"
+ },
+ "druid-logrotate" : {
+ "content" : "<![CDATA[\n {{druid_log_dir}}/*.log {\n copytruncate\n rotate 7\n daily\n nocompress\n missingok\n notifempty\n create 660 druid users\n dateext\n dateformat -%Y-%m-%d-%s\n }\n ]]>\n"
+ },
+ "druid-superset" : {
+ "SUPERSET_DATABASE_TYPE" : "sqllite"
+ }
+ },
+ "configuration_attributes": {
+ "sqoop-site": {},
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "yarn-site": {
+ "final": {
+ "is_supported_yarn_ranger": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ },
+ "knox-env": {},
+ "gateway-site": {},
+ "users-ldif": {},
+ "kafka-env": {},
+ "kafka-log4j": {},
+ "kafka-broker": {},
+ "metadata-env": {},
+ "atlas-hbase-site": {},
+ "tagsync-application-properties": {},
+ "ranger-tagsync-site": {}
+ },
+ "configurationTags": {
+ "slider-client": {
+ "tag": "version1"
+ },
+ "slider-log4j": {
+ "tag": "version1"
+ },
+ "slider-env": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "gateway-site": {
+ "tag": "version1"
+ },
+ "topology": {
+ "tag": "version1"
+ },
+ "users-ldif": {
+ "tag": "version1"
+ },
+ "kafka-env": {
+ "tag": "version1"
+ },
+ "kafka-log4j": {
+ "tag": "version1"
+ },
+ "kafka-broker": {
+ "tag": "version1"
+ },
+ "metadata-env": {
+ "tag": "version1"
+ },
+ "tagsync-application-properties": {
+ "tag": "version1"
+ },
+ "ranger-tagsync-site": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "knox_gateway_hosts": [
+ "jaimin-knox-1.c.pramod-thangali.internal"
+ ],
+ "kafka_broker_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "infra_solr_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_tagsync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "atlas_server_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zeppelin_master_hosts": [
+ "c6401.ambari.apache.org"
+ ]
+ }
+}
[42/57] [abbrv] ambari git commit: AMBARI-21927. Unable to add a
property to Default conf group which has same property name as in custom
config group (akovalenko)
Posted by lp...@apache.org.
AMBARI-21927. Unable to add a property to Default conf group which has same property name as in custom config group (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/92bd10d6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/92bd10d6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/92bd10d6
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 92bd10d65f92f95c55bd2bc84a75244f2748ed09
Parents: 26b9f4f
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Mon Sep 11 16:57:13 2017 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Mon Sep 11 19:53:52 2017 +0300
----------------------------------------------------------------------
.../configs/service_configs_by_category_view.js | 16 +++++++++++++---
1 file changed, 13 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/92bd10d6/ambari-web/app/views/common/configs/service_configs_by_category_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/service_configs_by_category_view.js b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
index 6cf9b99..2e7de36 100644
--- a/ambari-web/app/views/common/configs/service_configs_by_category_view.js
+++ b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
@@ -430,6 +430,7 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverrid
createProperty: function (propertyObj) {
var config;
var selectedConfigGroup = this.get('controller.selectedConfigGroup');
+ var categoryConfigsAll = this.get('categoryConfigsAll');
if (selectedConfigGroup.get('isDefault')) {
config = App.config.createDefaultConfig(propertyObj.name, propertyObj.filename, false, {
value: propertyObj.value,
@@ -447,7 +448,13 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverrid
isNotSaved: true
}, selectedConfigGroup);
}
- this._appendConfigToCollection(App.ServiceConfigProperty.create(config));
+ var serviceConfigProperty = App.ServiceConfigProperty.create(config);
+ var duplicatedProperty = categoryConfigsAll.findProperty('name', config.name);
+ if (duplicatedProperty && duplicatedProperty.get('isUndefinedLabel')) {
+ serviceConfigProperty.set('overrides', duplicatedProperty.get('overrides'));
+ categoryConfigsAll.removeAt(categoryConfigsAll.indexOf(duplicatedProperty));
+ }
+ this._appendConfigToCollection(serviceConfigProperty);
},
/**
@@ -457,6 +464,7 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverrid
_appendConfigToCollection: function (serviceConfigProperty) {
this.get('serviceConfigs').pushObject(serviceConfigProperty);
this.get('categoryConfigsAll').pushObject(serviceConfigProperty);
+ this.setVisibleCategoryConfigs();
},
/**
@@ -471,7 +479,8 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverrid
var serviceName = service.get('serviceName');
var configsOfFile = service.get('configs').filterProperty('filename', siteFileName);
- return configsOfFile.findProperty('name', name);
+ var duplicatedProperty = configsOfFile.findProperty('name', name);
+ return duplicatedProperty && !duplicatedProperty.get('isUndefinedLabel');
},
/**
@@ -484,7 +493,8 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.Persist, App.ConfigOverrid
var configFiles = service.get('configs').mapProperty('filename').uniq();
configFiles.forEach(function (configFile) {
var configsOfFile = service.get('configs').filterProperty('filename', configFile);
- if (configsOfFile.findProperty('name', name)) {
+ var duplicatedProperty = configsOfFile.findProperty('name', name);
+ if (duplicatedProperty && !duplicatedProperty.get('isUndefinedLabel')) {
files.push(configFile);
}
}, this);
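The net effect of the hunks above, condensed (a sketch, not the verbatim code; reading "isUndefinedLabel" as marking a stub property that was auto-created only to carry custom-config-group overrides is an assumption, not something the diff states):
   var serviceConfigProperty = App.ServiceConfigProperty.create(config);
   var duplicatedProperty = categoryConfigsAll.findProperty('name', config.name);
   if (duplicatedProperty && duplicatedProperty.get('isUndefinedLabel')) {
     // Hand the custom-group overrides to the newly created default-group
     // property, then drop the stub so the name is no longer a duplicate.
     serviceConfigProperty.set('overrides', duplicatedProperty.get('overrides'));
     categoryConfigsAll.removeAt(categoryConfigsAll.indexOf(duplicatedProperty));
   }
   this._appendConfigToCollection(serviceConfigProperty);
With the same "isUndefinedLabel" check added to the duplicate lookups, such a stub no longer blocks adding the property to the default group.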
[28/57] [abbrv] ambari git commit: AMBARI-21915 Log Search UI: unit
tests setup. (ababiichuk)
Posted by lp...@apache.org.
AMBARI-21915 Log Search UI: unit tests setup. (ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ab8b39c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ab8b39c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ab8b39c
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 2ab8b39c17da02d4c06b22d12b9fa041c7407540
Parents: a059478
Author: ababiichuk <ab...@hortonworks.com>
Authored: Fri Sep 8 14:44:20 2017 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Fri Sep 8 14:44:20 2017 +0300
----------------------------------------------------------------------
.../ambari-logsearch-web/.angular-cli.json | 2 +-
.../ambari-logsearch-web/karma.conf.js | 10 +-
.../ambari-logsearch-web/package.json | 4 +-
.../src/app/components/app.component.spec.ts | 14 +-
.../dropdown-button.component.spec.ts | 14 +-
.../dropdown-list.component.spec.ts | 14 +-
.../filter-button.component.spec.ts | 15 +-
.../filter-dropdown.component.spec.ts | 14 +-
.../filters-panel.component.spec.ts | 14 +-
.../login-form/login-form.component.spec.ts | 15 +-
.../logs-list/logs-list.component.spec.ts | 14 +-
.../menu-button/menu-button.component.spec.ts | 18 +-
.../components/modal/modal.component.spec.ts | 16 +-
.../pagination/pagination.component.spec.ts | 29 +-
.../search-box/search-box.component.spec.ts | 16 +-
.../timezone-picker.component.spec.ts | 14 +-
.../app/services/http-client.service.spec.ts | 8 +-
.../src/app/test-config.spec.ts | 36 +
.../ambari-logsearch-web/src/test.ts | 7 +-
ambari-logsearch/ambari-logsearch-web/yarn.lock | 1803 +++++++++++++-----
20 files changed, 1409 insertions(+), 668 deletions(-)
----------------------------------------------------------------------
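The new shared helper src/app/test-config.spec.ts (36 lines, not shown in full in this digest) replaces the HttpLoaderFactory boilerplate that the hunks below delete from each spec. Based on exactly what those hunks remove and re-import, a minimal sketch of it might look like this (the actual file may differ):
   import {Http, HttpModule} from '@angular/http';
   import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
   import {TranslateHttpLoader} from '@ngx-translate/http-loader';

   export function HttpLoaderFactory(http: Http) {
     return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
   }

   // Spread into the "imports" array of TestBed.configureTestingModule,
   // e.g. imports: [...TranslationModules]
   export const TranslationModules = [
     HttpModule,
     TranslateModule.forRoot({
       provide: TranslateLoader,
       useFactory: HttpLoaderFactory,
       deps: [Http]
     })
   ];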
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/.angular-cli.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/.angular-cli.json b/ambari-logsearch/ambari-logsearch-web/.angular-cli.json
index cc74739..70fd7b6 100644
--- a/ambari-logsearch/ambari-logsearch-web/.angular-cli.json
+++ b/ambari-logsearch/ambari-logsearch-web/.angular-cli.json
@@ -1,7 +1,7 @@
{
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
"project": {
- "name": "ambari-logsearch-web-new"
+ "name": "ambari-logsearch-web"
},
"apps": [
{
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/karma.conf.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/karma.conf.js b/ambari-logsearch/ambari-logsearch-web/karma.conf.js
index 0512808..08608d8 100644
--- a/ambari-logsearch/ambari-logsearch-web/karma.conf.js
+++ b/ambari-logsearch/ambari-logsearch-web/karma.conf.js
@@ -25,7 +25,7 @@ module.exports = function (config) {
frameworks: ['jasmine', '@angular/cli'],
plugins: [
require('karma-jasmine'),
- require('karma-chrome-launcher'),
+ require('karma-phantomjs-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'),
require('@angular/cli/plugins/karma')
@@ -43,7 +43,7 @@ module.exports = function (config) {
'text/x-typescript': ['ts','tsx']
},
coverageIstanbulReporter: {
- reports: [ 'html', 'lcovonly' ],
+ reports: ['html', 'lcovonly'],
fixWebpackSourcePaths: true
},
angularCli: {
@@ -55,8 +55,8 @@ module.exports = function (config) {
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
- autoWatch: true,
- browsers: ['Chrome'],
- singleRun: false
+ autoWatch: false,
+ browsers: ['PhantomJS'],
+ singleRun: true
});
};
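Note: with karma-phantomjs-launcher plus singleRun: true and autoWatch: false, the suite now runs headlessly once and exits instead of keeping a Chrome instance open and watching for changes, which makes it usable in automated (CI) builds.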
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/package.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/package.json b/ambari-logsearch/ambari-logsearch-web/package.json
index 92c5043..0892d85 100644
--- a/ambari-logsearch/ambari-logsearch-web/package.json
+++ b/ambari-logsearch/ambari-logsearch-web/package.json
@@ -39,7 +39,7 @@
"zone.js": "^0.8.4"
},
"devDependencies": {
- "@angular/cli": "1.0.0",
+ "@angular/cli": "^1.4.0",
"@angular/compiler-cli": "^4.0.0",
"@types/d3": "^4.10.0",
"@types/jasmine": "2.5.38",
@@ -52,11 +52,11 @@
"jasmine-core": "~2.5.2",
"jasmine-spec-reporter": "~3.2.0",
"karma": "~1.4.1",
- "karma-chrome-launcher": "~2.0.0",
"karma-cli": "~1.0.1",
"karma-coverage-istanbul-reporter": "^0.2.0",
"karma-jasmine": "~1.1.0",
"karma-jasmine-html-reporter": "^0.2.2",
+ "karma-phantomjs-launcher": "^1.0.4",
"protractor": "~5.1.0",
"ts-node": "~2.0.0",
"tslint": "~4.5.0",
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts
index bc16ea4..490e058 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/app.component.spec.ts
@@ -18,19 +18,13 @@
import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
import {TestBed, async} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
import {StoreModule} from '@ngrx/store';
+import {TranslationModules} from '@app/test-config.spec';
import {AppStateService, appState} from '@app/services/storage/app-state.service';
import {HttpClientService} from '@app/services/http-client.service';
import {AppComponent} from './app.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('AppComponent', () => {
beforeEach(async(() => {
const httpClient = {
@@ -46,11 +40,7 @@ describe('AppComponent', () => {
StoreModule.provideStore({
appState
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppStateService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
index 8efe320..c5845b3 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
@@ -18,9 +18,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {ClustersService, clusters} from '@app/services/storage/clusters.service';
@@ -31,10 +29,6 @@ import {ComponentActionsService} from '@app/services/component-actions.service';
import {DropdownButtonComponent} from './dropdown-button.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('DropdownButtonComponent', () => {
let component: DropdownButtonComponent;
let fixture: ComponentFixture<DropdownButtonComponent>;
@@ -48,11 +42,7 @@ describe('DropdownButtonComponent', () => {
clusters,
components
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppSettingsService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
index 5409d30..eacac04 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
@@ -17,10 +17,8 @@
*/
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
import {StoreModule} from '@ngrx/store';
+import {TranslationModules} from '@app/test-config.spec';
import {HostsService, hosts} from '@app/services/storage/hosts.service';
import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
@@ -37,10 +35,6 @@ import {FilteringService} from '@app/services/filtering.service';
import {DropdownListComponent} from './dropdown-list.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('DropdownListComponent', () => {
let component: DropdownListComponent;
let fixture: ComponentFixture<DropdownListComponent>;
@@ -57,11 +51,7 @@ describe('DropdownListComponent', () => {
TestBed.configureTestingModule({
declarations: [DropdownListComponent],
imports: [
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- }),
+ ...TranslationModules,
StoreModule.provideStore({
hosts,
auditLogs,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
index 5d58b5c..22e4fca 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
@@ -18,9 +18,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {ClustersService, clusters} from '@app/services/storage/clusters.service';
@@ -31,10 +29,6 @@ import {UtilsService} from '@app/services/utils.service';
import {FilterButtonComponent} from './filter-button.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('FilterButtonComponent', () => {
let component: FilterButtonComponent;
let fixture: ComponentFixture<FilterButtonComponent>;
@@ -48,11 +42,8 @@ describe('FilterButtonComponent', () => {
clusters,
components
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })],
+ ...TranslationModules
+ ],
providers: [
AppSettingsService,
ClustersService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
index 323aa56..8620607 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
@@ -17,9 +17,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {FilteringService} from '@app/services/filtering.service';
@@ -28,10 +26,6 @@ import {ComponentActionsService} from '@app/services/component-actions.service';
import {FilterDropdownComponent} from './filter-dropdown.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('FilterDropdownComponent', () => {
let component: FilterDropdownComponent;
let fixture: ComponentFixture<FilterDropdownComponent>;
@@ -59,11 +53,7 @@ describe('FilterDropdownComponent', () => {
StoreModule.provideStore({
appSettings
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppSettingsService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
index 2ced41e..fe2e40a 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
@@ -18,9 +18,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {ClustersService, clusters} from '@app/services/storage/clusters.service';
@@ -39,10 +37,6 @@ import {LogsContainerService} from '@app/services/logs-container.service';
import {FiltersPanelComponent} from './filters-panel.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('FiltersPanelComponent', () => {
let component: FiltersPanelComponent;
let fixture: ComponentFixture<FiltersPanelComponent>;
@@ -71,11 +65,7 @@ describe('FiltersPanelComponent', () => {
serviceLogsHistogramData,
appState
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppSettingsService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts
index fd54fe6..fb5c2a0 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/login-form/login-form.component.spec.ts
@@ -18,19 +18,13 @@
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {FormsModule} from '@angular/forms';
-import {HttpModule, Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppStateService, appState} from '@app/services/storage/app-state.service';
import {HttpClientService} from '@app/services/http-client.service';
import {LoginFormComponent} from './login-form.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('LoginFormComponent', () => {
let component: LoginFormComponent;
let fixture: ComponentFixture<LoginFormComponent>;
@@ -53,13 +47,8 @@ describe('LoginFormComponent', () => {
TestBed.configureTestingModule({
declarations: [LoginFormComponent],
imports: [
- HttpModule,
FormsModule,
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- }),
+ ...TranslationModules,
StoreModule.provideStore({
appState
})
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
index 02c3b23..8c67a13 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
@@ -17,9 +17,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {MomentModule} from 'angular2-moment';
import {MomentTimezoneModule} from 'angular-moment-timezone';
@@ -35,10 +33,6 @@ import {UtilsService} from '@app/services/utils.service';
import {LogsListComponent} from './logs-list.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('LogsListComponent', () => {
let component: LogsListComponent;
let fixture: ComponentFixture<LogsListComponent>;
@@ -65,11 +59,7 @@ describe('LogsListComponent', () => {
}),
MomentModule,
MomentTimezoneModule,
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
{
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
index 6c9e021..c57c11d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
@@ -18,9 +18,7 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {ComponentActionsService} from '@app/services/component-actions.service';
@@ -28,10 +26,6 @@ import {FilteringService} from '@app/services/filtering.service';
import {MenuButtonComponent} from './menu-button.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('MenuButtonComponent', () => {
let component: MenuButtonComponent;
let fixture: ComponentFixture<MenuButtonComponent>;
@@ -43,11 +37,7 @@ describe('MenuButtonComponent', () => {
StoreModule.provideStore({
appSettings
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppSettingsService,
@@ -100,11 +90,13 @@ describe('MenuButtonComponent', () => {
const cases = [
{
subItems: null,
+ hideCaret: false,
hasCaret: false,
title: 'no sub-items'
},
{
subItems: [],
+ hideCaret: false,
hasCaret: false,
title: 'empty sub-items array'
},
@@ -125,7 +117,7 @@ describe('MenuButtonComponent', () => {
cases.forEach((test) => {
it(test.title, () => {
component.subItems = test.subItems;
- component.hideCaret = Boolean(test.hideCaret);
+ component.hideCaret = test.hideCaret;
expect(component.hasSubItems).toEqual(test.hasCaret);
});
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/modal/modal.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/modal/modal.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/modal/modal.component.spec.ts
index 802bd13..acdcae8 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/modal/modal.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/modal/modal.component.spec.ts
@@ -17,16 +17,10 @@
*/
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {ModalComponent} from './modal.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('ModalComponent', () => {
let component: ModalComponent;
let fixture: ComponentFixture<ModalComponent>;
@@ -34,13 +28,7 @@ describe('ModalComponent', () => {
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ModalComponent],
- imports: [
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
- ]
+ imports: TranslationModules
})
.compileComponents();
}));
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts
index 7a15bbc..ff8675d 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/pagination/pagination.component.spec.ts
@@ -18,29 +18,18 @@
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
import {NO_ERRORS_SCHEMA} from '@angular/core';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
+import {FormControl, FormGroup} from '@angular/forms';
import {PaginationComponent} from './pagination.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('PaginationComponent', () => {
let component: PaginationComponent;
let fixture: ComponentFixture<PaginationComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
- imports: [
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
- ],
+ imports: TranslationModules,
declarations: [PaginationComponent],
schemas: [NO_ERRORS_SCHEMA]
})
@@ -51,15 +40,9 @@ describe('PaginationComponent', () => {
fixture = TestBed.createComponent(PaginationComponent);
component = fixture.componentInstance;
component.filterInstance = {};
- component.filtersForm = {
- controls: {
- pageSize: {
- valueChanges: {
- subscribe: () => {}
- }
- }
- }
- };
+ component.filtersForm = new FormGroup({
+ pageSize: new FormControl()
+ });
fixture.detectChanges();
});
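
The stub object removed above faked only controls.pageSize.valueChanges.subscribe; swapping in a real FormGroup gives the component a genuine reactive control. A minimal sketch of what the real control supports, assuming the component subscribes to page-size changes (the initial value 10 is an assumption for the demo):

    import {FormControl, FormGroup} from '@angular/forms';

    // A real reactive form replaces the hand-rolled stub: valueChanges is a
    // genuine RxJS Observable, so subscribing, unsubscribing and reading
    // .value all work without the test faking each member.
    const filtersForm = new FormGroup({
      pageSize: new FormControl(10) // assumed initial value, demo only
    });

    filtersForm.controls['pageSize'].valueChanges.subscribe((size: number) => {
      console.log('page size changed to', size);
    });

    filtersForm.controls['pageSize'].setValue(25); // logs: page size changed to 25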
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
index 2b3a957..72795a4 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/search-box/search-box.component.spec.ts
@@ -18,17 +18,11 @@
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {UtilsService} from '@app/services/utils.service';
import {SearchBoxComponent} from './search-box.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('SearchBoxComponent', () => {
let component: SearchBoxComponent;
let fixture: ComponentFixture<SearchBoxComponent>;
@@ -36,13 +30,7 @@ describe('SearchBoxComponent', () => {
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [SearchBoxComponent],
- imports: [
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
- ],
+ imports: TranslationModules,
providers: [
UtilsService
],
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
index 0ef17de..380f030 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
@@ -17,9 +17,7 @@
*/
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
-import {Http} from '@angular/http';
-import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
-import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+import {TranslationModules} from '@app/test-config.spec';
import {StoreModule} from '@ngrx/store';
import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
import {ComponentActionsService} from '@app/services/component-actions.service';
@@ -28,10 +26,6 @@ import {ModalComponent} from '@app/components/modal/modal.component';
import {TimeZonePickerComponent} from './timezone-picker.component';
-export function HttpLoaderFactory(http: Http) {
- return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
-}
-
describe('TimeZonePickerComponent', () => {
let component: TimeZonePickerComponent;
let fixture: ComponentFixture<TimeZonePickerComponent>;
@@ -47,11 +41,7 @@ describe('TimeZonePickerComponent', () => {
StoreModule.provideStore({
appSettings
}),
- TranslateModule.forRoot({
- provide: TranslateLoader,
- useFactory: HttpLoaderFactory,
- deps: [Http]
- })
+ ...TranslationModules
],
providers: [
AppSettingsService,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts
index 0dfb0f3..4720a74 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.spec.ts
@@ -44,24 +44,24 @@ describe('HttpClientService', () => {
describe('#generateUrlString()', () => {
it('should generate URL from presets', inject([HttpClientService], (service: HttpClientService) => {
- expect(service.generateUrlString('status')).toEqual('api/v1/status');
+ expect(service['generateUrlString']('status')).toEqual('api/v1/status');
}));
it('should return explicit URL', inject([HttpClientService], (service: HttpClientService) => {
- expect(service.generateUrlString('login')).toEqual('login');
+ expect(service['generateUrlString']('login')).toEqual('login');
}));
});
describe('#generateUrl()', () => {
it('string parameter', inject([HttpClientService], (service: HttpClientService) => {
- expect(service.generateUrl('status')).toEqual('api/v1/status');
+ expect(service['generateUrl']('status')).toEqual('api/v1/status');
}));
it('request object parameter', inject([HttpClientService], (service: HttpClientService) => {
let request = new Request({
url: 'status'
});
- expect(service.generateUrl(request).url).toEqual('api/v1/status');
+ expect(service['generateUrl'](request)['url']).toEqual('api/v1/status');
}));
});
});
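
The bracket notation introduced above is the usual TypeScript escape hatch for exercising private members from a spec: the private modifier is enforced only at compile time, and string-index access bypasses that check without changing runtime behavior. A minimal sketch (the class and its members are illustrative stand-ins, not the real HttpClientService):

    // Illustrative stand-in for a service with a private helper.
    class UrlServiceLike {
      private apiPrefix = 'api/v1/';
      private generateUrlString(url: string): string {
        return this.apiPrefix + url;
      }
    }

    const service = new UrlServiceLike();
    // service.generateUrlString('status');             // compile error: method is private
    const url = service['generateUrlString']('status'); // index access skips the check
    console.log(url); // api/v1/status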
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts
new file mode 100644
index 0000000..8fc6c87
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/test-config.spec.ts
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {HttpModule, Http} from '@angular/http';
+import {TranslateModule, TranslateLoader} from '@ngx-translate/core';
+import {TranslateHttpLoader} from '@ngx-translate/http-loader';
+
+function HttpLoaderFactory(http: Http) {
+ return new TranslateHttpLoader(http, 'assets/i18n/', '.json');
+}
+
+export const TranslationModules = [
+ HttpModule,
+ TranslateModule.forRoot({
+ loader: {
+ provide: TranslateLoader,
+ useFactory: HttpLoaderFactory,
+ deps: [Http]
+ }
+ })
+];
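
With the shared configuration in place, each spec wires translations in a single line. A sketch of the two usages seen in the diffs above: assign the array directly when the spec has no other imports, or spread it into a larger list (StoreModule here stands in for any extra module a spec might need):

    import {TestBed} from '@angular/core/testing';
    import {StoreModule} from '@ngrx/store';
    import {TranslationModules} from '@app/test-config.spec';

    // Spec with no other imports: pass the shared array as-is.
    TestBed.configureTestingModule({
      imports: TranslationModules
    });

    TestBed.resetTestingModule(); // sketch only; a real spec configures once per beforeEach

    // Spec that needs additional modules: spread the shared entries in.
    TestBed.configureTestingModule({
      imports: [
        StoreModule.provideStore({}),
        ...TranslationModules
      ]
    });

Bundling HttpModule inside TranslationModules also means specs no longer have to import Http themselves just to satisfy the loader factory's deps list.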
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ab8b39c/ambari-logsearch/ambari-logsearch-web/src/test.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/test.ts b/ambari-logsearch/ambari-logsearch-web/src/test.ts
index 6597b1c..0fc13fb 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/test.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/test.ts
@@ -24,11 +24,8 @@ import 'zone.js/dist/sync-test';
import 'zone.js/dist/jasmine-patch';
import 'zone.js/dist/async-test';
import 'zone.js/dist/fake-async-test';
-import { getTestBed } from '@angular/core/testing';
-import {
- BrowserDynamicTestingModule,
- platformBrowserDynamicTesting
-} from '@angular/platform-browser-dynamic/testing';
+import {getTestBed} from '@angular/core/testing';
+import {BrowserDynamicTestingModule, platformBrowserDynamicTesting} from '@angular/platform-browser-dynamic/testing';
// Unfortunately there's no typing for the `__karma__` variable. Just declare it as any.
declare var __karma__: any;
[11/57] [abbrv] ambari git commit: AMBARI-21884. Installation should ignore OS that are not managed by Ambari (ncole)

Posted by lp...@apache.org.
AMBARI-21884. Installation should ignore OS that are not managed by Ambari (ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/249bb97a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/249bb97a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/249bb97a
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 249bb97a864c0f18278d37d87c2c3117809e5ca4
Parents: f3232d2
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Sep 6 11:44:40 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Sep 6 11:44:40 2017 -0400
----------------------------------------------------------------------
.../libraries/functions/repository_util.py | 32 +++++++---
.../ambari/server/agent/CommandRepository.java | 20 +++++-
.../AmbariCustomCommandExecutionHelper.java | 44 ++++----------
.../ClusterStackVersionResourceProvider.java | 28 ++++++---
.../HostStackVersionResourceProvider.java | 30 ++++-----
.../stack/upgrade/RepositoryVersionHelper.java | 16 +++--
.../src/main/resources/version_definition.xsd | 3 +-
.../ExecutionCommandWrapperTest.java | 64 ++++++++++++++++++++
.../AmbariCustomCommandExecutionHelperTest.java | 14 ++---
...ClusterStackVersionResourceProviderTest.java | 8 ++-
10 files changed, 175 insertions(+), 84 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py
index 6ad1aee..120d464 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py
@@ -40,7 +40,10 @@ def create_repo_files(template, command_repository):
raise Fail("The command repository was not parsed correctly")
if 0 == len(command_repository.repositories):
- raise Fail("Cannot create repository files when no repositories are defined")
+ Logger.warning(
+ "Repository for {0}/{1} has no repositories. Ambari may not be managing this version.".format(
+ command_repository.stack_name, command_repository.version_string))
+ return
# add the stack name to the file name just to make it a little easier to debug
# version_id is the primary id of the repo_version table in the database
@@ -54,15 +57,20 @@ def create_repo_files(template, command_repository):
if repository.repo_id is None:
raise Fail("Repository with url {0} has no id".format(repository.base_url))
- Repository(repository.repo_id,
- action = "create",
- base_url = repository.base_url,
- mirror_list = repository.mirrors_list,
- repo_file_name = file_name,
- repo_template = template,
- components = repository.ubuntu_components,
- append_to_file = append_to_file)
- append_to_file = True
+ if not repository.ambari_managed:
+ Logger.warning(
+ "Repository for {0}/{1}/{2} is not managed by Ambari".format(
+ command_repository.stack_name, command_repository.version_string, repository.repo_id))
+ else:
+ Repository(repository.repo_id,
+ action = "create",
+ base_url = repository.base_url,
+ mirror_list = repository.mirrors_list,
+ repo_file_name = file_name,
+ repo_template = template,
+ components = repository.ubuntu_components,
+ append_to_file = append_to_file)
+ append_to_file = True
def _find_value(dictionary, key):
@@ -116,6 +124,10 @@ class _CommandRepositoryEntry(object):
self.repo_name = _find_value(json_dict, 'repoName')
self.base_url = _find_value(json_dict, 'baseUrl')
self.mirrors_list = _find_value(json_dict, 'mirrorsList')
+ self.ambari_managed = _find_value(json_dict, 'ambariManaged')
+
+ if self.ambari_managed is None:
+ self.ambari_managed = True
# if repoName is changed on the java side, this will fail for ubuntu since we rely on the
# name being the same as how the repository was built
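
The Python change above reads ambariManaged from the command JSON and defaults it to True when the key is absent, so payloads from servers that never send the flag keep the previous behavior. A sketch of the same default-on-missing-key pattern, written in TypeScript for consistency with the earlier examples (all names are illustrative):

    // Default a missing JSON flag to true so payloads that never send the
    // key behave exactly as before the flag existed.
    interface RepositoryEntry {
      repoId: string;
      baseUrl?: string;
      ambariManaged?: boolean;
    }

    function isAmbariManaged(entry: RepositoryEntry): boolean {
      return entry.ambariManaged == null ? true : entry.ambariManaged;
    }

    console.log(isAmbariManaged({repoId: 'HDP-2.6'}));                       // true
    console.log(isAmbariManaged({repoId: 'HDP-2.6', ambariManaged: false})); // false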
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
index 3d96122..858a55f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
@@ -108,6 +108,17 @@ public class CommandRepository {
}
/**
+ * Sets fields for non-managed repositories.
+ */
+ public void setNonManaged() {
+ for (Repository repo : m_repositories) {
+ repo.m_baseUrl = null;
+ repo.m_mirrorsList = null;
+ repo.m_ambariManaged = false;
+ }
+ }
+
+ /**
* Minimal information required to generate repo files on the agent. These are copies
* of the repository objects from repo versions that can be changed for URL overrides, etc.
*/
@@ -119,6 +130,9 @@ public class CommandRepository {
@SerializedName("repoId")
private String m_repoId;
+ @SerializedName("ambariManaged")
+ private boolean m_ambariManaged = true;
+
/**
* The name should not change. Ubuntu requires that it match exactly as the repo was built.
*/
@@ -167,6 +181,10 @@ public class CommandRepository {
return m_baseUrl;
}
+ public boolean isAmbariManaged() {
+ return m_ambariManaged;
+ }
+
/**
* {@inheritDoc}
*/
@@ -179,6 +197,6 @@ public class CommandRepository {
.append("baseUrl", m_baseUrl)
.toString();
}
- }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 6d97854..822539f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -1273,49 +1273,31 @@ public class AmbariCustomCommandExecutionHelper {
* @throws AmbariException
*/
@Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
- public CommandRepository getCommandRepository(final Cluster cluster, ServiceComponent component, Host host) throws AmbariException {
-
- Function<List<RepositoryInfo>, List<RepositoryInfo>> function = new Function<List<RepositoryInfo>, List<RepositoryInfo>>() {
- @Override
- public List<RepositoryInfo> apply(List<RepositoryInfo> input) {
- // !!! just return what is given
- return input;
- }
- };
-
- final List<RepositoryInfo> repoInfos = getBaseUrls(cluster, component, host, function);
-
- if (null == repoInfos) {
- return null;
- }
+ public CommandRepository getCommandRepository(final Cluster cluster, ServiceComponent component, final Host host) throws AmbariException {
final CommandRepository command = new CommandRepository();
StackId stackId = component.getDesiredStackId();
- command.setRepositories(repoInfos);
+ command.setRepositories(Collections.<RepositoryInfo>emptyList());
command.setStackName(stackId.getStackName());
final BaseUrlUpdater<Void> updater = new BaseUrlUpdater<Void>(null) {
@Override
public Void apply(RepositoryVersionEntity rve) {
-
command.setRepositoryVersionId(rve.getId());
command.setRepositoryVersion(rve.getVersion());
command.setStackName(rve.getStackName());
- command.setUniqueSuffix(String.format("-repo-%s", rve.getId()));
- for (CommandRepository.Repository commandRepo : command.getRepositories()) {
- String osType = commandRepo.getOsType();
- String repoName = commandRepo.getRepoName();
- String baseUrl = commandRepo.getBaseUrl();
-
- for (OperatingSystemEntity ose : rve.getOperatingSystems()) {
- if (ose.getOsType().equals(osType) && ose.isAmbariManagedRepos()) {
- for (RepositoryEntity re : ose.getRepositories()) {
- if (re.getName().equals(repoName) &&
- !re.getBaseUrl().equals(baseUrl)) {
- commandRepo.setBaseUrl(re.getBaseUrl());
- }
- }
+ // !!! a repository version entity has all the repos worked out. We shouldn't use
+ // the stack at all.
+ for (OperatingSystemEntity osEntity : rve.getOperatingSystems()) {
+ String osEntityFamily = os_family.find(osEntity.getOsType());
+ if (osEntityFamily.equals(host.getOsFamily())) {
+ command.setRepositories(osEntity.getOsType(), osEntity.getRepositories());
+
+ if (!osEntity.isAmbariManagedRepos()) {
+ command.setNonManaged();
+ } else {
+ command.setUniqueSuffix(String.format("-repo-%s", rve.getId()));
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index 3e4d4fd..1766da3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -85,6 +85,7 @@ import org.apache.ambari.server.state.repository.VersionDefinitionXml;
import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
@@ -579,7 +580,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
Host host = hostIterator.next();
if (hostHasVersionableComponents(cluster, serviceNames, ami, stackId, host)) {
ActionExecutionContext actionContext = getHostVersionInstallCommand(repoVersionEnt,
- cluster, managementController, ami, stackId, serviceNames, perOsRepos, stage, host);
+ cluster, managementController, ami, stackId, serviceNames, stage, host);
if (null != actionContext) {
try {
actionExecutionHelper.get().addExecutionCommandsToStage(actionContext, stage, null);
@@ -639,20 +640,24 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
private ActionExecutionContext getHostVersionInstallCommand(RepositoryVersionEntity repoVersion,
Cluster cluster, AmbariManagementController managementController, AmbariMetaInfo ami,
- final StackId stackId, Set<String> repoServices, Map<String, List<RepositoryEntity>> perOsRepos, Stage stage1, Host host)
+ final StackId stackId, Set<String> repoServices, Stage stage1, Host host)
throws SystemException {
+
// Determine repositories for host
String osFamily = host.getOsFamily();
- final List<RepositoryEntity> repoInfo = perOsRepos.get(osFamily);
- if (repoInfo == null) {
- throw new SystemException(String.format("Repositories for os type %s are " +
- "not defined. Repo version=%s, stackId=%s",
- osFamily, repoVersion.getVersion(), stackId));
+ OperatingSystemEntity osEntity = null;
+ for (OperatingSystemEntity os : repoVersion.getOperatingSystems()) {
+ if (os.getOsType().equals(osFamily)) {
+ osEntity = os;
+ break;
+ }
}
- if (repoInfo.isEmpty()){
- LOG.error(String.format("Repository list is empty. Ambari may not be managing the repositories for %s", osFamily));
+ if (null == osEntity || CollectionUtils.isEmpty(osEntity.getRepositories())) {
+ throw new SystemException(String.format("Repositories for os type %s are " +
+ "not defined. Repo version=%s, stackId=%s",
+ osFamily, repoVersion.getVersion(), stackId));
}
// determine packages for all services that are installed on host
@@ -683,7 +688,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
actionContext.setRepositoryVersion(repoVersion);
actionContext.setTimeout(Short.valueOf(configuration.getDefaultAgentTaskTimeout(true)));
- repoVersionHelper.addCommandRepository(actionContext, osFamily, repoVersion, repoInfo);
+ repoVersionHelper.addCommandRepository(actionContext, repoVersion, osEntity);
return actionContext;
}
@@ -748,6 +753,9 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
* compares build numbers
*/
private static int compareVersions(String version1, String version2) {
+ version1 = (null == version1) ? "0" : version1;
+ version2 = (null == version2) ? "0" : version2;
+
// check _exact_ equality
if (StringUtils.equals(version1, version2)) {
return 0;
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java
index bcd4089..ba5fccc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostStackVersionResourceProvider.java
@@ -53,7 +53,6 @@ import org.apache.ambari.server.orm.dao.HostVersionDAO;
import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
import org.apache.ambari.server.orm.entities.HostVersionEntity;
import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
-import org.apache.ambari.server.orm.entities.RepositoryEntity;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Host;
@@ -62,6 +61,7 @@ import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
@@ -379,25 +379,27 @@ public class HostStackVersionResourceProvider extends AbstractControllerResource
}
}
- List<OperatingSystemEntity> operatingSystems = repoVersionEnt.getOperatingSystems();
- Map<String, List<RepositoryEntity>> perOsRepos = new HashMap<>();
- for (OperatingSystemEntity operatingSystem : operatingSystems) {
- perOsRepos.put(operatingSystem.getOsType(), operatingSystem.getRepositories());
- }
-
// Determine repositories for host
String osFamily = host.getOsFamily();
- final List<RepositoryEntity> repoInfo = perOsRepos.get(osFamily);
- if (repoInfo == null) {
+ OperatingSystemEntity osEntity = null;
+ for (OperatingSystemEntity operatingSystem : repoVersionEnt.getOperatingSystems()) {
+ if (osFamily.equals(operatingSystem.getOsType())) {
+ osEntity = operatingSystem;
+ break;
+ }
+ }
+
+ if (null == osEntity) {
+ throw new SystemException(String.format("Operating System matching %s could not be found",
+ osFamily));
+ }
+
+ if (CollectionUtils.isEmpty(osEntity.getRepositories())) {
throw new SystemException(String.format("Repositories for os type %s are " +
"not defined. Repo version=%s, stackId=%s",
osFamily, desiredRepoVersion, stackId));
}
- if (repoInfo.isEmpty()){
- LOG.error(String.format("Repository list is empty. Ambari may not be managing the repositories for %s", osFamily));
- }
-
Set<String> servicesOnHost = new HashSet<>();
if (forceInstallOnNonMemberHost) {
@@ -441,7 +443,7 @@ public class HostStackVersionResourceProvider extends AbstractControllerResource
roleParams);
actionContext.setTimeout(Short.valueOf(configuration.getDefaultAgentTaskTimeout(true)));
- repoVersionHelper.addCommandRepository(actionContext, osFamily, repoVersionEnt, repoInfo);
+ repoVersionHelper.addCommandRepository(actionContext, repoVersionEnt, osEntity);
String caption = String.format(INSTALL_PACKAGES_FULL_NAME + " on host %s", hostName);
RequestStageContainer req = createRequest(caption);
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
index 60ad446..9524c09 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
@@ -303,16 +303,20 @@ public class RepositoryVersionHelper {
* @param repoVersion the repository version entity
* @param repos the repository entities
*/
- public void addCommandRepository(ActionExecutionContext context, String osFamily,
- RepositoryVersionEntity repoVersion, List<RepositoryEntity> repos) {
- StackId stackId = repoVersion.getStackId();
+ public void addCommandRepository(ActionExecutionContext context,
+ RepositoryVersionEntity repoVersion, OperatingSystemEntity osEntity) {
final CommandRepository commandRepo = new CommandRepository();
- commandRepo.setRepositories(osFamily, repos);
+ commandRepo.setRepositories(osEntity.getOsType(), osEntity.getRepositories());
commandRepo.setRepositoryVersion(repoVersion.getVersion());
commandRepo.setRepositoryVersionId(repoVersion.getId());
- commandRepo.setStackName(stackId.getStackName());
- commandRepo.setUniqueSuffix(String.format("-repo-%s", repoVersion.getId()));
+ commandRepo.setStackName(repoVersion.getStackId().getStackName());
+
+ if (!osEntity.isAmbariManagedRepos()) {
+ commandRepo.setNonManaged();
+ } else {
+ commandRepo.setUniqueSuffix(String.format("-repo-%s", repoVersion.getId()));
+ }
context.addVisitor(new ExecutionCommandVisitor() {
@Override
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index 851e0d5..832d7f9 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -126,7 +126,8 @@
<xs:element name="baseurl" type="xs:string" />
<xs:element name="repoid" type="xs:string" />
<xs:element name="reponame" type="xs:string" />
- <xs:element name="unique" type="xs:boolean" minOccurs="0" maxOccurs="1"/>
+ <xs:element name="mirrorslist" type="xs:string" minOccurs="0" maxOccurs="1" />
+ <xs:element name="unique" type="xs:boolean" minOccurs="0" maxOccurs="1" />
</xs:sequence>
</xs:complexType>
</xs:element>
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index 76160cc..fb84df5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -337,6 +337,70 @@ public class ExecutionCommandWrapperTest {
Assert.assertEquals("0.1-0000", commandParams.get(KeyNames.VERSION));
}
+ /**
+ * Test that the execution command wrapper ignores repository file when there are none to use.
+ */
+ @Test
+ public void testExecutionCommandNoRepositoryFile() throws Exception {
+ Cluster cluster = clusters.getCluster(CLUSTER1);
+
+ StackId stackId = cluster.getDesiredStackVersion();
+ RepositoryVersionEntity repositoryVersion = ormTestHelper.getOrCreateRepositoryVersion(stackId, "0.1-0000");
+ Service service = cluster.getService("HDFS");
+ service.setDesiredRepositoryVersion(repositoryVersion);
+
+ repositoryVersion.setOperatingSystems("[]");
+
+ ormTestHelper.repositoryVersionDAO.merge(repositoryVersion);
+
+ // first try with an INSTALL command - this should not populate version info
+ ExecutionCommand executionCommand = new ExecutionCommand();
+ Map<String, String> commandParams = new HashMap<>();
+
+ executionCommand.setClusterName(CLUSTER1);
+ executionCommand.setTaskId(1);
+ executionCommand.setRequestAndStage(1, 1);
+ executionCommand.setHostname(HOST1);
+ executionCommand.setRole("NAMENODE");
+ executionCommand.setRoleParams(Collections.<String, String>emptyMap());
+ executionCommand.setRoleCommand(RoleCommand.INSTALL);
+ executionCommand.setServiceName("HDFS");
+ executionCommand.setCommandType(AgentCommandType.EXECUTION_COMMAND);
+ executionCommand.setCommandParams(commandParams);
+
+ String json = StageUtils.getGson().toJson(executionCommand, ExecutionCommand.class);
+ ExecutionCommandWrapper execCommWrap = new ExecutionCommandWrapper(json);
+ injector.injectMembers(execCommWrap);
+
+ ExecutionCommand processedExecutionCommand = execCommWrap.getExecutionCommand();
+ commandParams = processedExecutionCommand.getCommandParams();
+ Assert.assertFalse(commandParams.containsKey(KeyNames.VERSION));
+
+ // now try with a START command which should populate the version even
+ // though the state is INSTALLING
+ executionCommand = new ExecutionCommand();
+ commandParams = new HashMap<>();
+
+ executionCommand.setClusterName(CLUSTER1);
+ executionCommand.setTaskId(1);
+ executionCommand.setRequestAndStage(1, 1);
+ executionCommand.setHostname(HOST1);
+ executionCommand.setRole("NAMENODE");
+ executionCommand.setRoleParams(Collections.<String, String> emptyMap());
+ executionCommand.setRoleCommand(RoleCommand.START);
+ executionCommand.setServiceName("HDFS");
+ executionCommand.setCommandType(AgentCommandType.EXECUTION_COMMAND);
+ executionCommand.setCommandParams(commandParams);
+
+ json = StageUtils.getGson().toJson(executionCommand, ExecutionCommand.class);
+ execCommWrap = new ExecutionCommandWrapper(json);
+ injector.injectMembers(execCommWrap);
+
+ processedExecutionCommand = execCommWrap.getExecutionCommand();
+ commandParams = processedExecutionCommand.getCommandParams();
+ Assert.assertEquals("0.1-0000", commandParams.get(KeyNames.VERSION));
+ }
+
@AfterClass
public static void tearDown() throws AmbariException, SQLException {
H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index 7384464..883e891 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@ -581,9 +581,7 @@ public class AmbariCustomCommandExecutionHelperTest {
CommandRepository commandRepo = helper.getCommandRepository(cluster, componentRM, host);
- Assert.assertEquals(1, commandRepo.getRepositories().size());
- CommandRepository.Repository repo = commandRepo.getRepositories().iterator().next();
- Assert.assertEquals("http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0", repo.getBaseUrl());
+ Assert.assertEquals(0, commandRepo.getRepositories().size());
RepositoryInfo ri = new RepositoryInfo();
ri.setBaseUrl("http://foo");
@@ -592,7 +590,6 @@ public class AmbariCustomCommandExecutionHelperTest {
ri.setOsType("redhat6");
String operatingSystems = repoVersionHelper.serializeOperatingSystems(Collections.singletonList(ri));
-
StackEntity stackEntity = stackDAO.find(cluster.getDesiredStackVersion().getStackName(),
cluster.getDesiredStackVersion().getStackVersion());
@@ -616,15 +613,12 @@ public class AmbariCustomCommandExecutionHelperTest {
commandRepo = helper.getCommandRepository(cluster, componentRM, host);
Assert.assertEquals(1, commandRepo.getRepositories().size());
- repo = commandRepo.getRepositories().iterator().next();
+ CommandRepository.Repository repo = commandRepo.getRepositories().iterator().next();
Assert.assertEquals("http://foo", repo.getBaseUrl());
- // verify that ZK is NOT overwritten
+ // verify that ZK has no repositories, since we haven't defined a repo version for ZKC
commandRepo = helper.getCommandRepository(cluster, componentZKC, host);
-
- Assert.assertEquals(1, commandRepo.getRepositories().size());
- repo = commandRepo.getRepositories().iterator().next();
- Assert.assertEquals("http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0", repo.getBaseUrl());
+ Assert.assertEquals(0, commandRepo.getRepositories().size());
}
private void createClusterFixture(String clusterName, StackId stackId,
http://git-wip-us.apache.org/repos/asf/ambari/blob/249bb97a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index 0e0e1c6..654067b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@ -55,6 +55,7 @@ import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.agent.CommandRepository;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -1069,7 +1070,12 @@ public class ClusterStackVersionResourceProviderTest {
Assert.assertEquals(Float.valueOf(0.85f), successFactor);
Assert.assertNotNull(executionCommand.getRepositoryFile());
- Assert.assertEquals(0, executionCommand.getRepositoryFile().getRepositories().size());
+ Assert.assertEquals(2, executionCommand.getRepositoryFile().getRepositories().size());
+
+ for (CommandRepository.Repository repo : executionCommand.getRepositoryFile().getRepositories()) {
+ Assert.assertFalse(repo.isAmbariManaged());
+ }
+
}
@Test
[25/57] [abbrv] ambari git commit: AMBARI-21889 MaxBackupIndex does not work with DailyRollingFileAppender in Ranger (mugdha)
Posted by lp...@apache.org.
AMBARI-21889 MaxBackupIndex does not work with DailyRollingFileAppender in Ranger (mugdha)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b7f53dc8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b7f53dc8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b7f53dc8
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: b7f53dc8c516ba148cdb59577baf3db9b29d538b
Parents: 2ece0b3
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Fri Sep 8 14:03:02 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Fri Sep 8 14:23:12 2017 +0530
----------------------------------------------------------------------
.../RANGER/0.6.0/configuration/admin-log4j.xml | 1 -
.../RANGER/0.6.0/configuration/tagsync-log4j.xml | 3 +--
.../RANGER/0.6.0/configuration/usersync-log4j.xml | 3 +--
.../RANGER/1.0.0.3.0/configuration/admin-log4j.xml | 1 -
.../RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml | 3 +--
.../RANGER/1.0.0.3.0/configuration/usersync-log4j.xml | 3 +--
.../RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml | 6 ++----
.../RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml | 6 ++----
.../stacks/HDP/2.3/upgrades/config-upgrade.xml | 6 ++----
.../stacks/HDP/2.4/upgrades/config-upgrade.xml | 6 ++----
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 13 ++++---------
11 files changed, 16 insertions(+), 35 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
index 6108c36..01f4c39 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
@@ -75,7 +75,6 @@ log4j.appender.xa_log_appender.append=true
log4j.appender.xa_log_appender.layout=org.apache.log4j.PatternLayout
log4j.appender.xa_log_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n
log4j.appender.xa_log_appender.MaxFileSize={{ranger_xa_log_maxfilesize}}MB
-log4j.appender.xa_log_appender.MaxBackupIndex={{ranger_xa_log_maxbackupindex}}
# xa_log_appender : category and additivity
log4j.category.org.springframework=warn,xa_log_appender
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
index 6384302..e84aa87 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
@@ -71,8 +71,7 @@ log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.file=${logdir}/tagsync.log
log4j.appender.logFile.datePattern='.'yyyy-MM-dd
log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.logFile.MaxFileSize = {{ranger_tagsync_log_maxfilesize}}MB
-log4j.appender.logFile.MaxBackupIndex = {{ranger_tagsync_log_number_of_backup_files}}
+log4j.appender.logFile.MaxFileSize={{ranger_tagsync_log_maxfilesize}}MB
log4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n
# console
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
index 8843a2a..04297ab 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
@@ -71,8 +71,7 @@ log4j.appender.logFile.file=${logdir}/usersync.log
log4j.appender.logFile.datePattern='.'yyyy-MM-dd
log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
log4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n
-log4j.appender.logFile.MaxFileSize = {{ranger_usersync_log_maxfilesize}}MB
-log4j.appender.logFile.MaxBackupIndex = {{ranger_usersync_log_maxbackupindex}}
+log4j.appender.logFile.MaxFileSize={{ranger_usersync_log_maxfilesize}}MB
# console
log4j.appender.console=org.apache.log4j.ConsoleAppender
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/admin-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/admin-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/admin-log4j.xml
index 281c7ce..2211436 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/admin-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/admin-log4j.xml
@@ -75,7 +75,6 @@ log4j.appender.xa_log_appender.append=true
log4j.appender.xa_log_appender.layout=org.apache.log4j.PatternLayout
log4j.appender.xa_log_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n
log4j.appender.xa_log_appender.MaxFileSize={{ranger_xa_log_maxfilesize}}MB
-log4j.appender.xa_log_appender.MaxBackupIndex={{ranger_xa_log_maxbackupindex}}
# xa_log_appender : category and additivity
log4j.category.org.springframework=warn,xa_log_appender
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml
index bd2e109..7db3368 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/tagsync-log4j.xml
@@ -71,8 +71,7 @@ log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.file=${logdir}/tagsync.log
log4j.appender.logFile.datePattern='.'yyyy-MM-dd
log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
-log4j.appender.logFile.MaxFileSize = {{ranger_tagsync_log_maxfilesize}}MB
-log4j.appender.logFile.MaxBackupIndex = {{ranger_tagsync_log_number_of_backup_files}}
+log4j.appender.logFile.MaxFileSize={{ranger_tagsync_log_maxfilesize}}MB
log4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n
# console
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/usersync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/usersync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/usersync-log4j.xml
index b5f2a7a..e54d34f 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/usersync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/configuration/usersync-log4j.xml
@@ -71,8 +71,7 @@ log4j.appender.logFile.file=${logdir}/usersync.log
log4j.appender.logFile.datePattern='.'yyyy-MM-dd
log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
log4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n
-log4j.appender.logFile.MaxFileSize = {{ranger_usersync_log_maxfilesize}}MB
-log4j.appender.logFile.MaxBackupIndex = {{ranger_usersync_log_maxbackupindex}}
+log4j.appender.logFile.MaxFileSize={{ranger_usersync_log_maxfilesize}}MB
# console
log4j.appender.console=org.apache.log4j.ConsoleAppender
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
index bac2e84..7f4b9d0 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
@@ -89,8 +89,7 @@ log4j.appender.kms.File=${kms.log.dir}/kms.log
log4j.appender.kms.Append=true
log4j.appender.kms.layout=org.apache.log4j.PatternLayout
log4j.appender.kms.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n
-log4j.appender.kms.MaxFileSize = {{ranger_kms_log_maxfilesize}}MB
-log4j.appender.kms.MaxBackupIndex = {{ranger_kms_log_maxbackupindex}}
+log4j.appender.kms.MaxFileSize={{ranger_kms_log_maxfilesize}}MB
log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.DatePattern='.'yyyy-MM-dd
@@ -98,8 +97,7 @@ log4j.appender.kms-audit.File=${kms.log.dir}/kms-audit.log
log4j.appender.kms-audit.Append=true
log4j.appender.kms-audit.layout=org.apache.log4j.PatternLayout
log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n
-log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB
-log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}
+log4j.appender.kms-audit.MaxFileSize={{ranger_kms_audit_log_maxfilesize}}MB
log4j.logger.kms-audit=INFO, kms-audit
log4j.additivity.kms-audit=false
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml
index daae579..b88c7f2 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/configuration/kms-log4j.xml
@@ -89,8 +89,7 @@ log4j.appender.kms.File=${kms.log.dir}/kms.log
log4j.appender.kms.Append=true
log4j.appender.kms.layout=org.apache.log4j.PatternLayout
log4j.appender.kms.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n
-log4j.appender.kms.MaxFileSize = {{ranger_kms_log_maxfilesize}}MB
-log4j.appender.kms.MaxBackupIndex = {{ranger_kms_log_maxbackupindex}}
+log4j.appender.kms.MaxFileSize={{ranger_kms_log_maxfilesize}}MB
log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.DatePattern='.'yyyy-MM-dd
@@ -98,8 +97,7 @@ log4j.appender.kms-audit.File=${kms.log.dir}/kms-audit.log
log4j.appender.kms-audit.Append=true
log4j.appender.kms-audit.layout=org.apache.log4j.PatternLayout
log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n
-log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB
-log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}
+log4j.appender.kms-audit.MaxFileSize={{ranger_kms_audit_log_maxfilesize}}MB
log4j.logger.kms-audit=INFO, kms-audit
log4j.additivity.kms-audit=false
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index 8b5c07d..ff12150 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -326,10 +326,8 @@
<set key="ranger_kms_log_maxbackupindex" value="20"/>
<set key="ranger_kms_audit_log_maxfilesize" value="256"/>
<set key="ranger_kms_audit_log_maxbackupindex" value="20"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize = {{ranger_kms_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxBackupIndex = {{ranger_kms_log_maxbackupindex}}"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
+ <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize={{ranger_kms_log_maxfilesize}}MB"/>
+ <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize={{ranger_kms_audit_log_maxfilesize}}MB"/>
</definition>
<definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
<type>ranger-kms-site</type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index b3d19d4..5c1f33f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -215,10 +215,8 @@
<set key="ranger_kms_log_maxbackupindex" value="20"/>
<set key="ranger_kms_audit_log_maxfilesize" value="256"/>
<set key="ranger_kms_audit_log_maxbackupindex" value="20"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize = {{ranger_kms_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxBackupIndex = {{ranger_kms_log_maxbackupindex}}"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
+ <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize={{ranger_kms_log_maxfilesize}}MB"/>
+ <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize={{ranger_kms_audit_log_maxfilesize}}MB"/>
</definition>
<definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
<type>ranger-kms-site</type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f53dc8/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index f89654e..13173d2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -309,7 +309,6 @@
<set key="ranger_xa_log_maxfilesize" value="256"/>
<set key="ranger_xa_log_maxbackupindex" value="20"/>
<replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.xa_log_appender.MaxFileSize={{ranger_xa_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.xa_log_appender.MaxBackupIndex={{ranger_xa_log_maxbackupindex}}"/>
</definition>
</changes>
</component>
@@ -319,8 +318,7 @@
<type>usersync-log4j</type>
<set key="ranger_usersync_log_maxfilesize" value="256"/>
<set key="ranger_usersync_log_maxbackupindex" value="20"/>
- <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxFileSize = {{ranger_usersync_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxBackupIndex = {{ranger_usersync_log_maxbackupindex}}"/>
+ <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxFileSize={{ranger_usersync_log_maxfilesize}}MB"/>
</definition>
<definition xsi:type="configure" id="hdp_2_6_0_0_disable_delta_sync_during_upgrade">
@@ -336,8 +334,7 @@
<type>tagsync-log4j</type>
<set key="ranger_tagsync_log_maxfilesize" value="256"/>
<set key="ranger_tagsync_log_number_of_backup_files" value="20"/>
- <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxFileSize = {{ranger_tagsync_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxBackupIndex = {{ranger_tagsync_log_number_of_backup_files}}"/>
+ <replace key="content" find="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.logFile.MaxFileSize={{ranger_tagsync_log_maxfilesize}}MB"/>
</definition>
</changes>
</component>
@@ -351,10 +348,8 @@
<set key="ranger_kms_log_maxbackupindex" value="20"/>
<set key="ranger_kms_audit_log_maxfilesize" value="256"/>
<set key="ranger_kms_audit_log_maxbackupindex" value="20"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize = {{ranger_kms_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxBackupIndex = {{ranger_kms_log_maxbackupindex}}"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
- <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
+ <replace key="content" find="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize={{ranger_kms_log_maxfilesize}}MB"/>
+ <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize={{ranger_kms_audit_log_maxfilesize}}MB"/>
</definition>
<definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
<type>ranger-kms-site</type>
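Net effect of the hunk above: the upgrade previously queued two <replace> operations per appender against the same find string (one inserting MaxFileSize, one MaxBackupIndex); it now keeps a single MaxFileSize replace per appender, written without spaces around '='. With the default values set just above, the patched kms-log4j content would end up containing roughly these lines (illustrative rendering, not taken from the commit):
log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms.MaxFileSize=256MB
log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kms-audit.MaxFileSize=256MB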
[57/57] [abbrv] ambari git commit: AMBARI-21307 Add all known LDAP
properties to AmbariConfigurationEnum (benyoka)
Posted by lp...@apache.org.
AMBARI-21307 Add all known LDAP properties to AmbariConfigurationEnum (benyoka)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1c22c736
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1c22c736
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1c22c736
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 1c22c73687711baa34179866a2f316b74f2182d9
Parents: f71fac3
Author: Balazs Bence Sari <be...@apache.org>
Authored: Mon Sep 4 12:45:07 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:02 2017 +0200
----------------------------------------------------------------------
.../ambari/server/ldap/AmbariLdapConfiguration.java | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1c22c736/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
index b1cbced..e913e77 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
@@ -40,22 +40,35 @@ public class AmbariLdapConfiguration {
USE_SSL("ambari.ldap.usessl"),
LDAP_SERVER_HOST("ambari.ldap.server.host"),
LDAP_SERVER_PORT("ambari.ldap.server.port"),
+ LDAP_TRUSTSTORE("ambari.ldap.truststore"),
LDAP_TRUSTSTORE_TYPE("ambari.ldap.truststore.type"),
LDAP_TRUSTSTORE_PATH("ambari.ldap.truststore.path"),
+ LDAP_TRUSTSTORE_PASSWORD("ambari.ldap.truststore.password"),
BASE_DN("ambari.ldap.bind.dn"),
+ REFERRAL("ambari.ldap.referral"),
+ PAGINATION_ENABLED("ambari.ldap.pagination.enabled"),
BIND_ANONIMOUSLY("ambari.ldap.bindanonymously"),
MANAGER_DN("ambari.ldap.managerdn"),
MANAGER_PASSWORD("ambari.ldap.managerpassword"),
USER_OBJECT_CLASS("ambari.ldap.user.object.class"),
USER_NAME_ATTRIBUTE("ambari.ldap.user.name.attribute"),
+ USER_NAME_FORCE_LOWERCASE("ambari.ldap.username.force.lowercase"),
USER_SEARCH_BASE("ambari.ldap.user.search.base"),
+ SYNC_USER_MEMBER_REPLACE_PATTERN("ambari.ldap.sync.user.member.replacepattern"),
+ SYNC_USER_MEMBER_FILTER("ambari.ldap.sync.user.member_filter"),
+ ADMIN_GROUP_MAPPING_RULES ("ambari.ldap.admin.group.mappingrules"),
GROUP_OBJECT_CLASS("ambari.ldap.group.object.class"),
GROUP_NAME_ATTRIBUTE("ambari.ldap.group.name.attribute"),
GROUP_MEMBER_ATTRIBUTE("ambari.ldap.group.member.attribute"),
GROUP_SEARCH_BASE("ambari.ldap.group.search.base"),
- DN_ATTRIBUTE("authentication.ldap.dnAttribute");
+ SYNC_GROUP_MEMBER_REPLACE_PATTERN("ambari.ldap.sync.group.member.replacepattern"),
+ SYNC_GROUP_MEMBER_FILTER("ambari.ldap.sync.group.member_filter"),
+ DN_ATTRIBUTE("authentication.ldap.dnAttribute"),
+
+ TEST_USER_NAME("ambari.ldap.test.user.name"),
+ TEST_USER_PASSWORD("ambari.ldap.test.user.password");
private String propertyName;
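For readers skimming the diff: each enum constant wraps the raw property-name string, which is the usual key-holder pattern. A minimal, hypothetical Java sketch of how such an enum is consumed (LdapProp and propertyName() below are illustrative stand-ins, not the actual Ambari API):
import java.util.Map;
enum LdapProp {
  // illustrative constants mirroring two of the keys added above
  LDAP_SERVER_HOST("ambari.ldap.server.host"),
  LDAP_TRUSTSTORE_PASSWORD("ambari.ldap.truststore.password");
  private final String propertyName;
  LdapProp(String propertyName) { this.propertyName = propertyName; }
  String propertyName() { return propertyName; }
}
class LdapPropDemo {
  static String serverHost(Map<String, String> config) {
    // look the value up by the raw property name the constant carries
    return config.get(LdapProp.LDAP_SERVER_HOST.propertyName());
  }
}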
[23/57] [abbrv] ambari git commit: AMBARI-21890. Ambari Files View -
browser goes into a hung state while opening an HDFS folder which has a huge
number of files (>10000) (Venkata Sairam)
Posted by lp...@apache.org.
AMBARI-21890. Ambari Files View - browser goes into a hung state while opening an HDFS folder which has a huge number of files (>10000) (Venkata Sairam)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6ecbd1d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6ecbd1d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6ecbd1d
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: f6ecbd1d7fa5f90ec020e64dbb403b3c3c2bdeb3
Parents: 5b1a63b
Author: Venkata Sairam <ve...@gmail.com>
Authored: Fri Sep 8 08:38:54 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Fri Sep 8 08:38:54 2017 +0530
----------------------------------------------------------------------
.../view/commons/hdfs/FileOperationService.java | 41 +++-
.../resources/ui/app/components/file-search.js | 10 +-
.../main/resources/ui/app/controllers/files.js | 20 +-
.../src/main/resources/ui/app/routes/files.js | 16 +-
.../ui/app/templates/components/file-row.hbs | 2 +-
.../ui/app/templates/components/file-search.hbs | 2 +-
.../main/resources/ui/app/templates/files.hbs | 8 +-
.../view/filebrowser/FilebrowserTest.java | 4 +-
.../ambari/view/utils/hdfs/DirListInfo.java | 97 +++++++++
.../ambari/view/utils/hdfs/DirStatus.java | 75 +++++++
.../apache/ambari/view/utils/hdfs/HdfsApi.java | 124 ++++++++++--
.../ambari/view/utils/hdfs/HdfsApiTest.java | 201 +++++++++++++++++++
12 files changed, 557 insertions(+), 43 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
----------------------------------------------------------------------
diff --git a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
index d6e484d..6fa1056 100644
--- a/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
+++ b/contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java
@@ -18,12 +18,17 @@
package org.apache.ambari.view.commons.hdfs;
+import com.google.common.base.Strings;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.DirListInfo;
+import org.apache.ambari.view.utils.hdfs.DirStatus;
import org.apache.ambari.view.utils.hdfs.HdfsApi;
import org.apache.ambari.view.utils.hdfs.HdfsApiException;
import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
@@ -41,6 +46,14 @@ import java.util.Map;
* File operations service
*/
public class FileOperationService extends HdfsService {
+ private final static Logger LOG =
+ LoggerFactory.getLogger(FileOperationService.class);
+
+
+ private static final String FILES_VIEW_MAX_FILE_PER_PAGE = "views.files.max.files.per.page";
+ private static final int DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE = 5000;
+
+ private Integer maxFilesPerPage = DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE;
/**
* Constructor
@@ -48,6 +61,19 @@ public class FileOperationService extends HdfsService {
*/
public FileOperationService(ViewContext context) {
super(context);
+ setMaxFilesPerPage(context);
+ }
+
+ private void setMaxFilesPerPage(ViewContext context) {
+ String maxFilesPerPageProperty = context.getAmbariProperty(FILES_VIEW_MAX_FILE_PER_PAGE);
+ LOG.info("maxFilesPerPageProperty = {}", maxFilesPerPageProperty);
+ if(!Strings.isNullOrEmpty(maxFilesPerPageProperty)){
+ try {
+ maxFilesPerPage = Integer.parseInt(maxFilesPerPageProperty);
+ }catch(Exception e){
+ LOG.error("{} should be integer, but it is {}, using default value of {}", FILES_VIEW_MAX_FILE_PER_PAGE , maxFilesPerPageProperty, DEFAULT_FILES_VIEW_MAX_FILE_PER_PAGE);
+ }
+ }
}
/**
@@ -56,21 +82,30 @@ public class FileOperationService extends HdfsService {
*/
public FileOperationService(ViewContext context, Map<String, String> customProperties) {
super(context, customProperties);
+ this.setMaxFilesPerPage(context);
}
/**
* List dir
* @param path path
+ * @param nameFilter : name filter to apply to the listing
* @return response with dir content
*/
@GET
@Path("/listdir")
@Produces(MediaType.APPLICATION_JSON)
- public Response listdir(@QueryParam("path") String path) {
+ public Response listdir(@QueryParam("path") String path, @QueryParam("nameFilter") String nameFilter) {
try {
JSONObject response = new JSONObject();
- response.put("files", getApi().fileStatusToJSON(getApi().listdir(path)));
- response.put("meta", getApi().fileStatusToJSON(getApi().getFileStatus(path)));
+ Map<String, Object> parentInfo = getApi().fileStatusToJSON(getApi().getFileStatus(path));
+ DirStatus dirStatus = getApi().listdir(path, nameFilter, maxFilesPerPage);
+ DirListInfo dirListInfo = dirStatus.getDirListInfo();
+ parentInfo.put("originalSize", dirListInfo.getOriginalSize());
+ parentInfo.put("truncated", dirListInfo.isTruncated());
+ parentInfo.put("finalSize", dirListInfo.getFinalSize());
+ parentInfo.put("nameFilter", dirListInfo.getNameFilter());
+ response.put("files", getApi().fileStatusToJSON(dirStatus.getFileStatuses()));
+ response.put("meta", parentInfo);
return Response.ok(response).build();
} catch (WebApplicationException ex) {
throw ex;
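For context, the reshaped /listdir response pairs the (possibly truncated) file list with truncation metadata on "meta". With the default views.files.max.files.per.page of 5000, a 12,000-entry folder would come back roughly like this (per-entry fields from fileStatusToJSON abbreviated, numbers illustrative):
{
  "files": [ ... up to 5000 entries from fileStatusToJSON ... ],
  "meta": {
    ... parent directory fields from fileStatusToJSON ...,
    "originalSize": 12000,
    "truncated": true,
    "finalSize": 5000,
    "nameFilter": null
  }
}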
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/components/file-search.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/components/file-search.js b/contrib/views/files/src/main/resources/ui/app/components/file-search.js
index b65749c..68ec280 100644
--- a/contrib/views/files/src/main/resources/ui/app/components/file-search.js
+++ b/contrib/views/files/src/main/resources/ui/app/components/file-search.js
@@ -23,11 +23,6 @@ export default Ember.Component.extend({
classNameBindings: ['expanded::col-md-9', 'expanded::col-md-offset-3'],
expanded: false,
- searchText: '',
-
- throttleTyping: Ember.observer('searchText', function() {
- Ember.run.debounce(this, this.searchFiles, 500);
- }),
searchFiles: function() {
this.sendAction('searchAction', this.get('searchText'));
@@ -38,5 +33,10 @@ export default Ember.Component.extend({
},
focusOut: function() {
this.set('expanded', false);
+ },
+ actions : {
+ throttleTyping: function() {
+ Ember.run.debounce(this, this.searchFiles, 1000);
+ }
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/controllers/files.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/controllers/files.js b/contrib/views/files/src/main/resources/ui/app/controllers/files.js
index 8b5bb7b..30d9896 100644
--- a/contrib/views/files/src/main/resources/ui/app/controllers/files.js
+++ b/contrib/views/files/src/main/resources/ui/app/controllers/files.js
@@ -28,9 +28,9 @@ export default Ember.Controller.extend({
isSelected: Ember.computed('selectedFilesCount', 'selectedFolderCount', function() {
return (this.get('selectedFilesCount') + this.get('selectedFolderCount')) !== 0;
}),
-
- queryParams: ['path'],
+ queryParams: ['path', 'filter'],
path: '/',
+ filter: '',
columns: columnConfig,
currentMessagesCount: Ember.computed.alias('logger.currentMessagesCount'),
@@ -71,16 +71,10 @@ export default Ember.Controller.extend({
return parentPath;
}),
- sortedContent: Ember.computed.sort('model', 'sortProperty'),
+ arrangedContent: Ember.computed.sort('model', 'sortProperty'),
- arrangedContent: Ember.computed('model', 'sortProperty', 'validSearchText', function() {
- var searchText = this.get('validSearchText');
- if(!Ember.isBlank(searchText)) {
- return this.get('sortedContent').filter(function(entry) {
- return !!entry.get('name').match(searchText);
- });
- }
- return this.get('sortedContent');
+ metaInfo: Ember.computed('model', function() {
+ return this.get('model.meta');
}),
selectedFilePathsText: function () {
@@ -144,7 +138,7 @@ export default Ember.Controller.extend({
selectAll: function(selectStatus) {
this.get('fileSelectionService').deselectAll();
if(selectStatus === false) {
- this.get('fileSelectionService').selectFiles(this.get('sortedContent'));
+ this.get('fileSelectionService').selectFiles(this.get('arrangedContent'));
}
},
@@ -155,7 +149,7 @@ export default Ember.Controller.extend({
//Context Menu actions
openFolder: function(path) {
- this.transitionToRoute({queryParams: {path: path}});
+ this.transitionToRoute({queryParams: {path: path, filter:''}});
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/routes/files.js
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/routes/files.js b/contrib/views/files/src/main/resources/ui/app/routes/files.js
index 140732f..be7a515 100644
--- a/contrib/views/files/src/main/resources/ui/app/routes/files.js
+++ b/contrib/views/files/src/main/resources/ui/app/routes/files.js
@@ -26,13 +26,15 @@ export default Ember.Route.extend(FileOperationMixin, {
queryParams: {
path: {
refreshModel: true
+ },
+ filter: {
+ refreshModel: true
}
},
model: function(params) {
this.store.unloadAll('file');
- return this.store.query('file', {path: params.path});
+ return this.store.query('file', {path: params.path, nameFilter:params.filter});
},
-
setupController: function(controller, model) {
this._super(controller, model);
controller.set('searchText', '');
@@ -44,7 +46,17 @@ export default Ember.Route.extend(FileOperationMixin, {
refreshCurrentRoute: function() {
this.refresh();
},
+ searchAction : function(searchText) {
+ this.set('controller.filter', searchText);
+ this.transitionTo({
+ queryParams: {
+ path: this.get('currentPath'),
+ filter: searchText
+ }
+ });
+
+ },
error: function(error, transition) {
this.get('fileSelectionService').reset();
let path = transition.queryParams.path;
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs b/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
index 72ed840..5198504 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/components/file-row.hbs
@@ -19,7 +19,7 @@
<div class="row">
<div class={{get-value-from-columns columnHeaders 'name' 'columnClass'}}>
{{#if file.isDirectory}}
- {{#link-to 'files' (query-params path=file.path) bubbles=false title=file.name}}{{fa-icon "folder-o"}} {{shorten-text file.name 40}} {{/link-to}}
+ {{#link-to 'files' (query-params path=file.path filter='') bubbles=false title=file.name}}{{fa-icon "folder-o"}} {{shorten-text file.name 40}} {{/link-to}}
{{else}}
<span title={{ file.name }}>{{fa-icon "file-o"}} {{shorten-text file.name 40}}</span>
{{/if}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs b/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
index 298d672..f3dc8f9 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/components/file-search.hbs
@@ -16,5 +16,5 @@
* limitations under the License.
}}
-{{input type="text" placeholder="Search in current directory..." class="form-control input-sm" value=searchText}}
+{{input type="text" placeholder="Search in current directory..." class="form-control input-sm" action='throttleTyping' on="key-down" value=searchText}}
<span class="input-group-addon">{{fa-icon icon='search'}}</span>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/ui/app/templates/files.hbs b/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
index 63e0dd8..5714cf3 100644
--- a/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
+++ b/contrib/views/files/src/main/resources/ui/app/templates/files.hbs
@@ -29,7 +29,11 @@
{{else}}
<span class="context-text" style=" z-index: 1;
position: relative;">
- Total: <strong>{{arrangedContent.length}}</strong> files or folders
+ {{#if metaInfo.truncated}}
+ Showing <strong>{{arrangedContent.length}}</strong> files or folders of <strong>{{metaInfo.originalSize}}</strong>
+ {{else}}
+ Total: <strong>{{arrangedContent.length}}</strong> files or folders
+ {{/if}}
</span>
{{/if}}
</div>
@@ -82,7 +86,7 @@
</div>
<div class="col-md-4 col-xs-4">
<div class="row">
- {{file-search searchText=searchText searchAction="searchFiles"}}
+ {{file-search searchText=filter searchAction="searchAction"}}
</div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
index f431f66..6ddc8f6 100644
--- a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
+++ b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
@@ -110,7 +110,7 @@ public class FilebrowserTest{
FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
request.path = "/tmp1";
fileBrowserService.fileOps().mkdir(request);
- Response response = fileBrowserService.fileOps().listdir("/");
+ Response response = fileBrowserService.fileOps().listdir("/", null);
JSONObject responseObject = (JSONObject) response.getEntity();
JSONArray statuses = (JSONArray) responseObject.get("files");
System.out.println(response.getEntity());
@@ -140,7 +140,7 @@ public class FilebrowserTest{
public void testUploadFile() throws Exception {
Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world");
Assert.assertEquals(200, response.getStatus());
- Response listdir = fileBrowserService.fileOps().listdir("/tmp");
+ Response listdir = fileBrowserService.fileOps().listdir("/tmp", null);
JSONObject responseObject = (JSONObject) listdir.getEntity();
JSONArray statuses = (JSONArray) responseObject.get("files");
System.out.println(statuses.size());
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
new file mode 100644
index 0000000..6bd13bb
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirListInfo.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+public class DirListInfo {
+ private int originalSize;
+ private boolean truncated;
+ private int finalSize;
+ private String nameFilter;
+
+ public DirListInfo(int originalSize, boolean truncated, int finalSize, String nameFilter) {
+ this.originalSize = originalSize;
+ this.truncated = truncated;
+ this.finalSize = finalSize;
+ this.nameFilter = nameFilter;
+ }
+
+ public int getOriginalSize() {
+ return originalSize;
+ }
+
+ public void setOriginalSize(int originalSize) {
+ this.originalSize = originalSize;
+ }
+
+ public boolean isTruncated() {
+ return truncated;
+ }
+
+ public void setTruncated(boolean truncated) {
+ this.truncated = truncated;
+ }
+
+ public int getFinalSize() {
+ return finalSize;
+ }
+
+ public void setFinalSize(int finalSize) {
+ this.finalSize = finalSize;
+ }
+
+ public String getNameFilter() {
+ return nameFilter;
+ }
+
+ public void setNameFilter(String nameFilter) {
+ this.nameFilter = nameFilter;
+ }
+
+ @Override
+ public String toString() {
+ return "DirListInfo{" +
+ "originalSize=" + originalSize +
+ ", truncated=" + truncated +
+ ", finalSize=" + finalSize +
+ ", nameFilter='" + nameFilter + '\'' +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ DirListInfo that = (DirListInfo) o;
+
+ if (originalSize != that.originalSize) return false;
+ if (truncated != that.truncated) return false;
+ if (finalSize != that.finalSize) return false;
+ return nameFilter != null ? nameFilter.equals(that.nameFilter) : that.nameFilter == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = originalSize;
+ result = 31 * result + (truncated ? 1 : 0);
+ result = 31 * result + finalSize;
+ result = 31 * result + (nameFilter != null ? nameFilter.hashCode() : 0);
+ return result;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
new file mode 100644
index 0000000..f922b00
--- /dev/null
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/DirStatus.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.hadoop.fs.FileStatus;
+
+import java.util.Arrays;
+
+public class DirStatus {
+ private DirListInfo dirListInfo;
+ private FileStatus [] fileStatuses;
+
+ public DirStatus(FileStatus[] fileStatuses, DirListInfo dirListInfo) {
+ this.fileStatuses = fileStatuses;
+ this.dirListInfo = dirListInfo;
+ }
+
+ public DirListInfo getDirListInfo() {
+ return dirListInfo;
+ }
+
+ public void setDirListInfo(DirListInfo dirListInfo) {
+ this.dirListInfo = dirListInfo;
+ }
+
+ public FileStatus[] getFileStatuses() {
+ return fileStatuses;
+ }
+
+ public void setFileStatuses(FileStatus[] fileStatuses) {
+ this.fileStatuses = fileStatuses;
+ }
+
+ @Override
+ public String toString() {
+ return "DirStatus{" +
+ "dirListInfo=" + dirListInfo +
+ ", fileStatuses=" + Arrays.toString(fileStatuses) +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ DirStatus dirStatus = (DirStatus) o;
+
+ if (dirListInfo != null ? !dirListInfo.equals(dirStatus.dirListInfo) : dirStatus.dirListInfo != null) return false;
+ // Probably incorrect - comparing Object[] arrays with Arrays.equals
+ return Arrays.equals(fileStatuses, dirStatus.fileStatuses);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = dirListInfo != null ? dirListInfo.hashCode() : 0;
+ result = 31 * result + Arrays.hashCode(fileStatuses);
+ return result;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 90fa483..8b987be 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -18,6 +18,7 @@
package org.apache.ambari.view.utils.hdfs;
+import com.google.common.base.Strings;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,6 +41,7 @@ import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.LinkedHashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -51,13 +53,14 @@ public class HdfsApi {
LoggerFactory.getLogger(HdfsApi.class);
private final Configuration conf;
- private final Map<String, String> authParams;
+ private Map<String, String> authParams;
private FileSystem fs;
private UserGroupInformation ugi;
/**
* Constructor
+ *
* @param configurationBuilder hdfs configuration builder
* @throws IOException
* @throws InterruptedException
@@ -76,6 +79,38 @@ public class HdfsApi {
});
}
+ /**
+ * for testing
+ * @throws IOException
+ * @throws InterruptedException
+ * @throws HdfsApiException
+ */
+ HdfsApi(Configuration configuration, FileSystem fs, UserGroupInformation ugi) throws IOException,
+ InterruptedException, HdfsApiException {
+ if(null != configuration){
+ conf = configuration;
+ }else {
+ conf = new Configuration();
+ }
+
+ UserGroupInformation.setConfiguration(conf);
+ if(null != ugi){
+ this.ugi = ugi;
+ }else {
+ this.ugi = UserGroupInformation.getCurrentUser();
+ }
+
+ if(null != fs){
+ this.fs = fs;
+ }else {
+ this.fs = execute(new PrivilegedExceptionAction<FileSystem>() {
+ public FileSystem run() throws IOException {
+ return FileSystem.get(conf);
+ }
+ });
+ }
+ }
+
private UserGroupInformation getProxyUser() throws IOException {
UserGroupInformation proxyuser;
if (authParams.containsKey("proxyuser")) {
@@ -101,6 +136,7 @@ public class HdfsApi {
/**
* List dir operation
+ *
* @param path path
* @return array of FileStatus objects
* @throws FileNotFoundException
@@ -117,7 +153,56 @@ public class HdfsApi {
}
/**
+ *
+ * @param path : list files and dirs in this path
+ * @param nameFilter : if not null or empty, only file names containing this string are returned.
+ * @param maxAllowedSize : maximum number of files sent in output. -1 means infinite.
+ * @return
+ * @throws FileNotFoundException
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public DirStatus listdir(final String path, final String nameFilter, int maxAllowedSize) throws FileNotFoundException,
+ IOException, InterruptedException {
+ FileStatus[] fileStatuses = this.listdir(path);
+ return filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+ }
+
+ public DirStatus filterAndTruncateDirStatus(String nameFilter, int maxAllowedSize, FileStatus[] fileStatuses) {
+ if(null == fileStatuses){
+ return new DirStatus(null, new DirListInfo(0, false, 0, nameFilter));
+ }
+
+ int originalSize = fileStatuses.length;
+ boolean truncated = false;
+
+ if (!Strings.isNullOrEmpty(nameFilter)) {
+ List<FileStatus> filteredList = new LinkedList<>();
+ for(FileStatus fileStatus : fileStatuses){
+ if(maxAllowedSize >=0 && maxAllowedSize <= filteredList.size()){
+ truncated = true;
+ break;
+ }
+ if(fileStatus.getPath().getName().contains(nameFilter)){
+ filteredList.add(fileStatus);
+ }
+ }
+ fileStatuses = filteredList.toArray(new FileStatus[0]);
+ }
+
+ if(maxAllowedSize >=0 && fileStatuses.length > maxAllowedSize) { // in cases where name filter loop is not executed.
+ truncated = true;
+ fileStatuses = Arrays.copyOf(fileStatuses, maxAllowedSize);
+ }
+
+ int finalSize = fileStatuses.length;
+
+ return new DirStatus(fileStatuses, new DirListInfo(originalSize, truncated, finalSize, nameFilter));
+ }
+
+ /**
* Get file status
+ *
* @param path path
* @return file status
* @throws IOException
@@ -135,6 +220,7 @@ public class HdfsApi {
/**
* Make directory
+ *
* @param path path
* @return success
* @throws IOException
@@ -151,6 +237,7 @@ public class HdfsApi {
/**
* Rename
+ *
* @param src source path
* @param dst destination path
* @return success
@@ -168,6 +255,7 @@ public class HdfsApi {
/**
* Check is trash enabled
+ *
* @return true if trash is enabled
* @throws Exception
*/
@@ -182,6 +270,7 @@ public class HdfsApi {
/**
* Home directory
+ *
* @return home directory
* @throws Exception
*/
@@ -195,6 +284,7 @@ public class HdfsApi {
/**
* Hdfs Status
+ *
* @return home directory
* @throws Exception
*/
@@ -208,6 +298,7 @@ public class HdfsApi {
/**
* Trash directory
+ *
* @return trash directory
* @throws Exception
*/
@@ -236,7 +327,7 @@ public class HdfsApi {
/**
* Trash directory path.
*
- * @param filePath the path to the file
+ * @param filePath the path to the file
* @return trash directory path for the file
* @throws Exception
*/
@@ -251,6 +342,7 @@ public class HdfsApi {
/**
* Empty trash
+ *
* @return
* @throws Exception
*/
@@ -266,6 +358,7 @@ public class HdfsApi {
/**
* Move to trash
+ *
* @param path path
* @return success
* @throws IOException
@@ -282,7 +375,8 @@ public class HdfsApi {
/**
* Delete
- * @param path path
+ *
+ * @param path path
* @param recursive delete recursive
* @return success
* @throws IOException
@@ -299,7 +393,8 @@ public class HdfsApi {
/**
* Create file
- * @param path path
+ *
+ * @param path path
* @param overwrite overwrite existent file
* @return output stream
* @throws IOException
@@ -316,6 +411,7 @@ public class HdfsApi {
/**
* Open file
+ *
* @param path path
* @return input stream
* @throws IOException
@@ -332,7 +428,8 @@ public class HdfsApi {
/**
* Change permissions
- * @param path path
+ *
+ * @param path path
* @param permissions permissions in format rwxrwxrwx
* @throws IOException
* @throws InterruptedException
@@ -353,7 +450,8 @@ public class HdfsApi {
/**
* Copy file
- * @param src source path
+ *
+ * @param src source path
* @param dest destination path
* @throws java.io.IOException
* @throws InterruptedException
@@ -380,8 +478,9 @@ public class HdfsApi {
/**
* Executes action on HDFS using doAs
+ *
* @param action strategy object
- * @param <T> result type
+ * @param <T> result type
* @return result of operation
* @throws IOException
* @throws InterruptedException
@@ -419,10 +518,9 @@ public class HdfsApi {
* Converts a Hadoop permission into a Unix permission symbolic representation
* (i.e. -rwxr--r--) or default if the permission is NULL.
*
- * @param p
- * Hadoop permission.
+ * @param p Hadoop permission.
* @return the Unix permission symbolic representation or default if the
- * permission is NULL.
+ * permission is NULL.
*/
private static String permissionToString(FsPermission p) {
return (p == null) ? "default" : "-" + p.getUserAction().SYMBOL
@@ -435,8 +533,7 @@ public class HdfsApi {
* specified URL.
* <p/>
*
- * @param status
- * Hadoop file status.
+ * @param status Hadoop file status.
* @return The JSON representation of the file status.
*/
public Map<String, Object> fileStatusToJSON(FileStatus status) {
@@ -465,8 +562,7 @@ public class HdfsApi {
* specified URL.
* <p/>
*
- * @param status
- * Hadoop file status array.
+ * @param status Hadoop file status array.
* @return The JSON representation of the file status array.
*/
@SuppressWarnings("unchecked")
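To make the filter-and-truncate contract concrete, here is one of the cases the new unit test (below) pins down, annotated — a sketch reusing the test's own helper and fixture names:
FileStatus[] fileStatuses = getFileStatuses(20);   // test helper: files named /0 .. /19
DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus("1", 3, fileStatuses);
// Scan order: /0 skipped, /1, /10, /11 collected; at the 3-item cap the loop
// breaks before reaching the remaining matches, so:
// dirStatus.getDirListInfo() equals DirListInfo{originalSize=20, truncated=true, finalSize=3, nameFilter='1'}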
http://git-wip-us.apache.org/repos/asf/ambari/blob/f6ecbd1d/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
new file mode 100644
index 0000000..e7a6752
--- /dev/null
+++ b/contrib/views/utils/src/test/java/org/apache/ambari/view/utils/hdfs/HdfsApiTest.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.utils.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+public class HdfsApiTest {
+ private FileSystem fs;
+ private HdfsApi hdfsApi;
+ private Configuration conf;
+ private MiniDFSCluster hdfsCluster;
+
+ @Before
+ public void setup() throws IOException, HdfsApiException, InterruptedException {
+ File baseDir = new File("./target/hdfs/" + "HdfsApiTest.filterAndTruncateDirStatus").getAbsoluteFile();
+ FileUtil.fullyDelete(baseDir);
+
+ conf = new Configuration();
+ conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
+ MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+ hdfsCluster = builder.build();
+ String hdfsURI = hdfsCluster.getURI() + "/";
+ conf.set("webhdfs.url", hdfsURI);
+ conf.set("fs.defaultFS", hdfsURI);
+ fs = FileSystem.get(conf);
+ hdfsApi = new HdfsApi(conf, fs, null);
+
+ }
+
+ @After
+ public void tearDown(){
+ hdfsCluster.shutdown();
+ }
+
+ @Test
+ public void filterAndTruncateDirStatus() throws Exception {
+ {
+ // null fileStatuses
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus("", 0, null);
+ Assert.assertEquals(new DirStatus(null, new DirListInfo(0, false, 0, "")), dirStatus);
+ }
+
+ {
+ FileStatus[] fileStatuses = getFileStatuses(10);
+ DirStatus dirStatus1 = hdfsApi.filterAndTruncateDirStatus("", 0, fileStatuses);
+ Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(10, true, 0, "")), dirStatus1);
+ }
+
+ {
+ int originalSize = 10;
+ int maxAllowedSize = 5;
+ String nameFilter = "";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus2 = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+ Assert.assertEquals(new DirStatus(Arrays.copyOf(fileStatuses, maxAllowedSize), new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus2);
+ }
+
+ {
+ int originalSize = 10;
+ int maxAllowedSize = 10;
+ String nameFilter = "";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus2 = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+ Assert.assertEquals(new DirStatus(Arrays.copyOf(fileStatuses, maxAllowedSize), new DirListInfo(originalSize, false, maxAllowedSize, nameFilter)), dirStatus2);
+ }
+
+ {
+ int originalSize = 11;
+ int maxAllowedSize = 2;
+ String nameFilter = "1";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10]}, new DirListInfo(originalSize, false, 2, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 20;
+ int maxAllowedSize = 3;
+ String nameFilter = "1";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, true, 3, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 12;
+ int maxAllowedSize = 3;
+ String nameFilter = "1";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, false, 3, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 13;
+ int maxAllowedSize = 3;
+ String nameFilter = "1";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[1], fileStatuses[10], fileStatuses[11]}, new DirListInfo(originalSize, true, 3, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 0;
+ int maxAllowedSize = 3;
+ String nameFilter = "1";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(originalSize, false, originalSize, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 20;
+ int maxAllowedSize = 3;
+ String nameFilter = "";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 20;
+ int maxAllowedSize = 3;
+ String nameFilter = null;
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, true, maxAllowedSize, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 3;
+ int maxAllowedSize = 3;
+ String nameFilter = null;
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[]{fileStatuses[0], fileStatuses[1], fileStatuses[2]}, new DirListInfo(originalSize, false, maxAllowedSize, nameFilter)), dirStatus);
+ }
+
+ {
+ int originalSize = 20;
+ int maxAllowedSize = 3;
+ String nameFilter = "a";
+ FileStatus[] fileStatuses = getFileStatuses(originalSize);
+ DirStatus dirStatus = hdfsApi.filterAndTruncateDirStatus(nameFilter, maxAllowedSize, fileStatuses);
+
+ Assert.assertEquals(new DirStatus(new FileStatus[0], new DirListInfo(originalSize, false, 0, nameFilter)), dirStatus);
+ }
+
+ }
+
+ private FileStatus[] getFileStatuses(int numberOfFiles) {
+ FileStatus[] fileStatuses = new FileStatus[numberOfFiles];
+ for(int i = 0 ; i < numberOfFiles; i++){
+ fileStatuses[i] = getFileStatus("/"+i);
+ }
+
+ return fileStatuses;
+ }
+
+ private FileStatus getFileStatus(String path) {
+ return new FileStatus(10, false, 3, 1000, 10000, new Path(path));
+ }
+
+}
\ No newline at end of file
[34/57] [abbrv] ambari git commit: AMBARI-21916. Shut down LLAP when
HSI startup fails.
Posted by lp...@apache.org.
AMBARI-21916. Shut down LLAP when HSI startup fails.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75c8f5ef
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75c8f5ef
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75c8f5ef
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 75c8f5ef2374f8d5af01d0e6212053437d3d9f6e
Parents: a9d622e
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Sep 8 10:25:54 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Sep 8 10:26:06 2017 -0700
----------------------------------------------------------------------
.../HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/75c8f5ef/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index e8e9666..c0b152e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -114,6 +114,8 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
# Start LLAP before Hive Server Interactive start.
status = self._llap_start(env)
if not status:
+ # if we couldn't get LLAP into RUNNING or RUNNING_ALL state, stop the LLAP process before bailing out.
+ self._llap_stop(env)
raise Fail("Skipping START of Hive Server Interactive since LLAP app couldn't be STARTED.")
# TODO : test the workability of Ranger and Hive2 during upgrade
[20/57] [abbrv] ambari git commit: AMBARI-21779. Storm should not
have ranger 'ranger-admin-site' and 'ranger-ugsync-site' dependencies.
(JaySenSharma via Swapan Shridhar).
Posted by lp...@apache.org.
AMBARI-21779. Storm should not have ranger 'ranger-admin-site' and 'ranger-ugsync-site' dependencies. (JaySenSharma via Swapan Shridhar).
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d7a787bb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d7a787bb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d7a787bb
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: d7a787bb2fb643bc1cae193f491196c181ae5f40
Parents: a5291ab
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Thu Sep 7 10:54:50 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Thu Sep 7 10:55:01 2017 -0700
----------------------------------------------------------------------
.../src/main/resources/common-services/STORM/0.9.1/metainfo.xml | 2 --
.../main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml | 2 --
.../src/main/resources/common-services/STORM/1.0.1/metainfo.xml | 2 --
.../src/main/resources/common-services/STORM/1.1.0/metainfo.xml | 2 --
4 files changed, 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d7a787bb/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml
index 0c62c1a..e97300e 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/metainfo.xml
@@ -164,8 +164,6 @@
<config-type>ranger-storm-policymgr-ssl</config-type>
<config-type>ranger-storm-security</config-type>
<config-type>admin-properties</config-type>
- <config-type>ranger-ugsync-site</config-type>
- <config-type>ranger-admin-site</config-type>
<config-type>zookeeper-env</config-type>
<config-type>zoo.cfg</config-type>
</configuration-dependencies>
http://git-wip-us.apache.org/repos/asf/ambari/blob/d7a787bb/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml
index 1bc23e4..36dddd1 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/metainfo.xml
@@ -159,8 +159,6 @@
<config-type>ranger-storm-policymgr-ssl</config-type>
<config-type>ranger-storm-security</config-type>
<config-type>admin-properties</config-type>
- <config-type>ranger-ugsync-site</config-type>
- <config-type>ranger-admin-site</config-type>
<config-type>zookeeper-env</config-type>
<config-type>zoo.cfg</config-type>
<config-type>application.properties</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/d7a787bb/ambari-server/src/main/resources/common-services/STORM/1.0.1/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1/metainfo.xml b/ambari-server/src/main/resources/common-services/STORM/1.0.1/metainfo.xml
index 084eac2..f7e771a 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1/metainfo.xml
@@ -32,8 +32,6 @@
<config-type>ranger-storm-policymgr-ssl</config-type>
<config-type>ranger-storm-security</config-type>
<config-type>admin-properties</config-type>
- <config-type>ranger-ugsync-site</config-type>
- <config-type>ranger-admin-site</config-type>
<config-type>zookeeper-env</config-type>
<config-type>zoo.cfg</config-type>
<config-type>application.properties</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/d7a787bb/ambari-server/src/main/resources/common-services/STORM/1.1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.1.0/metainfo.xml b/ambari-server/src/main/resources/common-services/STORM/1.1.0/metainfo.xml
index 94f5ca3..3ad0cb6 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.1.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/1.1.0/metainfo.xml
@@ -32,8 +32,6 @@
<config-type>ranger-storm-policymgr-ssl</config-type>
<config-type>ranger-storm-security</config-type>
<config-type>admin-properties</config-type>
- <config-type>ranger-ugsync-site</config-type>
- <config-type>ranger-admin-site</config-type>
<config-type>zookeeper-env</config-type>
<config-type>zoo.cfg</config-type>
<config-type>application.properties</config-type>
[13/57] [abbrv] ambari git commit: AMBARI-21883. Upgrade to Ambari
2.6.0 fails with DB constraint violation on fk_sc_history_upgrade_id
(dlysnichenko)
Posted by lp...@apache.org.
AMBARI-21883. Upgrade to Ambari 2.6.0 fails with DB constraint violation on fk_sc_history_upgrade_id (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0c45d48f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0c45d48f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0c45d48f
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 0c45d48f5d82edbe096461c51832aa90d97593a4
Parents: 54d4d5e
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Sep 6 20:05:32 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Sep 6 20:06:05 2017 +0300
----------------------------------------------------------------------
.../ambari/server/checks/DatabaseConsistencyCheckHelper.java | 5 ++++-
.../org/apache/ambari/server/upgrade/UpgradeCatalog260.java | 6 +++++-
2 files changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0c45d48f/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index 6fef3b8..054c470 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -605,7 +605,7 @@ public class DatabaseConsistencyCheckHelper {
Set<String> nonMappedConfigs = new HashSet<>();
for (ClusterConfigEntity clusterConfigEntity : notMappedClasterConfigs) {
- if (!clusterConfigEntity.isUnmapped()){
+ if (!clusterConfigEntity.isUnmapped()){ // this check does not report a warning for configs from deleted services
nonMappedConfigs.add(clusterConfigEntity.getType() + '-' + clusterConfigEntity.getTag());
}
}
@@ -619,6 +619,7 @@ public class DatabaseConsistencyCheckHelper {
* of desired host component states. According to ambari logic these
* two tables should have the same count of rows. If not then we are
* adding missed host components.
+ * hard to predict root cause of inconsistency, so it can be dangerous
*/
@Transactional
static void fixHostComponentStatesCountEqualsHostComponentsDesiredStates() {
@@ -692,6 +693,7 @@ public class DatabaseConsistencyCheckHelper {
* The purpose of these checks is to avoid that tables and constraints in ambari's schema get confused with tables
* and constraints in other schemas on the DB user's search path. This can happen after an improperly made DB restore
* operation and can cause issues during upgrade.
+ * we may have no permissions (e.g. external DB) to auto fix this problem
**/
static void checkSchemaName () {
Configuration conf = injector.getInstance(Configuration.class);
@@ -747,6 +749,7 @@ public class DatabaseConsistencyCheckHelper {
/**
* This method checks tables engine type to be innodb for MySQL.
+ * it's too risky to autofix by migrating DB to innodb
* */
static void checkMySQLEngine () {
Configuration conf = injector.getInstance(Configuration.class);
http://git-wip-us.apache.org/repos/asf/ambari/blob/0c45d48f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index de5d5ae..665b350 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -88,6 +88,8 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
public static final String FK_REPO_VERSION_ID = "FK_repo_version_id";
public static final String UPGRADE_TABLE = "upgrade";
+ public static final String UPGRADE_GROUP_TABLE = "upgrade_group";
+ public static final String UPGRADE_ITEM_TABLE = "upgrade_item";
public static final String FROM_REPO_VERSION_ID_COLUMN = "from_repo_version_id";
public static final String TO_REPO_VERSION_ID_COLUMN = "to_repo_version_id";
public static final String ORCHESTRATION_COLUMN = "orchestration";
@@ -159,9 +161,9 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
addSelectedCollumsToClusterconfigTable();
updateHostComponentDesiredStateTable();
updateHostComponentStateTable();
+ dropStaleTables();
updateUpgradeTable();
createUpgradeHistoryTable();
- dropStaleTables();
updateRepositoryVersionTable();
renameServiceDeletedColumn();
}
@@ -231,6 +233,8 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
* @throws java.sql.SQLException
*/
private void updateUpgradeTable() throws SQLException {
+ dbAccessor.clearTable(UPGRADE_ITEM_TABLE);
+ dbAccessor.clearTable(UPGRADE_GROUP_TABLE);
dbAccessor.clearTable(UPGRADE_TABLE);
dbAccessor.dropFKConstraint(UPGRADE_TABLE, FK_UPGRADE_FROM_REPO_ID);
dbAccessor.dropFKConstraint(UPGRADE_TABLE, FK_UPGRADE_TO_REPO_ID);
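Note the ordering in both hunks above: dropStaleTables() now runs before updateUpgradeTable(), and the child tables are cleared before the parent upgrade table. Assuming the usual foreign-key layout (upgrade_item references upgrade_group, which references upgrade), emptying the children first is what keeps the deletes from violating the constraints. The same ordering over plain JDBC, as a sketch:

import java.sql.Connection;
import java.sql.Statement;

final class ClearUpgradeTablesSketch {
  // Clear child tables before the parent so FK constraints are never violated.
  static void clearInFkOrder(Connection conn) throws Exception {
    try (Statement st = conn.createStatement()) {
      st.executeUpdate("DELETE FROM upgrade_item");  // references upgrade_group
      st.executeUpdate("DELETE FROM upgrade_group"); // references upgrade
      st.executeUpdate("DELETE FROM upgrade");       // parent, cleared last
    }
  }
}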
[46/57] [abbrv] ambari git commit: AMBARI-21307 Implemented PUT
operation, added unit tests
Posted by lp...@apache.org.
AMBARI-21307 Implemented PUT operation, added unit tests
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5b3d291
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5b3d291
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5b3d291
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: d5b3d2918eb68405189caffa611af32ece5059a1
Parents: 8ac1c82
Author: lpuskas <lp...@apache.org>
Authored: Thu Jul 13 16:20:58 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:00 2017 +0200
----------------------------------------------------------------------
.../services/AmbariConfigurationService.java | 89 ++++---
.../server/controller/ControllerModule.java | 2 +
.../controller/ResourceProviderFactory.java | 23 +-
.../AbstractControllerResourceProvider.java | 2 +
.../AmbariConfigurationResourceProvider.java | 88 +++++--
.../internal/DefaultProviderModule.java | 2 -
.../server/orm/dao/AmbariConfigurationDAO.java | 4 +
...AmbariConfigurationResourceProviderTest.java | 231 +++++++++++++++++++
8 files changed, 363 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
index 0c159b9..0632361 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
@@ -56,16 +56,10 @@ import io.swagger.annotations.ApiResponses;
* "data": [
* {
* "authentication.ldap.primaryUrl": "localhost:33389"
- * },
- * {
- * "authentication.ldap.secondaryUrl": "localhost:333"
- * },
- * {
+ "authentication.ldap.secondaryUrl": "localhost:333"
* "authentication.ldap.baseDn": "dc=ambari,dc=apache,dc=org"
- * }
- * // ......
- * ]
- * }
+ * // ......
+ * ]
* }
* </pre>
*/
@@ -74,7 +68,7 @@ import io.swagger.annotations.ApiResponses;
public class AmbariConfigurationService extends BaseService {
private static final String AMBARI_CONFIGURATION_REQUEST_TYPE =
- "org.apache.ambari.server.api.services.AmbariConfigurationRequestSwagger";
+ "org.apache.ambari.server.api.services.AmbariConfigurationRequestSwagger";
/**
* Creates an ambari configuration resource.
@@ -87,9 +81,9 @@ public class AmbariConfigurationService extends BaseService {
@POST
@Produces(MediaType.TEXT_PLAIN)
@ApiOperation(value = "Creates an ambari configuration resource",
- nickname = "AmbariConfigurationService#createAmbariConfiguration")
+ nickname = "AmbariConfigurationService#createAmbariConfiguration")
@ApiImplicitParams({
- @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY)
+ @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY)
})
@ApiResponses({
@ApiResponse(code = HttpStatus.SC_CREATED, message = MSG_SUCCESSFUL_OPERATION),
@@ -108,24 +102,24 @@ public class AmbariConfigurationService extends BaseService {
@GET
@Produces(MediaType.TEXT_PLAIN)
@ApiOperation(value = "Retrieve all ambari configuration resources",
- nickname = "AmbariConfigurationService#getAmbariConfigurations",
- notes = "Returns all Ambari configurations.",
- response = AmbariConfigurationResponseSwagger.class,
- responseContainer = RESPONSE_CONTAINER_LIST)
+ nickname = "AmbariConfigurationService#getAmbariConfigurations",
+ notes = "Returns all Ambari configurations.",
+ response = AmbariConfigurationResponseSwagger.class,
+ responseContainer = RESPONSE_CONTAINER_LIST)
@ApiImplicitParams({
- @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION,
- defaultValue = "AmbariConfiguration/data, AmbariConfiguration/id, AmbariConfiguration/type",
- dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
- @ApiImplicitParam(name = QUERY_SORT, value = QUERY_SORT_DESCRIPTION,
- defaultValue = "AmbariConfiguration/id",
- dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
- @ApiImplicitParam(name = QUERY_PAGE_SIZE, value = QUERY_PAGE_SIZE_DESCRIPTION, defaultValue = DEFAULT_PAGE_SIZE, dataType = DATA_TYPE_INT, paramType = PARAM_TYPE_QUERY),
- @ApiImplicitParam(name = QUERY_FROM, value = QUERY_FROM_DESCRIPTION, defaultValue = DEFAULT_FROM, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
- @ApiImplicitParam(name = QUERY_TO, value = QUERY_TO_DESCRIPTION, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
+ @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION,
+ defaultValue = "AmbariConfiguration/data, AmbariConfiguration/id, AmbariConfiguration/type",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_SORT, value = QUERY_SORT_DESCRIPTION,
+ defaultValue = "AmbariConfiguration/id",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_PAGE_SIZE, value = QUERY_PAGE_SIZE_DESCRIPTION, defaultValue = DEFAULT_PAGE_SIZE, dataType = DATA_TYPE_INT, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_FROM, value = QUERY_FROM_DESCRIPTION, defaultValue = DEFAULT_FROM, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY),
+ @ApiImplicitParam(name = QUERY_TO, value = QUERY_TO_DESCRIPTION, dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
})
@ApiResponses(value = {
- @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
- @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
})
public Response getAmbariConfigurations(String body, @Context HttpHeaders headers, @Context UriInfo uri) {
return handleRequest(headers, body, uri, Request.Type.GET, createResource(Resource.Type.AmbariConfiguration,
@@ -136,16 +130,16 @@ public class AmbariConfigurationService extends BaseService {
@Path("{configurationId}")
@Produces(MediaType.TEXT_PLAIN)
@ApiOperation(value = "Retrieve the details of an ambari configuration resource",
- nickname = "AmbariConfigurationService#getAmbariConfiguration",
- response = AmbariConfigurationResponseSwagger.class)
+ nickname = "AmbariConfigurationService#getAmbariConfiguration",
+ response = AmbariConfigurationResponseSwagger.class)
@ApiImplicitParams({
- @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION, defaultValue = "AmbariConfiguration/*",
- dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
+ @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION, defaultValue = "AmbariConfiguration/*",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
})
@ApiResponses(value = {
- @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
- @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
- @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR)
})
public Response getAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri,
@PathParam("configurationId") String configurationId) {
@@ -154,30 +148,35 @@ public class AmbariConfigurationService extends BaseService {
}
@PUT
+ @Path("{configurationId}")
@Produces(MediaType.TEXT_PLAIN)
@ApiOperation(value = "Updates ambari configuration resources - Not implemented yet",
nickname = "AmbariConfigurationService#updateAmbariConfiguration")
@ApiImplicitParams({
- @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY)
+ @ApiImplicitParam(dataType = AMBARI_CONFIGURATION_REQUEST_TYPE, paramType = PARAM_TYPE_BODY),
+ @ApiImplicitParam(name = QUERY_FIELDS, value = QUERY_FILTER_DESCRIPTION, defaultValue = "AmbariConfiguration/*",
+ dataType = DATA_TYPE_STRING, paramType = PARAM_TYPE_QUERY)
})
@ApiResponses({
- @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
- @ApiResponse(code = HttpStatus.SC_ACCEPTED, message = MSG_REQUEST_ACCEPTED),
- @ApiResponse(code = HttpStatus.SC_BAD_REQUEST, message = MSG_INVALID_ARGUMENTS),
- @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
- @ApiResponse(code = HttpStatus.SC_UNAUTHORIZED, message = MSG_NOT_AUTHENTICATED),
- @ApiResponse(code = HttpStatus.SC_FORBIDDEN, message = MSG_PERMISSION_DENIED),
- @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR),
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_ACCEPTED, message = MSG_REQUEST_ACCEPTED),
+ @ApiResponse(code = HttpStatus.SC_BAD_REQUEST, message = MSG_INVALID_ARGUMENTS),
+ @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
+ @ApiResponse(code = HttpStatus.SC_UNAUTHORIZED, message = MSG_NOT_AUTHENTICATED),
+ @ApiResponse(code = HttpStatus.SC_FORBIDDEN, message = MSG_PERMISSION_DENIED),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR),
})
- public Response updateAmbariConfiguration() {
- throw new UnsupportedOperationException("Not yet implemented");
+ public Response updateAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri,
+ @PathParam("configurationId") String configurationId) {
+ return handleRequest(headers, body, uri, Request.Type.PUT, createResource(Resource.Type.AmbariConfiguration,
+ Collections.singletonMap(Resource.Type.AmbariConfiguration, configurationId)));
}
@DELETE
@Path("{configurationId}")
@Produces(MediaType.TEXT_PLAIN)
@ApiOperation(value = "Deletes an ambari configuration resource",
- nickname = "AmbariConfigurationService#deleteAmbariConfiguration")
+ nickname = "AmbariConfigurationService#deleteAmbariConfiguration")
@ApiResponses({
@ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
@ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
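With the new @Path("{configurationId}") on the PUT method, an update becomes an ordinary REST call against a single resource. A hedged usage sketch with HttpURLConnection; the /api/v1/ambariconfigurations path, the X-Requested-By header, and the payload shape are assumptions for illustration, not taken from this diff:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public final class UpdateAmbariConfigSketch {
  public static void main(String[] args) throws Exception {
    // Path and payload are assumptions for illustration only.
    URL url = new URL("http://localhost:8080/api/v1/ambariconfigurations/1");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setRequestProperty("X-Requested-By", "ambari"); // assumed CSRF-style header
    conn.setDoOutput(true);
    String body = "{\"AmbariConfiguration\":{\"type\":\"ldap-configuration\","
        + "\"data\":[{\"authentication.ldap.primaryUrl\":\"localhost:33389\"}]}}";
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode());
  }
}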
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index e151e63..edabcdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.cleanup.ClasspathScannerUtils;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.configuration.Configuration.ConnectionPoolType;
import org.apache.ambari.server.configuration.Configuration.DatabaseType;
+import org.apache.ambari.server.controller.internal.AmbariConfigurationResourceProvider;
import org.apache.ambari.server.controller.internal.ComponentResourceProvider;
import org.apache.ambari.server.controller.internal.CredentialResourceProvider;
import org.apache.ambari.server.controller.internal.HostComponentResourceProvider;
@@ -467,6 +468,7 @@ public class ControllerModule extends AbstractModule {
.implement(ResourceProvider.class, Names.named("credential"), CredentialResourceProvider.class)
.implement(ResourceProvider.class, Names.named("kerberosDescriptor"), KerberosDescriptorResourceProvider.class)
.implement(ResourceProvider.class, Names.named("upgrade"), UpgradeResourceProvider.class)
+ .implement(ResourceProvider.class, Names.named("ambariConfiguration"), AmbariConfigurationResourceProvider.class)
.build(ResourceProviderFactory.class));
install(new FactoryModuleBuilder().implement(
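The one-line FactoryModuleBuilder addition is what wires the @Named("ambariConfiguration") factory method (added to ResourceProviderFactory below) to the concrete provider class. A self-contained sketch of that Guice assisted-inject pattern, with Widget and WidgetFactory as invented stand-ins for ResourceProvider and ResourceProviderFactory:

import javax.inject.Named;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.assistedinject.AssistedInject;
import com.google.inject.assistedinject.FactoryModuleBuilder;
import com.google.inject.name.Names;

interface Widget {}

class DiskWidget implements Widget {
  @AssistedInject
  DiskWidget() {} // no-arg @AssistedInject constructor, mirroring the provider above
}

interface WidgetFactory {
  @Named("disk")
  Widget createDisk(); // the name matches the .implement(...) binding below
}

public final class FactorySketch {
  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        install(new FactoryModuleBuilder()
            .implement(Widget.class, Names.named("disk"), DiskWidget.class)
            .build(WidgetFactory.class));
      }
    });
    System.out.println(injector.getInstance(WidgetFactory.class).createDisk());
  }
}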
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
index 3912138..36dfdf9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
@@ -22,23 +22,22 @@ package org.apache.ambari.server.controller;
import java.util.Map;
import java.util.Set;
+import javax.inject.Named;
+
import org.apache.ambari.server.controller.internal.UpgradeResourceProvider;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.Resource.Type;
import org.apache.ambari.server.controller.spi.ResourceProvider;
-import com.google.inject.name.Named;
public interface ResourceProviderFactory {
@Named("host")
- ResourceProvider getHostResourceProvider(Set<String> propertyIds,
- Map<Type, String> keyPropertyIds,
- AmbariManagementController managementController);
+ ResourceProvider getHostResourceProvider(Set<String> propertyIds, Map<Type, String> keyPropertyIds,
+ AmbariManagementController managementController);
@Named("hostComponent")
- ResourceProvider getHostComponentResourceProvider(Set<String> propertyIds,
- Map<Type, String> keyPropertyIds,
- AmbariManagementController managementController);
+ ResourceProvider getHostComponentResourceProvider(Set<String> propertyIds, Map<Type, String> keyPropertyIds,
+ AmbariManagementController managementController);
@Named("service")
ResourceProvider getServiceResourceProvider(AmbariManagementController managementController);
@@ -47,9 +46,8 @@ public interface ResourceProviderFactory {
ResourceProvider getComponentResourceProvider(AmbariManagementController managementController);
@Named("member")
- ResourceProvider getMemberResourceProvider(Set<String> propertyIds,
- Map<Type, String> keyPropertyIds,
- AmbariManagementController managementController);
+ ResourceProvider getMemberResourceProvider(Set<String> propertyIds, Map<Type, String> keyPropertyIds,
+ AmbariManagementController managementController);
@Named("hostKerberosIdentity")
ResourceProvider getHostKerberosIdentityResourceProvider(AmbariManagementController managementController);
@@ -61,11 +59,12 @@ public interface ResourceProviderFactory {
ResourceProvider getRepositoryVersionResourceProvider();
@Named("kerberosDescriptor")
- ResourceProvider getKerberosDescriptorResourceProvider(AmbariManagementController managementController,
- Set<String> propertyIds,
+ ResourceProvider getKerberosDescriptorResourceProvider(AmbariManagementController managementController, Set<String> propertyIds,
Map<Resource.Type, String> keyPropertyIds);
@Named("upgrade")
UpgradeResourceProvider getUpgradeResourceProvider(AmbariManagementController managementController);
+ @Named("ambariConfiguration")
+ ResourceProvider getAmbariConfigurationResourceProvider();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
index b35b2a8..95d33cf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
@@ -254,6 +254,8 @@ public abstract class AbstractControllerResourceProvider extends AbstractAuthori
return new ClusterKerberosDescriptorResourceProvider(managementController);
case LoggingQuery:
return new LoggingResourceProvider(propertyIds, keyPropertyIds, managementController);
+ case AmbariConfiguration:
+ return resourceProviderFactory.getAmbariConfigurationResourceProvider();
default:
throw new IllegalArgumentException("Unknown type " + type);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
index e8f186d..2302d8b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
@@ -25,7 +25,6 @@ import java.util.Set;
import javax.inject.Inject;
import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.StaticallyInject;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
@@ -46,11 +45,11 @@ import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
+import com.google.inject.assistedinject.AssistedInject;
/**
* Resource provider for AmbariConfiguration resources.
*/
-@StaticallyInject
public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResourceProvider {
private static final Logger LOGGER = LoggerFactory.getLogger(AmbariConfigurationResourceProvider.class);
@@ -60,7 +59,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
/**
* Resource property id constants.
*/
- private enum ResourcePropertyId {
+ public enum ResourcePropertyId {
ID("AmbariConfiguration/id"),
TYPE("AmbariConfiguration/type"),
@@ -112,11 +111,12 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
@Inject
- private static AmbariConfigurationDAO ambariConfigurationDAO;
+ private AmbariConfigurationDAO ambariConfigurationDAO;
private Gson gson;
- protected AmbariConfigurationResourceProvider() {
+ @AssistedInject
+ public AmbariConfigurationResourceProvider() {
super(PROPERTIES, PK_PROPERTY_MAP);
setRequiredCreateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
setRequiredDeleteAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
@@ -134,7 +134,12 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
ResourceAlreadyExistsException, NoSuchParentResourceException {
LOGGER.info("Creating new ambari configuration resource ...");
- AmbariConfigurationEntity ambariConfigurationEntity = getEntityFromRequest(request);
+ AmbariConfigurationEntity ambariConfigurationEntity = null;
+ try {
+ ambariConfigurationEntity = getEntityFromRequest(request);
+ } catch (AmbariException e) {
+ throw new NoSuchParentResourceException(e.getMessage());
+ }
LOGGER.info("Persisting new ambari configuration: {} ", ambariConfigurationEntity);
ambariConfigurationDAO.create(ambariConfigurationEntity);
@@ -148,6 +153,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
Set<Resource> resources = Sets.newHashSet();
+ // retrieves all configurations; filtering is done at a higher level
List<AmbariConfigurationEntity> ambariConfigurationEntities = ambariConfigurationDAO.findAll();
for (AmbariConfigurationEntity ambariConfigurationEntity : ambariConfigurationEntities) {
try {
@@ -181,40 +187,86 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
}
+ @Override
+ protected RequestStatus updateResourcesAuthorized(Request request, Predicate predicate) throws SystemException,
+ UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
+ Long idFromRequest = Long.valueOf((String) PredicateHelper.getProperties(predicate).get(ResourcePropertyId.ID.getPropertyId()));
+
+ AmbariConfigurationEntity persistedEntity = ambariConfigurationDAO.findByPK(idFromRequest);
+ if (persistedEntity == null) {
+ String errorMsg = String.format("Entity with primary key [ %s ] not found in the database.", idFromRequest);
+ LOGGER.error(errorMsg);
+ throw new NoSuchResourceException(errorMsg);
+ }
+
+ try {
+
+ AmbariConfigurationEntity entityFromRequest = getEntityFromRequest(request);
+ persistedEntity.getConfigurationBaseEntity().setVersionTag(entityFromRequest.getConfigurationBaseEntity().getVersionTag());
+ persistedEntity.getConfigurationBaseEntity().setVersion(entityFromRequest.getConfigurationBaseEntity().getVersion());
+ persistedEntity.getConfigurationBaseEntity().setType(entityFromRequest.getConfigurationBaseEntity().getType());
+ persistedEntity.getConfigurationBaseEntity().setConfigurationData(entityFromRequest.getConfigurationBaseEntity().getConfigurationData());
+ persistedEntity.getConfigurationBaseEntity().setConfigurationAttributes(entityFromRequest.getConfigurationBaseEntity().getConfigurationAttributes());
+
+
+ ambariConfigurationDAO.create(persistedEntity);
+ } catch (AmbariException e) {
+ throw new NoSuchParentResourceException(e.getMessage());
+ }
+
+ return getRequestStatus(null);
+
+ }
+
private Resource toResource(AmbariConfigurationEntity entity, Set<String> requestedIds) throws AmbariException {
+
+ if (null == entity) {
+ throw new IllegalArgumentException("Null entity can't be transformed into a resource");
+ }
+
+ if (null == entity.getConfigurationBaseEntity()) {
+ throw new IllegalArgumentException("Invalid configuration entity can't be transformed into a resource");
+ }
Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
Set<Map<String, String>> configurationSet = gson.fromJson(entity.getConfigurationBaseEntity().getConfigurationData(), Set.class);
setResourceProperty(resource, ResourcePropertyId.ID.getPropertyId(), entity.getId(), requestedIds);
setResourceProperty(resource, ResourcePropertyId.TYPE.getPropertyId(), entity.getConfigurationBaseEntity().getType(), requestedIds);
setResourceProperty(resource, ResourcePropertyId.DATA.getPropertyId(), configurationSet, requestedIds);
+ setResourceProperty(resource, ResourcePropertyId.VERSION.getPropertyId(), entity.getConfigurationBaseEntity().getVersion(), requestedIds);
+ setResourceProperty(resource, ResourcePropertyId.VERSION_TAG.getPropertyId(), entity.getConfigurationBaseEntity().getVersionTag(), requestedIds);
return resource;
}
- private AmbariConfigurationEntity getEntityFromRequest(Request request) {
+ private AmbariConfigurationEntity getEntityFromRequest(Request request) throws AmbariException {
AmbariConfigurationEntity ambariConfigurationEntity = new AmbariConfigurationEntity();
ambariConfigurationEntity.setConfigurationBaseEntity(new ConfigurationBaseEntity());
+ // set of resource properties (each entry in the set belongs to a different resource)
+ Set<Map<String, Object>> resourcePropertiesSet = request.getProperties();
+
+ if (resourcePropertiesSet.size() != 1) {
+ throw new AmbariException("There must be only one resource specified in the request");
+ }
+
for (ResourcePropertyId resourcePropertyId : ResourcePropertyId.values()) {
- Object requestValue = getValueFromRequest(resourcePropertyId, request);
+ Object requestValue = getValueFromResourceProperties(resourcePropertyId, resourcePropertiesSet.iterator().next());
switch (resourcePropertyId) {
case DATA:
if (requestValue == null) {
throw new IllegalArgumentException("No configuration data is provided in the request");
}
-
ambariConfigurationEntity.getConfigurationBaseEntity().setConfigurationData(gson.toJson(requestValue));
break;
case TYPE:
ambariConfigurationEntity.getConfigurationBaseEntity().setType((String) requestValue);
break;
-
case VERSION:
- Integer version = (requestValue == null) ? DEFAULT_VERSION : Integer.valueOf((Integer) requestValue);
+ Integer version = (requestValue == null) ? DEFAULT_VERSION : Integer.valueOf((String) requestValue);
ambariConfigurationEntity.getConfigurationBaseEntity().setVersion((version));
break;
case VERSION_TAG:
@@ -231,15 +283,13 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
}
- private Object getValueFromRequest(ResourcePropertyId resourcePropertyIdEnum, Request request) {
- LOGGER.debug("Locating resource property [{}] in the request ...", resourcePropertyIdEnum);
+ private Object getValueFromResourceProperties(ResourcePropertyId resourcePropertyIdEnum, Map<String, Object> resourceProperties) {
+ LOGGER.debug("Locating resource property [{}] in the resource properties map ...", resourcePropertyIdEnum);
Object requestValue = null;
- for (Map<String, Object> propertyMap : request.getProperties()) {
- if (propertyMap.containsKey(resourcePropertyIdEnum.getPropertyId())) {
- requestValue = propertyMap.get(resourcePropertyIdEnum.getPropertyId());
- LOGGER.debug("Found resource property {} in the request, value: {} ...", resourcePropertyIdEnum, requestValue);
- break;
- }
+
+ if (resourceProperties.containsKey(resourcePropertyIdEnum.getPropertyId())) {
+ requestValue = resourceProperties.get(resourcePropertyIdEnum.getPropertyId());
+ LOGGER.debug("Found resource property {} in the resource properties map, value: {}", resourcePropertyIdEnum, requestValue);
}
return requestValue;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
index 6e7ca0a..95c7b83 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
@@ -122,8 +122,6 @@ public class DefaultProviderModule extends AbstractProviderModule {
return new ArtifactResourceProvider(managementController);
case RemoteCluster:
return new RemoteClusterResourceProvider();
- case AmbariConfiguration:
- return new AmbariConfigurationResourceProvider();
default:
LOGGER.debug("Delegating creation of resource provider for: {} to the AbstractControllerResourceProvider", type.getInternalType());
return AbstractControllerResourceProvider.getResourceProvider(type, propertyIds,
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
index c29a423..5710a7f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
@@ -19,8 +19,11 @@ import javax.inject.Singleton;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import com.google.inject.persist.Transactional;
+
/**
* DAO dealing with ambari configuration related JPA operations.
+ * Operations delegate to the JPA provider's generic CRUD implementation.
*/
@Singleton
@@ -31,6 +34,7 @@ public class AmbariConfigurationDAO extends CrudDAO<AmbariConfigurationEntity, L
super(AmbariConfigurationEntity.class);
}
+ @Transactional
public void create(AmbariConfigurationEntity entity) {
super.create(entity);
}
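Since the DAO now extends CrudDAO, it inherits findByPK, findAll and removeByPK, and the create override mainly adds the @Transactional boundary. A usage sketch under those assumptions (the DAO would normally be injected by Guice; the entity setters match the ones exercised in the test below):

import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;

final class AmbariConfigurationDaoUsageSketch {
  // The DAO is passed in to keep the sketch small; Guice would normally inject it.
  static AmbariConfigurationEntity createLdapConfig(AmbariConfigurationDAO dao) {
    AmbariConfigurationEntity entity = new AmbariConfigurationEntity();
    entity.setConfigurationBaseEntity(new ConfigurationBaseEntity());
    entity.getConfigurationBaseEntity().setType("ldap-config");
    entity.getConfigurationBaseEntity().setVersion(Integer.valueOf(1));
    entity.getConfigurationBaseEntity().setVersionTag("initial");
    entity.getConfigurationBaseEntity().setConfigurationData(
        "[{\"authentication.ldap.primaryUrl\":\"localhost:33389\"}]");
    dao.create(entity); // runs inside the new @Transactional boundary
    // findByPK / findAll / removeByPK are inherited from CrudDAO
    return entity;
  }
}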
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5b3d291/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProviderTest.java
new file mode 100644
index 0000000..d974682
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProviderTest.java
@@ -0,0 +1,231 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
+import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockRule;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.TestSubject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+public class AmbariConfigurationResourceProviderTest extends EasyMockSupport {
+
+ @Rule
+ public EasyMockRule mocks = new EasyMockRule(this);
+
+ @Mock
+ private Request requestMock;
+
+ @Mock
+ private AmbariConfigurationDAO ambariConfigurationDAO;
+
+ private Capture<AmbariConfigurationEntity> ambariConfigurationEntityCapture;
+
+ private Gson gson;
+
+ private static final String DATA_MOCK_STR = "[\n" +
+ " {\n" +
+ " \"authentication.ldap.baseDn\" : \"dc=ambari,dc=apache,dc=org\",\n" +
+ " \"authentication.ldap.primaryUrl\" : \"localhost:33389\",\n" +
+ " \"authentication.ldap.secondaryUrl\" : \"localhost:333\"\n" +
+ " }\n" +
+ " ]";
+
+ private static final Long PK_LONG = Long.valueOf(1);
+ private static final String PK_STRING = String.valueOf(1);
+ private static final String VERSION_TAG = "test version";
+ private static final String VERSION = "1";
+
+ @TestSubject
+ private AmbariConfigurationResourceProvider ambariConfigurationResourceProvider = new AmbariConfigurationResourceProvider();
+
+ @Before
+ public void setup() {
+ ambariConfigurationEntityCapture = Capture.newInstance();
+ gson = new GsonBuilder().create();
+ }
+
+ @Test
+ public void testCreateAmbariConfigurationRequestResultsInTheProperPersistenceCall() throws Exception {
+
+ // GIVEN
+ // configuration properties parsed from the request
+ Set<Map<String, Object>> resourcePropertiesSet = Sets.newHashSet(
+ new PropertiesMapBuilder()
+ .withId(PK_LONG)
+ .withVersion(VERSION)
+ .withVersionTag(VERSION_TAG)
+ .withData(DATA_MOCK_STR)
+ .build());
+
+ // mock the request to return the properties
+ EasyMock.expect(requestMock.getProperties()).andReturn(resourcePropertiesSet);
+
+ // capture the entity the DAO gets called with
+ ambariConfigurationDAO.create(EasyMock.capture(ambariConfigurationEntityCapture));
+ replayAll();
+
+ // WHEN
+ ambariConfigurationResourceProvider.createResourcesAuthorized(requestMock);
+
+ // THEN
+ AmbariConfigurationEntity capturedAmbariConfigurationEntity = ambariConfigurationEntityCapture.getValue();
+ Assert.assertNotNull(capturedAmbariConfigurationEntity);
+ Assert.assertNull("The entity identifier should be null", capturedAmbariConfigurationEntity.getId());
+ Assert.assertEquals("The entity version is not the expected", Integer.valueOf(VERSION),
+ capturedAmbariConfigurationEntity.getConfigurationBaseEntity().getVersion());
+ Assert.assertEquals("The entity version tag is not the expected", VERSION_TAG,
+ capturedAmbariConfigurationEntity.getConfigurationBaseEntity().getVersionTag());
+ Assert.assertEquals("The entity data is not the expected", DATA_MOCK_STR,
+ gson.fromJson(capturedAmbariConfigurationEntity.getConfigurationBaseEntity().getConfigurationData(), String.class));
+ }
+
+ @Test
+ public void testRemoveAmbariConfigurationRequestResultsInTheProperPersistenceCall() throws Exception {
+ // GIVEN
+ Predicate predicate = new PredicateBuilder().property(
+ AmbariConfigurationResourceProvider.ResourcePropertyId.ID.getPropertyId()).equals("1").toPredicate();
+
+ Capture<Long> pkCapture = Capture.newInstance();
+ ambariConfigurationDAO.removeByPK(EasyMock.capture(pkCapture));
+ replayAll();
+
+ // WHEN
+ ambariConfigurationResourceProvider.deleteResourcesAuthorized(requestMock, predicate);
+
+ // THEN
+ Assert.assertEquals("The pk of the entity to be removed doen't match the expected id", Long.valueOf(1), pkCapture.getValue());
+ }
+
+
+ @Test
+ public void testRetrieveAmbariConfigurationShouldResultInTheProperDAOCall() throws Exception {
+ // GIVEN
+ Predicate predicate = new PredicateBuilder().property(
+ AmbariConfigurationResourceProvider.ResourcePropertyId.ID.getPropertyId()).equals("1").toPredicate();
+
+ EasyMock.expect(ambariConfigurationDAO.findAll()).andReturn(Lists.newArrayList(createDummyAmbariConfigurationEntity()));
+ replayAll();
+
+ // WHEN
+ Set<Resource> resourceSet = ambariConfigurationResourceProvider.getResourcesAuthorized(requestMock, predicate);
+
+ // THEN
+ Assert.assertNotNull(resourceSet);
+ Assert.assertFalse(resourceSet.isEmpty());
+ }
+
+ @Test
+ public void testUpdateAmbariConfigurationShouldResultInTheProperDAOCalls() throws Exception {
+ // GIVEN
+
+ Predicate predicate = new PredicateBuilder().property(
+ AmbariConfigurationResourceProvider.ResourcePropertyId.ID.getPropertyId()).equals("1").toPredicate();
+
+ // properties in the request, representing the updated configuration
+ Set<Map<String, Object>> resourcePropertiesSet = Sets.newHashSet(new PropertiesMapBuilder()
+ .withId(PK_LONG)
+ .withVersion("2")
+ .withVersionTag("version-2")
+ .withData(DATA_MOCK_STR).build());
+
+ EasyMock.expect(requestMock.getProperties()).andReturn(resourcePropertiesSet);
+
+ AmbariConfigurationEntity persistedEntity = createDummyAmbariConfigurationEntity();
+ EasyMock.expect(ambariConfigurationDAO.findByPK(PK_LONG)).andReturn(persistedEntity);
+ ambariConfigurationDAO.create(EasyMock.capture(ambariConfigurationEntityCapture));
+
+ replayAll();
+
+ // WHEN
+ ambariConfigurationResourceProvider.updateResourcesAuthorized(requestMock, predicate);
+
+ // the captured entity should be the updated one
+ AmbariConfigurationEntity updatedEntity = ambariConfigurationEntityCapture.getValue();
+
+ // THEN
+ Assert.assertNotNull(updatedEntity);
+ Assert.assertEquals("The updated version is wrong", Integer.valueOf(2), updatedEntity.getConfigurationBaseEntity().getVersion());
+ }
+
+ private class PropertiesMapBuilder {
+
+ private Map<String, Object> resourcePropertiesMap = Maps.newHashMap();
+
+ private PropertiesMapBuilder() {
+ }
+
+ public PropertiesMapBuilder withId(Long id) {
+ resourcePropertiesMap.put(AmbariConfigurationResourceProvider.ResourcePropertyId.ID.getPropertyId(), id);
+ return this;
+ }
+
+ private PropertiesMapBuilder withVersion(String version) {
+ resourcePropertiesMap.put(AmbariConfigurationResourceProvider.ResourcePropertyId.VERSION.getPropertyId(), version);
+ return this;
+ }
+
+ private PropertiesMapBuilder withVersionTag(String versionTag) {
+ resourcePropertiesMap.put(AmbariConfigurationResourceProvider.ResourcePropertyId.VERSION_TAG.getPropertyId(), versionTag);
+ return this;
+ }
+
+ private PropertiesMapBuilder withData(String dataJson) {
+ resourcePropertiesMap.put(AmbariConfigurationResourceProvider.ResourcePropertyId.DATA.getPropertyId(), dataJson);
+ return this;
+ }
+
+ public Map<String, Object> build() {
+ return this.resourcePropertiesMap;
+ }
+
+ }
+
+ private AmbariConfigurationEntity createDummyAmbariConfigurationEntity() {
+ AmbariConfigurationEntity acEntity = new AmbariConfigurationEntity();
+ ConfigurationBaseEntity configurationBaseEntity = new ConfigurationBaseEntity();
+ acEntity.setConfigurationBaseEntity(configurationBaseEntity);
+ acEntity.setId(PK_LONG);
+ acEntity.getConfigurationBaseEntity().setConfigurationData(DATA_MOCK_STR);
+ acEntity.getConfigurationBaseEntity().setVersion(Integer.valueOf(VERSION));
+ acEntity.getConfigurationBaseEntity().setVersionTag(VERSION_TAG);
+ acEntity.getConfigurationBaseEntity().setType("ldap-config");
+
+ return acEntity;
+ }
+
+
+}
\ No newline at end of file
[19/57] [abbrv] ambari git commit: Revert "AMBARI-21898. Property
provider in-memory maps are refreshed too slowly after config updates.
(swagle)"
Posted by lp...@apache.org.
Revert "AMBARI-21898. Property provider in-memory maps are refreshed too slowly after config updates. (swagle)"
This reverts commit 8cb942393ce15efb5f6fbc9f594287c30971c296.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a5291abd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a5291abd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a5291abd
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: a5291abd9717aa0c97562c264effc1d330a4e129
Parents: a10e388
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Sep 7 12:25:06 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Sep 7 12:34:55 2017 -0400
----------------------------------------------------------------------
.../internal/AbstractProviderModule.java | 93 ++++++++++++--------
.../org/apache/ambari/server/state/Cluster.java | 5 --
.../server/state/cluster/ClusterImpl.java | 13 +--
3 files changed, 60 insertions(+), 51 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a5291abd/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index e0df487..77549f5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -67,9 +67,6 @@ import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -247,8 +244,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Inject
protected AmbariEventPublisher eventPublisher;
- @Inject
- private Clusters clusters;
/**
* The map of host components.
@@ -263,7 +258,8 @@ public abstract class AbstractProviderModule implements ProviderModule,
/**
* JMX ports read from the configs
*/
- private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>>> jmxPortMap = new ConcurrentHashMap<>(1);
+ private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>> >jmxPortMap =
+ Collections.synchronizedMap(new HashMap<String, ConcurrentMap<String, ConcurrentMap<String, String>>>());
private volatile boolean initialized = false;
@@ -521,19 +517,16 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Override
public String getPort(String clusterName, String componentName, String hostName, boolean httpsEnabled) throws SystemException {
// Parent map need not be synchronized
- ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts;
- // Still need double check to ensure single init
- if (!jmxPortMap.containsKey(clusterName)) {
+ ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts = jmxPortMap.get(clusterName);
+ if (clusterJmxPorts == null) {
synchronized (jmxPortMap) {
- if (!jmxPortMap.containsKey(clusterName)) {
+ clusterJmxPorts = jmxPortMap.get(clusterName);
+ if (clusterJmxPorts == null) {
clusterJmxPorts = new ConcurrentHashMap<>();
jmxPortMap.put(clusterName, clusterJmxPorts);
}
}
}
-
- clusterJmxPorts = jmxPortMap.get(clusterName);
-
Service.Type service = componentServiceMap.get(componentName);
if (service != null) {
@@ -865,34 +858,49 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
}
- // TODO: Fix for multi-service feature support (trunk)
- // Called from a synchornized block !
private void initProviderMaps() throws SystemException {
+ ResourceProvider provider = getResourceProvider(Resource.Type.Cluster);
- jmxPortMap.clear();
- clusterHostComponentMap = new HashMap<>();
- clusterGangliaCollectorMap = new HashMap<>();
+ Set<String> propertyIds = new HashSet<>();
+ propertyIds.add(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID);
- Map<String, Cluster> clusterMap = clusters.getClusters();
- if (MapUtils.isEmpty(clusterMap)) {
- return;
- }
+ Map<String, String> requestInfoProperties = new HashMap<>();
+ requestInfoProperties.put(ClusterResourceProvider.GET_IGNORE_PERMISSIONS_PROPERTY_ID, "true");
- for (Cluster cluster : clusterMap.values()) {
- String clusterName = cluster.getClusterName();
+ Request request = PropertyHelper.getReadRequest(propertyIds,
+ requestInfoProperties, null, null, null);
- Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
- if (hostComponentMap == null) {
- hostComponentMap = new HashMap<>();
- clusterHostComponentMap.put(clusterName, hostComponentMap);
- }
+ try {
+ jmxPortMap.clear();
+ Set<Resource> clusters = provider.getResources(request, null);
+
+ clusterHostComponentMap = new HashMap<>();
+ clusterGangliaCollectorMap = new HashMap<>();
+
+ for (Resource cluster : clusters) {
+
+ String clusterName = (String) cluster.getPropertyValue(CLUSTER_NAME_PROPERTY_ID);
- List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts();
+ // initialize the host component map and Ganglia server from the known hosts components...
+ provider = getResourceProvider(Resource.Type.HostComponent);
- if (!CollectionUtils.isEmpty(serviceComponentHosts)) {
- for (ServiceComponentHost sch : serviceComponentHosts) {
- String componentName = sch.getServiceComponentName();
- String hostName = sch.getHostName();
+ request = PropertyHelper.getReadRequest(HOST_COMPONENT_HOST_NAME_PROPERTY_ID,
+ HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
+
+ Predicate predicate = new PredicateBuilder().property(HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID).
+ equals(clusterName).toPredicate();
+
+ Set<Resource> hostComponents = provider.getResources(request, predicate);
+ Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
+
+ if (hostComponentMap == null) {
+ hostComponentMap = new HashMap<>();
+ clusterHostComponentMap.put(clusterName, hostComponentMap);
+ }
+
+ for (Resource hostComponent : hostComponents) {
+ String componentName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
+ String hostName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
hostComponentMap.put(componentName, hostName);
@@ -902,11 +910,26 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
if (componentName.equals(METRIC_SERVER)) {
// If current collector host is null or if the host or the host component not live
- // Update clusterMetricCollectorMap.
+ // Update clusterMetricCollectorMap.
metricsCollectorHAManager.addCollectorHost(clusterName, hostName);
}
}
}
+ } catch (UnsupportedPropertyException e) {
+ if (LOG.isErrorEnabled()) {
+ LOG.error("Caught UnsupportedPropertyException while trying to get the host mappings.", e);
+ }
+ throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
+ } catch (NoSuchResourceException e) {
+ if (LOG.isErrorEnabled()) {
+ LOG.error("Caught NoSuchResourceException exception while trying to get the host mappings.", e);
+ }
+ throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
+ } catch (NoSuchParentResourceException e) {
+ if (LOG.isErrorEnabled()) {
+ LOG.error("Caught NoSuchParentResourceException exception while trying to get the host mappings.", e);
+ }
+ throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
}
}
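The restored getPort path initializes the per-cluster map with the classic double-checked pattern: look up, lock, look up again. A standalone sketch of that idiom next to the single-call equivalent that ConcurrentMap.computeIfAbsent provides (the outer map is simplified to a ConcurrentHashMap here, where the restored code uses Collections.synchronizedMap):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

final class PortMapSketch {
  private final ConcurrentMap<String, ConcurrentMap<String, String>> jmxPortMap =
      new ConcurrentHashMap<>();

  // Double-checked variant, as in the restored code: check, lock, re-check.
  ConcurrentMap<String, String> portsForClusterLocked(String clusterName) {
    ConcurrentMap<String, String> ports = jmxPortMap.get(clusterName);
    if (ports == null) {
      synchronized (jmxPortMap) {
        ports = jmxPortMap.get(clusterName);
        if (ports == null) {
          ports = new ConcurrentHashMap<>();
          jmxPortMap.put(clusterName, ports);
        }
      }
    }
    return ports;
  }

  // Equivalent effect with a single call that is atomic per key.
  ConcurrentMap<String, String> portsForCluster(String clusterName) {
    return jmxPortMap.computeIfAbsent(clusterName, k -> new ConcurrentHashMap<>());
  }
}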
http://git-wip-us.apache.org/repos/asf/ambari/blob/a5291abd/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 90dd611..9597ba1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -119,11 +119,6 @@ public interface Cluster {
List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName);
/**
- * Get all ServiceComponentHosts for this cluster.
- */
- List<ServiceComponentHost> getServiceComponentHosts();
-
- /**
* Get all hosts associated with this cluster.
*
* @return collection of hosts that are associated with this cluster
http://git-wip-us.apache.org/repos/asf/ambari/blob/a5291abd/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8f1a882..3953184 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -554,17 +554,8 @@ public class ClusterImpl implements Cluster {
throw new ServiceComponentHostNotFoundException(getClusterName(),
serviceName, serviceComponentName, hostname);
}
- return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(hostname);
- }
-
- public List<ServiceComponentHost> getServiceComponentHosts() {
- List<ServiceComponentHost> serviceComponentHosts = new ArrayList<>();
- if (!serviceComponentHostsByHost.isEmpty()) {
- for (List<ServiceComponentHost> schList : serviceComponentHostsByHost.values()) {
- serviceComponentHosts.addAll(schList);
- }
- }
- return Collections.unmodifiableList(serviceComponentHosts);
+ return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(
+ hostname);
}
@Override
[45/57] [abbrv] ambari git commit: AMBARI-21307 Added new resource
related changes to the db creation ddl-s
Posted by lp...@apache.org.
AMBARI-21307 Added new resource related changes to the db creation ddl-s
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0d3e842b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0d3e842b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0d3e842b
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 0d3e842b74f3bf6e09e96e1a562eea843020e43c
Parents: 700bce9
Author: lpuskas <lp...@apache.org>
Authored: Thu Jul 6 18:15:18 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:00 2017 +0200
----------------------------------------------------------------------
.../AmbariConfigurationResourceProvider.java | 12 ++--
.../server/orm/dao/AmbariConfigurationDAO.java | 65 ++------------------
.../apache/ambari/server/orm/dao/DaoUtils.java | 13 +---
.../main/resources/Ambari-DDL-Derby-CREATE.sql | 21 +++++++
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 20 ++++++
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 20 ++++++
.../resources/Ambari-DDL-Postgres-CREATE.sql | 14 ++---
.../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 20 ++++++
.../resources/Ambari-DDL-SQLServer-CREATE.sql | 20 ++++++
9 files changed, 120 insertions(+), 85 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
index 5e5af9e..e8f186d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
@@ -97,14 +97,14 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
}
}
- private static Set<String> properties = Sets.newHashSet(
+ private static Set<String> PROPERTIES = Sets.newHashSet(
ResourcePropertyId.ID.getPropertyId(),
ResourcePropertyId.TYPE.getPropertyId(),
ResourcePropertyId.VERSION.getPropertyId(),
ResourcePropertyId.VERSION_TAG.getPropertyId(),
ResourcePropertyId.DATA.getPropertyId());
- private static Map<Resource.Type, String> pkPropertyMap = Collections.unmodifiableMap(
+ private static Map<Resource.Type, String> PK_PROPERTY_MAP = Collections.unmodifiableMap(
new HashMap<Resource.Type, String>() {{
put(Resource.Type.AmbariConfiguration, ResourcePropertyId.ID.getPropertyId());
}}
@@ -117,7 +117,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
private Gson gson;
protected AmbariConfigurationResourceProvider() {
- super(properties, pkPropertyMap);
+ super(PROPERTIES, PK_PROPERTY_MAP);
setRequiredCreateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
setRequiredDeleteAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
@@ -126,7 +126,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
@Override
protected Set<String> getPKPropertyIds() {
- return Sets.newHashSet("AmbariConfiguration/id");
+ return Sets.newHashSet(ResourcePropertyId.ID.getPropertyId());
}
@Override
@@ -137,7 +137,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
AmbariConfigurationEntity ambariConfigurationEntity = getEntityFromRequest(request);
LOGGER.info("Persisting new ambari configuration: {} ", ambariConfigurationEntity);
- ambariConfigurationDAO.persist(ambariConfigurationEntity);
+ ambariConfigurationDAO.create(ambariConfigurationEntity);
return getRequestStatus(null);
}
@@ -170,7 +170,7 @@ public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResou
} else {
LOGGER.debug("Deleting amari configuration with id: {}", idFromRequest);
try {
- ambariConfigurationDAO.deleteById(idFromRequest);
+ ambariConfigurationDAO.removeByPK(idFromRequest);
} catch (IllegalStateException e) {
throw new NoSuchResourceException(e.getMessage());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
index dea37eb..c29a423 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
@@ -14,79 +14,24 @@
package org.apache.ambari.server.orm.dao;
-import java.util.List;
-
import javax.inject.Inject;
-import javax.inject.Provider;
import javax.inject.Singleton;
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import org.apache.ambari.server.orm.RequiresSession;
import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.inject.persist.Transactional;
/**
* DAO dealing with ambari configuration related JPA operations.
*/
@Singleton
-// todo extend CrudDao (amend crud dao to handle NPEs)
-public class AmbariConfigurationDAO {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(AmbariConfigurationDAO.class);
-
- @Inject
- private Provider<EntityManager> entityManagerProvider;
+public class AmbariConfigurationDAO extends CrudDAO<AmbariConfigurationEntity, Long> {
- /**
- * DAO utilities for dealing mostly with {@link TypedQuery} results.
- */
@Inject
- private DaoUtils daoUtils;
-
- public AmbariConfigurationEntity findByid(Long id) {
- return entityManagerProvider.get().find(AmbariConfigurationEntity.class, id);
- }
-
- @RequiresSession
- @Transactional
- public void persist(AmbariConfigurationEntity entity) {
- LOGGER.debug("Persisting ambari configuration: {}", entity);
- entityManagerProvider.get().persist(entity);
+ public AmbariConfigurationDAO() {
+ super(AmbariConfigurationEntity.class);
}
- @RequiresSession
- public List<AmbariConfigurationEntity> findAll() {
- TypedQuery<AmbariConfigurationEntity> query = entityManagerProvider.get().createNamedQuery(
- "AmbariConfigurationEntity.findAll", AmbariConfigurationEntity.class);
- return daoUtils.selectList(query);
+ public void create(AmbariConfigurationEntity entity) {
+ super.create(entity);
}
-
-
- @RequiresSession
- @Transactional
- public void deleteById(Long ambariConfigurationId) {
-
- if (ambariConfigurationId == null) {
- throw new IllegalArgumentException("No Ambari Configuration id provided.");
- }
-
- LOGGER.debug("Removing Ambari Configuration with id :{}", ambariConfigurationId);
-
- AmbariConfigurationEntity ambariConfigurationEntity = findByid(ambariConfigurationId);
- if (ambariConfigurationEntity == null) {
- String msg = String.format("No Ambari Configuration found with id: %s", ambariConfigurationId);
- LOGGER.debug(msg);
- throw new IllegalStateException(msg);
- }
-
- entityManagerProvider.get().remove(ambariConfigurationEntity);
- LOGGER.debug("Ambari Configuration with id: {}", ambariConfigurationId);
- }
-
-
}
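For context, the DAO now inherits its CRUD plumbing from CrudDAO instead of hand-rolling it. A minimal caller sketch, assuming CrudDAO's usual findByPK/findAll signatures (create and removeByPK are the two actually exercised by the resource provider hunk above):

    // Sketch only: entity accessors such as getId() are assumed, not shown in this commit.
    AmbariConfigurationEntity entity = new AmbariConfigurationEntity();
    ambariConfigurationDAO.create(entity);                               // replaces persist(entity)

    AmbariConfigurationEntity loaded = ambariConfigurationDAO.findByPK(entity.getId());
    List<AmbariConfigurationEntity> all = ambariConfigurationDAO.findAll();

    ambariConfigurationDAO.removeByPK(entity.getId());                   // replaces deleteById(id)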
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/DaoUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/DaoUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/DaoUtils.java
index cd3faf0..e6112ad 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/DaoUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/DaoUtils.java
@@ -18,8 +18,6 @@
package org.apache.ambari.server.orm.dao;
-import static org.apache.ambari.server.orm.DBAccessor.DbType;
-
import java.util.Collections;
import java.util.List;
@@ -31,19 +29,10 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
-import org.apache.ambari.server.orm.DBAccessor;
-
-import com.google.inject.Inject;
import com.google.inject.Singleton;
@Singleton
public class DaoUtils {
- @Inject
- private DBAccessor dbAccessor;
-
- public DbType getDbType() {
- return dbAccessor.getDbType();
- }
public <T> List<T> selectAll(EntityManager entityManager, Class<T> entityClass) {
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
@@ -92,7 +81,7 @@ public class DaoUtils {
public void setParameters(Query query, Object... parameters) {
for (int i = 0; i < parameters.length; i++) {
- query.setParameter(i+1, parameters[i]);
+ query.setParameter(i + 1, parameters[i]);
}
}
}
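The i + 1 offset in setParameters exists because JPA positional parameters are 1-based while the varargs array is 0-based. A hedged usage sketch (the JPQL string and field names are illustrative, not from this commit):

    // Binds ?1 and ?2, in order, from the 0-based varargs array.
    Query query = entityManager.createQuery(
        "SELECT e FROM AmbariConfigurationEntity e WHERE e.type = ?1 AND e.version = ?2");
    daoUtils.setParameters(query, "ldap-configuration", 1L);
    List<?> results = query.getResultList();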
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index e7359a7..c6861f3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -84,6 +84,23 @@ CREATE TABLE clusterconfig (
CONSTRAINT UQ_config_type_tag UNIQUE (version_tag, type_name, cluster_id),
CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+CREATE TABLE configuration_base (
+ id BIGINT NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version BIGINT NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data VARCHAR(3000) NOT NULL,
+ attributes VARCHAR(3000),
+ create_timestamp BIGINT NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id BIGINT NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
+
CREATE TABLE serviceconfig (
service_config_id BIGINT NOT NULL,
cluster_id BIGINT NOT NULL,
@@ -1145,6 +1162,8 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
union all
select 'servicecomponent_version_id_seq', 0 FROM SYSIBM.SYSDUMMY1
union all
+ select 'configuration_id_seq', 0 FROM SYSIBM.SYSDUMMY1
+ union all
select 'hostcomponentdesiredstate_id_seq', 0 FROM SYSIBM.SYSDUMMY1;
@@ -1245,6 +1264,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM SYSIBM.SYSDUMMY1 UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage ambari configuration' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' FROM SYSIBM.SYSDUMMY1 UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' FROM SYSIBM.SYSDUMMY1 UNION ALL
@@ -1446,6 +1466,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
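The configuration_base/ambari_configuration pair uses a shared primary key: the child table's id is both its primary key and a foreign key to the base table. This is the layout that JPA joined-table inheritance produces; a mapping sketch under that assumption (the actual entity classes are not part of this hunk, so names and fields here are illustrative):

    import javax.persistence.*;

    // Sketch only: the field list mirrors the DDL above; the real entities may differ.
    @Entity
    @Table(name = "configuration_base")
    @Inheritance(strategy = InheritanceType.JOINED)
    @TableGenerator(name = "configuration_id_generator", table = "ambari_sequences",
        pkColumnName = "sequence_name", valueColumnName = "sequence_value",
        pkColumnValue = "configuration_id_seq")
    public class ConfigurationBaseEntity {
      @Id
      @GeneratedValue(strategy = GenerationType.TABLE, generator = "configuration_id_generator")
      @Column(name = "id")
      private Long id;

      @Column(name = "version_tag")      private String versionTag;
      @Column(name = "version")          private Long version;
      @Column(name = "type")             private String type;
      @Column(name = "data")             private String data;
      @Column(name = "attributes")       private String attributes;
      @Column(name = "create_timestamp") private Long createTimestamp;
    }

    @Entity
    @Table(name = "ambari_configuration") // id is the PK here and an FK to configuration_base(id)
    public class AmbariConfigurationJoinedEntity extends ConfigurationBaseEntity {
    }

This also explains the single configuration_id_seq row added to ambari_sequences in the same hunk: both tables draw ids from one sequence.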
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index c1e1953..502a04f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -104,6 +104,23 @@ CREATE TABLE clusterconfig (
CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+CREATE TABLE configuration_base (
+ id BIGINT NOT NULL,
+ version_tag VARCHAR(100) NOT NULL,
+ version BIGINT NOT NULL,
+ type VARCHAR(100) NOT NULL,
+ data LONGTEXT NOT NULL,
+ attributes LONGTEXT,
+ create_timestamp BIGINT NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id BIGINT NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
+
CREATE TABLE serviceconfig (
service_config_id BIGINT NOT NULL,
cluster_id BIGINT NOT NULL,
@@ -1109,6 +1126,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES
('remote_cluster_id_seq', 0),
('remote_cluster_service_id_seq', 0),
('servicecomponent_version_id_seq', 0),
+ ('configuration_id_seq', 0),
('hostcomponentdesiredstate_id_seq', 0);
INSERT INTO adminresourcetype (resource_type_id, resource_type_name) VALUES
@@ -1193,6 +1211,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage ambari configuration' UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
@@ -1398,6 +1417,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index c0b2f0c..d82d64b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -84,6 +84,23 @@ CREATE TABLE clusterconfig (
CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+CREATE TABLE configuration_base (
+ id NUMBER(19) NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version NUMBER(19) NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data CLOB NOT NULL,
+ attributes CLOB,
+ create_timestamp NUMBER(19) NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id NUMBER(19) NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
+
CREATE TABLE serviceconfig (
service_config_id NUMBER(19) NOT NULL,
cluster_id NUMBER(19) NOT NULL,
@@ -1088,6 +1105,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('ambari_oper
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_id_seq', 0);
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_service_id_seq', 0);
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_version_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configuration_id_seq', 0);
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentdesiredstate_id_seq', 0);
INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariSchemaVersion}');
@@ -1191,6 +1209,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM dual UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM dual UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage ambari configuration' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' FROM dual UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' FROM dual UNION ALL
@@ -1396,6 +1415,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 3605783..0809ab5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -67,13 +67,13 @@ CREATE TABLE clusters (
);
CREATE TABLE configuration_base (
- id BIGINT NOT NULL,
- version_tag VARCHAR(255) NOT NULL,
- version BIGINT NOT NULL,
- type VARCHAR(255) NOT NULL,
- data TEXT NOT NULL,
- attributes TEXT,
- create_timestamp BIGINT NOT NULL,
+ id BIGINT NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version BIGINT NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data TEXT NOT NULL,
+ attributes TEXT,
+ create_timestamp BIGINT NOT NULL,
CONSTRAINT PK_configuration_base PRIMARY KEY (id)
);
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 7f39535..8d3882d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -83,6 +83,23 @@ CREATE TABLE clusterconfig (
CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+CREATE TABLE configuration_base (
+ id NUMERIC(19) NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version NUMERIC(19) NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data TEXT NOT NULL,
+ attributes TEXT,
+ create_timestamp NUMERIC(19) NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id NUMERIC(19) NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
+
CREATE TABLE serviceconfig (
service_config_id NUMERIC(19) NOT NULL,
cluster_id NUMERIC(19) NOT NULL,
@@ -1087,6 +1104,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_clus
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_service_id_seq', 0);
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_version_id_seq', 0);
INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentdesiredstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configuration_id_seq', 0);
insert into adminresourcetype (resource_type_id, resource_type_name)
select 1, 'AMBARI'
@@ -1187,6 +1205,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage ambari configuration' UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
@@ -1392,6 +1411,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
http://git-wip-us.apache.org/repos/asf/ambari/blob/0d3e842b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index aa06c4d..010ddf4 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -97,6 +97,23 @@ CREATE TABLE clusterconfig (
CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+CREATE TABLE configuration_base (
+ id BIGINT NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version BIGINT NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data VARCHAR(MAX) NOT NULL,
+ attributes VARCHAR(MAX),
+ create_timestamp BIGINT NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id BIGINT NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
+
CREATE TABLE serviceconfig (
service_config_id BIGINT NOT NULL,
cluster_id BIGINT NOT NULL,
@@ -1112,6 +1129,7 @@ BEGIN TRANSACTION
('remote_cluster_id_seq', 0),
('remote_cluster_service_id_seq', 0),
('servicecomponent_version_id_seq', 0),
+ ('configuration_id_seq', 0),
('hostcomponentdesiredstate_id_seq', 0);
insert into adminresourcetype (resource_type_id, resource_type_name)
@@ -1200,6 +1218,7 @@ BEGIN TRANSACTION
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage ambari configuration' UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
@@ -1405,6 +1424,7 @@ BEGIN TRANSACTION
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
[26/57] [abbrv] ambari git commit: AMBARI-21904 Remove redundant
smokeuser entry from Ranger KMS Kerberos descriptor (mugdha)
Posted by lp...@apache.org.
AMBARI-21904 Remove redundant smokeuser entry from Ranger KMS Kerberos descriptor (mugdha)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a0594787
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a0594787
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a0594787
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: a05947873b39d646575e6568e9b7cd086a10fac3
Parents: b7f53dc
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Fri Sep 8 12:08:00 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Fri Sep 8 14:24:18 2017 +0530
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog260.java | 40 +++++++
.../RANGER_KMS/0.5.0.2.3/kerberos.json | 6 --
.../RANGER_KMS/1.0.0.3.0/kerberos.json | 6 --
.../HDP/2.5/services/RANGER_KMS/kerberos.json | 6 --
.../server/upgrade/UpgradeCatalog260Test.java | 53 +++++++++
.../test_kerberos_descriptor_ranger_kms.json | 108 +++++++++++++++++++
6 files changed, 201 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index d1de998..d05f39a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -30,10 +30,17 @@ import javax.persistence.Query;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -387,6 +394,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
setUnmappedForOrphanedConfigs();
removeSupersetFromDruid();
ensureZeppelinProxyUserConfigs();
+ updateKerberosDescriptorArtifacts();
}
public int getCurrentVersionID() throws AmbariException, SQLException {
@@ -495,4 +503,36 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
}
}
}
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+ if (artifactEntity != null) {
+ Map<String, Object> data = artifactEntity.getArtifactData();
+ if (data != null) {
+ final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+ if (kerberosDescriptor != null) {
+ KerberosServiceDescriptor rangerKmsServiceDescriptor = kerberosDescriptor.getService("RANGER_KMS");
+ if (rangerKmsServiceDescriptor != null) {
+
+ KerberosIdentityDescriptor rangerKmsServiceIdentity = rangerKmsServiceDescriptor.getIdentity("/smokeuser");
+ if (rangerKmsServiceIdentity != null) {
+ rangerKmsServiceDescriptor.removeIdentity("/smokeuser");
+ }
+ KerberosComponentDescriptor rangerKmsComponentDescriptor = rangerKmsServiceDescriptor.getComponent("RANGER_KMS_SERVER");
+ if (rangerKmsComponentDescriptor != null) {
+ KerberosIdentityDescriptor rangerKmsComponentIdentity = rangerKmsComponentDescriptor.getIdentity("/smokeuser");
+ if (rangerKmsComponentIdentity != null) {
+ rangerKmsComponentDescriptor.removeIdentity("/smokeuser");
+ }
+ }
+ }
+ artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+ artifactDAO.merge(artifactEntity);
+ }
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/kerberos.json
index 69d6b6c..208a04d 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/kerberos.json
@@ -8,9 +8,6 @@
"keytab": {
"configuration": "kms-site/hadoop.kms.authentication.kerberos.keytab"
}
- },
- {
- "name": "/smokeuser"
}
],
"configurations": [
@@ -33,9 +30,6 @@
"keytab": {
"configuration": "kms-site/hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab"
}
- },
- {
- "name": "/smokeuser"
}
]
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/kerberos.json
index a54783e..8bf4cd8 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/kerberos.json
@@ -8,9 +8,6 @@
"keytab": {
"configuration": "kms-site/hadoop.kms.authentication.kerberos.keytab"
}
- },
- {
- "name": "/smokeuser"
}
],
"auth_to_local_properties" : [
@@ -48,9 +45,6 @@
}
},
{
- "name": "/smokeuser"
- },
- {
"name": "rangerkms",
"principal": {
"value": "rangerkms/_HOST@${realm}",
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/kerberos.json
index a54783e..8bf4cd8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/kerberos.json
@@ -8,9 +8,6 @@
"keytab": {
"configuration": "kms-site/hadoop.kms.authentication.kerberos.keytab"
}
- },
- {
- "name": "/smokeuser"
}
],
"auth_to_local_properties" : [
@@ -48,9 +45,6 @@
}
},
{
- "name": "/smokeuser"
- },
- {
"name": "rangerkms",
"principal": {
"value": "rangerkms/_HOST@${realm}",
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 2a62f2e..33c29bc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -22,6 +22,7 @@ import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
@@ -30,6 +31,8 @@ import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
+import java.io.File;
+import java.net.URL;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -51,11 +54,17 @@ import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.stack.OsFamily;
import org.easymock.Capture;
import org.easymock.EasyMockRunner;
@@ -620,4 +629,48 @@ public class UpgradeCatalog260Test {
Assert.assertEquals("existing_value", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.hosts"));
Assert.assertEquals("*", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.groups"));
}
+
+ @Test
+ public void testUpdateKerberosDescriptorArtifact() throws Exception {
+
+ URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_ranger_kms.json");
+ Assert.assertNotNull(systemResourceURL);
+
+ final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(new File(systemResourceURL.getFile()));
+ Assert.assertNotNull(kerberosDescriptor);
+
+ KerberosServiceDescriptor serviceDescriptor;
+ serviceDescriptor = kerberosDescriptor.getService("RANGER_KMS");
+ Assert.assertNotNull(serviceDescriptor);
+ Assert.assertNotNull(serviceDescriptor.getIdentity("/smokeuser"));
+
+ KerberosComponentDescriptor componentDescriptor;
+ componentDescriptor = serviceDescriptor.getComponent("RANGER_KMS_SERVER");
+ Assert.assertNotNull(componentDescriptor);
+ Assert.assertNotNull(componentDescriptor.getIdentity("/smokeuser"));
+
+ ArtifactEntity artifactEntity = createMock(ArtifactEntity.class);
+
+ expect(artifactEntity.getArtifactData()).andReturn(kerberosDescriptor.toMap()).once();
+
+ Capture<Map<String, Object>> captureMap = newCapture();
+ artifactEntity.setArtifactData(capture(captureMap));
+ expectLastCall().once();
+
+ ArtifactDAO artifactDAO = createMock(ArtifactDAO.class);
+ expect(artifactDAO.merge(artifactEntity)).andReturn(artifactEntity).atLeastOnce();
+
+ replay(artifactDAO, artifactEntity);
+
+ UpgradeCatalog260 upgradeCatalog260 = createMockBuilder(UpgradeCatalog260.class).createMock();
+ upgradeCatalog260.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+ verify(artifactDAO, artifactEntity);
+
+ KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(captureMap.getValue());
+ Assert.assertNotNull(kerberosDescriptorUpdated);
+
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("/smokeuser"));
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/smokeuser"));
+
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0594787/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
new file mode 100644
index 0000000..d7e048f
--- /dev/null
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
@@ -0,0 +1,108 @@
+{
+ "properties": {
+ "realm": "${kerberos-env/realm}",
+ "keytab_dir": "/etc/security/keytabs"
+ },
+ "identities": [
+ {
+ "name": "spnego",
+ "principal": {
+ "value": "HTTP/_HOST@${realm}",
+ "type": "service"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/spnego.service.keytab",
+ "owner": {
+ "name": "root",
+ "access": "r"
+ },
+ "group": {
+ "name": "${cluster-env/user_group}",
+ "access": "r"
+ }
+ }
+ }
+ ],
+ "services": [
+ {
+ "name": "RANGER_KMS",
+ "identities": [
+ {
+ "name": "/spnego",
+ "keytab": {
+ "configuration": "kms-site/hadoop.kms.authentication.kerberos.keytab"
+ }
+ },
+ {
+ "name": "/smokeuser"
+ }
+ ],
+ "auth_to_local_properties" : [
+ "kms-site/hadoop.kms.authentication.kerberos.name.rules"
+ ],
+ "configurations": [
+ {
+ "kms-site": {
+ "hadoop.kms.authentication.type": "kerberos",
+ "hadoop.kms.authentication.kerberos.principal": "*"
+ }
+ },
+ {
+ "ranger-kms-audit": {
+ "xasecure.audit.jaas.Client.loginModuleName": "com.sun.security.auth.module.Krb5LoginModule",
+ "xasecure.audit.jaas.Client.loginModuleControlFlag": "required",
+ "xasecure.audit.jaas.Client.option.useKeyTab": "true",
+ "xasecure.audit.jaas.Client.option.storeKey": "false",
+ "xasecure.audit.jaas.Client.option.serviceName": "solr",
+ "xasecure.audit.destination.solr.force.use.inmemory.jaas.config": "true"
+ }
+ }
+ ],
+ "components": [
+ {
+ "name": "RANGER_KMS_SERVER",
+ "identities": [
+ {
+ "name": "/spnego",
+ "principal": {
+ "configuration": "kms-site/hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal"
+ },
+ "keytab": {
+ "configuration": "kms-site/hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab"
+ }
+ },
+ {
+ "name": "/smokeuser"
+ },
+ {
+ "name": "rangerkms",
+ "principal": {
+ "value": "rangerkms/_HOST@${realm}",
+ "type" : "service",
+ "configuration": "dbks-site/ranger.ks.kerberos.principal",
+ "local_username" : "keyadmin"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/rangerkms.service.keytab",
+ "owner": {
+ "name": "${kms-env/kms_user}",
+ "access": "r"
+ },
+ "configuration": "dbks-site/ranger.ks.kerberos.keytab"
+ }
+ },
+ {
+ "name": "/RANGER_KMS/RANGER_KMS_SERVER/rangerkms",
+ "principal": {
+ "configuration": "ranger-kms-audit/xasecure.audit.jaas.Client.option.principal"
+ },
+ "keytab": {
+ "configuration": "ranger-kms-audit/xasecure.audit.jaas.Client.option.keyTab"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
[51/57] [abbrv] ambari git commit: AMBARI-21307 Draft implementation
of the group related attributes
Posted by lp...@apache.org.
AMBARI-21307 Draft implementation of the group related attributes
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d8813ffa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d8813ffa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d8813ffa
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: d8813ffa29d0f8aa8c892f6f36c90e3be3f3b0b8
Parents: 20f1ad2
Author: lpuskas <lp...@apache.org>
Authored: Tue Aug 8 13:54:29 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:01 2017 +0200
----------------------------------------------------------------------
.../server/ldap/AmbariLdapConfiguration.java | 22 +-
.../ldap/LdapConfigurationValidatorService.java | 34 +--
.../apache/ambari/server/ldap/LdapModule.java | 4 +-
.../server/ldap/service/AmbariLdapFacade.java | 25 +-
.../ldap/service/LdapConnectionService.java | 35 +++
.../ambari/server/ldap/service/LdapFacade.java | 9 +-
.../ad/AdLdapConfigurationValidatorService.java | 177 --------------
...efaultLdapConfigurationValidatorService.java | 232 +++++++++++++++++++
.../ad/DefaultLdapConnectionService.java | 63 +++++
.../service/ad/LdapConfigurationConverter.java | 50 ----
...AdLdapConfigurationValidatorServiceTest.java | 129 -----------
...ltLdapConfigurationValidatorServiceTest.java | 156 +++++++++++++
12 files changed, 552 insertions(+), 384 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
index 519f400..a6ff80b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
@@ -53,7 +53,7 @@ public class AmbariLdapConfiguration {
GROUP_OBJECT_CLASS("ambari.ldap.group.object.class"),
GROUP_NAME_ATTRIBUTE("ambari.ldap.group.name.attribute"),
GROUP_MEMBER_ATTRIBUTE("ambari.ldap.group.member.attribute"),
- GROUP_SEARCH_BASE("ambari.ldap.group.member.attribute"),
+ GROUP_SEARCH_BASE("ambari.ldap.group.search.base"),
DN_ATTRIBUTE("authentication.ldap.dnAttribute");
private String propertyName;
@@ -126,4 +126,24 @@ public class AmbariLdapConfiguration {
return (String) configurationValue(LdapConfigProperty.USER_NAME_ATTRIBUTE);
}
+ public String userSearchBase() {
+ return (String) configurationValue(LdapConfigProperty.USER_SEARCH_BASE);
+ }
+
+ public String groupObjectClass() {
+ return (String) configurationValue(LdapConfigProperty.GROUP_OBJECT_CLASS);
+ }
+
+ public String groupNameAttribute() {
+ return (String) configurationValue(LdapConfigProperty.GROUP_NAME_ATTRIBUTE);
+ }
+
+ public String groupMemberAttribute() {
+ return (String) configurationValue(LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE);
+ }
+
+ public String groupSearchBase() {
+ return (String) configurationValue(LdapConfigProperty.GROUP_SEARCH_BASE);
+ }
+
}
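The substantive fix here is GROUP_SEARCH_BASE: it previously mapped to "ambari.ldap.group.member.attribute", so the group search base and the member attribute were read from the same key. A sketch of the now-distinct group properties (construction through the assisted-inject LdapConfigurationFactory is assumed, and the factory method name is illustrative):

    Map<String, Object> props = new HashMap<>();
    props.put("ambari.ldap.group.object.class", "groupOfNames");
    props.put("ambari.ldap.group.name.attribute", "cn");
    props.put("ambari.ldap.group.member.attribute", "member");
    props.put("ambari.ldap.group.search.base", "ou=groups,dc=example,dc=com");

    AmbariLdapConfiguration config = ldapConfigurationFactory.createLdapConfiguration(props);
    String searchBase = config.groupSearchBase();   // now the search base, no longer the member attribute
    String memberAttr = config.groupMemberAttribute(); // "member"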
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
index 4667721..7efa3b7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
@@ -14,8 +14,11 @@
package org.apache.ambari.server.ldap;
+import java.util.Set;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.directory.ldap.client.api.LdapConnection;
/**
* Collection of operations for validating ldap configuration.
@@ -26,27 +29,32 @@ public interface LdapConfigurationValidatorService {
/**
* Tests the connection based on the provided configuration.
*
- * @param configuration the ambari ldap configuration instance
+ * @param ldapConnection connection instance
+ * @param configuration the ambari ldap configuration instance
* @throws AmbariLdapException if the connection is not possible
*/
- void checkConnection(AmbariLdapConfiguration configuration) throws AmbariLdapException;
+ void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration configuration) throws AmbariLdapException;
+
/**
- * Checks whether the group related LDAP attributes in the configuration are correct.
+ * Validates the user-related LDAP configuration settings.
+ * Implementations communicate with the LDAP server (search, bind) to validate the attributes in the provided
+ * configuration instance.
*
- * @param configuration the configuration instance holding the available properties
- * @throws AmbariException if the attributes are not valid
+ * @param ldapConnection connection instance used to connect to the LDAP server
+ * @param testUserName the test username
+ * @param testPassword the test password
+ * @param configuration the available ldap configuration
+ * @return The DN of the found user entry
+ * @throws AmbariLdapException if the connection couldn't be established
*/
- void checkGroupAttributes(AmbariLdapConfiguration configuration) throws AmbariException;
+ String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration configuration) throws AmbariLdapException;
/**
- * Tries to connect to the LDAP server with the given credentials.
- * Primarily used for testing the user before performing other operations (eg. attribute detection)s
+ * Checks whether the group-related LDAP attributes in the configuration are correct.
*
- * @param username the username
- * @param password the password
- * @param configuration the available ldap configuration
- * @throws AmbariException if the connection couldn't be estabilished
+ * @param ldapConnection connection instance used to connect to the LDAP server
+ * @param userDn the DN of the user entry whose group memberships are checked
+ * @param ambariLdapConfiguration the available ldap configuration
+ * @return the set of groups found for the user
+ * @throws AmbariLdapException if the attributes are not valid
*/
- void checkUserAttributes(String username, String password, AmbariLdapConfiguration configuration) throws AmbariException;
+ Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
+
}
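The three operations are meant to run as a pipeline over a single connection; a hedged caller sketch (the facade below wires them in exactly this order, and the variable names here are illustrative):

    LdapConnection connection = ldapConnectionService.createLdapConnection(configuration);
    validatorService.checkConnection(connection, configuration);            // 1. can we bind?
    String userDn = validatorService.checkUserAttributes(connection,        // 2. do the user attributes resolve?
        "probe-user", "probe-password", configuration);
    Set<String> groups = validatorService.checkGroupAttributes(connection,  // 3. do the group attributes resolve?
        userDn, configuration);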
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
index 625ce8b..545f220 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -17,7 +17,7 @@ package org.apache.ambari.server.ldap;
import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
import org.apache.ambari.server.ldap.service.LdapFacade;
-import org.apache.ambari.server.ldap.service.ad.AdLdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.ad.DefaultLdapConfigurationValidatorService;
import com.google.inject.AbstractModule;
import com.google.inject.assistedinject.FactoryModuleBuilder;
@@ -30,7 +30,7 @@ public class LdapModule extends AbstractModule {
@Override
protected void configure() {
bind(LdapFacade.class).to(AmbariLdapFacade.class);
- bind(LdapConfigurationValidatorService.class).to(AdLdapConfigurationValidatorService.class);
+ bind(LdapConfigurationValidatorService.class).to(DefaultLdapConfigurationValidatorService.class);
install(new FactoryModuleBuilder().build(LdapConfigurationFactory.class));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
index abd028a..abb464b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
@@ -16,13 +16,14 @@
package org.apache.ambari.server.ldap.service;
import java.util.Map;
+import java.util.Set;
import javax.inject.Inject;
import javax.inject.Singleton;
-import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.directory.ldap.client.api.LdapConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,17 +52,21 @@ public class AmbariLdapFacade implements LdapFacade {
private LdapConfigurationValidatorService ldapConfigurationValidatorService;
@Inject
+ private LdapConnectionService ldapConnectionService;
+
+ @Inject
public AmbariLdapFacade() {
}
@Override
- public void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException {
+ public void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
try {
LOGGER.info("Validating LDAP connection related configuration based on: {}", ambariLdapConfiguration);
- ldapConfigurationValidatorService.checkConnection(ambariLdapConfiguration);
+ LdapConnection connection = ldapConnectionService.createLdapConnection(ambariLdapConfiguration);
+ ldapConfigurationValidatorService.checkConnection(connection, ambariLdapConfiguration);
} catch (AmbariLdapException e) {
LOGGER.error("Validating LDAP connection configuration failed", e);
- throw new AmbariException("Validating LDAP connection configuration failed", e);
+ throw e;
}
LOGGER.info("Validating LDAP connection related configuration: SUCCESS");
}
@@ -74,7 +79,7 @@ public class AmbariLdapFacade implements LdapFacade {
}
@Override
- public void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ldapConfiguration) throws AmbariException {
+ public void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ldapConfiguration) throws AmbariLdapException {
String userName = getTestUserNameFromParameters(parameters);
String testUserPass = getTestUserPasswordFromParameters(parameters);
@@ -82,8 +87,14 @@ public class AmbariLdapFacade implements LdapFacade {
throw new IllegalArgumentException("No test user available for testing LDAP attributes");
}
- LOGGER.info("Testing LDAP attributes with test user: {}", userName);
- ldapConfigurationValidatorService.checkUserAttributes(userName, testUserPass, ldapConfiguration);
+ LdapConnection ldapConnection = ldapConnectionService.createLdapConnection(ldapConfiguration);
+
+ LOGGER.info("Testing LDAP user attributes with test user: {}", userName);
+ String userDn = ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, userName, testUserPass, ldapConfiguration);
+
+ LOGGER.info("Testing LDAP group attributes with test user dn: {}", userDn);
+ Set<String> groups = ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, userDn, ldapConfiguration);
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapConnectionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapConnectionService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapConnectionService.java
new file mode 100644
index 0000000..50ee8ed
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapConnectionService.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+
+/**
+ * Contract defining factory methods for creating LDAP connection instances.
+ * Implementations contain the logic for creating different connection instances and the associated boilerplate code.
+ */
+public interface LdapConnectionService {
+
+ /**
+ * Creates an LdapConnection instance based on the provided configuration
+ *
+ * @param ambariLdapConfiguration configuration instance with information for creating the connection instance
+ * @return a set up LdapConnection instance
+ */
+ LdapNetworkConnection createLdapConnection(AmbariLdapConfiguration ambariLdapConfiguration);
+
+
+}
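The commit's DefaultLdapConnectionService (63 lines per the diffstat above) is not included in this excerpt; a minimal implementation sketch against the Apache Directory API, with the host/port/SSL accessor names on AmbariLdapConfiguration assumed rather than taken from the commit:

    import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
    import org.apache.directory.ldap.client.api.LdapConnectionConfig;
    import org.apache.directory.ldap.client.api.LdapNetworkConnection;

    // Sketch only, not the committed DefaultLdapConnectionService.
    public class SimpleLdapConnectionService implements LdapConnectionService {

      @Override
      public LdapNetworkConnection createLdapConnection(AmbariLdapConfiguration configuration) {
        LdapConnectionConfig config = new LdapConnectionConfig();
        config.setLdapHost(configuration.ldapServerHost()); // accessor name assumed
        config.setLdapPort(configuration.ldapServerPort()); // accessor name assumed
        config.setUseSsl(configuration.useSSL());           // accessor name assumed
        return new LdapNetworkConnection(config);
      }
    }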
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
index 38553f0..7bb1198 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
@@ -16,7 +16,6 @@ package org.apache.ambari.server.ldap.service;
import java.util.Map;
-import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
/**
@@ -29,9 +28,9 @@ public interface LdapFacade {
* Tests the connection to the LDAP server based on the provided configuration.
*
* @param ambariLdapConfiguration the available ldap related configuration
- * @throws AmbariException if the connection fails or other problems occur during the operation
+ * @throws AmbariLdapException if the connection fails or other problems occur during the operation
*/
- void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException;
+ void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
/**
@@ -46,7 +45,7 @@ public interface LdapFacade {
*
* @param parameters a map of property name and value pairs holding information to facilitate checking the attributes
* @param ambariLdapConfiguration configuration instance with available attributes
- * @throws AmbariException if the attribute checking fails
+ * @throws AmbariLdapException if the attribute checking fails
*/
- void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException;
+ void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
deleted file mode 100644
index 11e8655..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import java.io.IOException;
-import java.util.List;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.AmbariLdapException;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.cursor.SearchCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.apache.directory.ldap.client.api.search.FilterBuilder;
-import org.apache.directory.shared.ldap.constants.SchemaConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-/**
- * Implementation of the validation logic using the Apache Directory API.
- */
-@Singleton
-public class AdLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(AdLdapConfigurationValidatorService.class);
-
- @Inject
- private LdapConfigurationConverter ldapConfigurationConverter;
-
- /**
- * Facilitating the instantiation
- */
- @Inject
- public AdLdapConfigurationValidatorService() {
- }
-
- @Override
- public void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- try {
- LOGGER.info("Testing the connection based on the configuration: {}", ambariLdapConfiguration);
-
- LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
- LdapNetworkConnection connection = new LdapNetworkConnection(connectionConfig);
-
- if (ambariLdapConfiguration.bindAnonimously()) {
- LOGGER.debug("Binding anonimously ...");
- connection.bind();
- } else {
- LOGGER.debug("Binding with manager DN and manager password ...");
- connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
- }
-
- if (connection.isConnected()) {
- LOGGER.info("Successfully connected to the LDAP server.");
- }
-
- connection.close();
-
- } catch (Exception e) {
- LOGGER.warn("Could not bind to the LDAP server base don the provided configuration ...");
- throw new AmbariLdapException(e);
- }
- }
-
-
- /**
- * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
- * Attributes are considered correct if there is at least one entry found.
- *
- * Invalid attributes are signaled by throwing an exception.
- *
- * @param username the username
- * @param password the password
- * @param ambariLdapConfiguration configuration instance holding ldap configuration details
- * @throws AmbariException if the attributes are not valid or any errors occurs
- */
- @Override
- public void checkUserAttributes(String username, String password, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException {
- LdapNetworkConnection connection = null;
- SearchCursor searchCursor = null;
- try {
- LOGGER.info("Checking user attributes for user {} r ...", username);
-
- LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
- connection = new LdapNetworkConnection(connectionConfig);
-
-
- if (!ambariLdapConfiguration.bindAnonimously()) {
- LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
- connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
- } else {
- LOGGER.debug("Binding anonimously ...");
- connection.bind();
- }
-
- if (!connection.isConnected()) {
- LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
- throw new IllegalStateException("The connection to the LDAP server is not alive");
- }
-
- // set up a filter based on the provided attributes
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), username))
- .toString();
-
- LOGGER.info("Searching for the user: {} using the search filter: {}", username, filter);
- EntryCursor entryCursor = connection.search(new Dn(ambariLdapConfiguration.baseDn()), filter, SearchScope.SUBTREE);
-
- // collecting search result entries
- List<Entry> users = Lists.newArrayList();
- for (Entry entry : entryCursor) {
- users.add(entry);
- }
-
- // there should be at least one user found
- if (users.isEmpty()) {
- String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
- LOGGER.error(msg);
- throw new Exception(msg);
- }
-
- LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
-
- } catch (Exception e) {
-
- LOGGER.error("Error while checking user attributes.");
- throw new AmbariException("Error while checking user attributes", e);
-
- } finally {
-
- LOGGER.debug("Closing the connection and searchresult ...");
-
- if (null != searchCursor) {
- searchCursor.close();
- }
-
- if (null != connection) {
- try {
- connection.close();
- } catch (IOException e) {
- LOGGER.error("Exception occurred while closing the connection", e);
- }
- }
-
- }
- }
-
- @Override
- public void checkGroupAttributes(AmbariLdapConfiguration configuration) throws AmbariException {
-
- }
-
-
-}
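For reference, a minimal stand-alone sketch of the connection check the deleted code above performed, written against the same Apache Directory LDAP API. The host, port and SSL flag below are illustrative placeholders, not values taken from an actual Ambari configuration:

import java.io.IOException;

import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;

public class LdapConnectionCheckSketch {
  public static void main(String[] args) {
    LdapConnectionConfig config = new LdapConnectionConfig();
    config.setLdapHost("localhost"); // illustrative host and port
    config.setLdapPort(389);
    config.setUseSsl(false);

    // try-with-resources closes the connection even when the bind fails
    try (LdapNetworkConnection connection = new LdapNetworkConnection(config)) {
      connection.bind(); // anonymous bind; use bind(managerDn, managerPassword) otherwise
      if (connection.isConnected()) {
        System.out.println("Successfully connected to the LDAP server.");
      }
    } catch (LdapException | IOException e) {
      System.err.println("Could not bind to the LDAP server: " + e.getMessage());
    }
  }
}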
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
new file mode 100644
index 0000000..838ef4c
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
@@ -0,0 +1,232 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.codec.decorators.SearchResultEntryDecorator;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.exception.LdapException;
+import org.apache.directory.api.ldap.model.message.Response;
+import org.apache.directory.api.ldap.model.message.SearchRequest;
+import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.apache.directory.shared.ldap.constants.SchemaConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/**
+ * Implementation of the validation logic using the Apache Directory API.
+ */
+@Singleton
+public class DefaultLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
+
+ @Inject
+ private LdapConnectionService ldapConnectionService;
+
+ /**
+ * Facilitates instantiation by the dependency injection framework.
+ */
+ @Inject
+ public DefaultLdapConfigurationValidatorService() {
+ }
+
+ @Override
+ public void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ try {
+ bind(ambariLdapConfiguration, ldapConnection);
+ } catch (LdapException e) {
+ LOGGER.error("Could not connect to the LDAP server", e);
+ throw new AmbariLdapException(e);
+ }
+ }
+
+
+ /**
+ * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
+ * Attributes are considered correct if there is at least one entry found.
+ *
+ * Invalid attributes are signaled by throwing an exception.
+ *
+ * @param ldapConnection the connection used for binding and searching
+ * @param testUserName the test username
+ * @param testPassword the test password
+ * @param ambariLdapConfiguration configuration instance holding ldap configuration details
+ * @return the distinguished name of the user entry found by the search
+ * @throws AmbariLdapException if the attributes are not valid or any error occurs
+ */
+ @Override
+ public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ String userDn = null;
+ try {
+ LOGGER.info("Checking user attributes for user {} r ...", testUserName);
+
+ // bind anonymously or with the manager credentials
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), testUserName))
+ .toString();
+
+ LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
+ EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
+
+ // collecting search result entries
+ List<Entry> users = Lists.newArrayList();
+ for (Entry entry : entryCursor) {
+ users.add(entry);
+ userDn = entry.getDn().getNormName();
+ }
+
+ // there should be at least one user found
+ if (users.isEmpty()) {
+ String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
+ LOGGER.error(msg);
+ throw new Exception(msg);
+ }
+
+ LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+ closeResources(ldapConnection, searchCursor);
+ }
+ return userDn;
+ }
+
+
+ @Override
+ public Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ Set<Response> groupResponses = Sets.newHashSet();
+
+ try {
+ LOGGER.info("Checking group attributes for user dn {} ...", userDn);
+
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.groupObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.groupMemberAttribute(), userDn)
+ ).toString();
+
+ LOGGER.info("Searching for the groups the user dn: {} is member of using the search filter: {}", userDn, filter);
+
+ // assemble a search request
+ SearchRequest searchRequest = new SearchRequestImpl();
+ searchRequest.setFilter(filter);
+ searchRequest.setBase(new Dn(ambariLdapConfiguration.groupSearchBase()));
+ searchRequest.setScope(SearchScope.SUBTREE);
+ searchRequest.addAttributes(ambariLdapConfiguration.groupMemberAttribute(), ambariLdapConfiguration.groupNameAttribute());
+
+ // perform the search
+ searchCursor = ldapConnection.search(searchRequest);
+
+ for (Response response : searchCursor) {
+ groupResponses.add(response);
+ }
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+
+ closeResources(ldapConnection, searchCursor);
+
+ }
+
+ return processGroupResults(groupResponses, ambariLdapConfiguration);
+ }
+
+ private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
+ LOGGER.info("Connecting to LDAP ....");
+ if (!ambariLdapConfiguration.bindAnonimously()) {
+ LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
+ connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
+ } else {
+ LOGGER.debug("Binding anonimously ...");
+ connection.bind();
+ }
+
+ if (!connection.isConnected()) {
+ LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
+ throw new IllegalStateException("The connection to the LDAP server is not alive");
+ }
+ LOGGER.info("Connected to LDAP.");
+ }
+
+
+ private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
+ Set<String> groupStrSet = Sets.newHashSet();
+ for (Response response : groupResponses) {
+ Entry entry = ((SearchResultEntryDecorator) response).getEntry();
+ groupStrSet.add(entry.get(ambariLdapConfiguration.groupNameAttribute()).get().getString());
+ }
+
+ LOGGER.debug("Extracted group names from group search responses: {}", groupStrSet);
+ return groupStrSet;
+ }
+
+ private void closeResources(LdapConnection connection, SearchCursor searchCursor) {
+ LOGGER.debug("Housekeeping: closing the connection and the search cursor ...");
+
+ if (null != searchCursor) {
+ // this method is idempotent
+ searchCursor.close();
+ }
+
+ if (null != connection) {
+ try {
+ connection.close();
+ } catch (IOException e) {
+ LOGGER.error("Exception occurred while closing the connection", e);
+ }
+ }
+ }
+
+}
+
+
+
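A compact, self-contained sketch of the user-attribute check implemented above. It reuses the public ldap.forumsys.com test server and the einstein test account that the tests further down rely on; treat both, and the hard-coded base DN, as illustrative fixtures rather than production values:

import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.apache.directory.ldap.client.api.search.FilterBuilder;

public class UserAttributeCheckSketch {
  public static void main(String[] args) throws Exception {
    // (&(objectClass=person)(uid=einstein)) -- the same filter shape the validator builds
    String filter = FilterBuilder.and(
        FilterBuilder.equal("objectClass", "person"),
        FilterBuilder.equal("uid", "einstein"))
      .toString();

    LdapConnectionConfig config = new LdapConnectionConfig();
    config.setLdapHost("ldap.forumsys.com"); // public test server, see the tests below
    config.setLdapPort(389);

    try (LdapNetworkConnection connection = new LdapNetworkConnection(config)) {
      connection.bind(); // the test server permits anonymous reads
      EntryCursor cursor = connection.search(new Dn("dc=example,dc=com"), filter, SearchScope.SUBTREE);
      String userDn = null;
      for (Entry entry : cursor) {
        userDn = entry.getDn().getNormName(); // one hit is enough to declare the attributes valid
      }
      cursor.close();
      System.out.println(userDn != null ? "Found user: " + userDn : "No user matched " + filter);
    }
  }
}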
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
new file mode 100644
index 0000000..b5559d9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Singleton
+public class DefaultLdapConnectionService implements LdapConnectionService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConnectionService.class);
+
+ @Override
+ public LdapNetworkConnection createLdapConnection(AmbariLdapConfiguration ambariLdapConfiguration) {
+ LOGGER.debug("Creating ldap connection instance from: {}", ambariLdapConfiguration);
+ return new LdapNetworkConnection(getLdapConnectionConfig(ambariLdapConfiguration));
+ }
+
+ private LdapConnectionConfig getLdapConnectionConfig(AmbariLdapConfiguration ambariLdapConfiguration) {
+ LOGGER.debug("Creating a configuration instance based on the ambari configuration: {}", ambariLdapConfiguration);
+
+ LdapConnectionConfig ldapConnectionConfig = new LdapConnectionConfig();
+ ldapConnectionConfig.setLdapHost(ambariLdapConfiguration.ldapServerHost());
+ ldapConnectionConfig.setLdapPort(ambariLdapConfiguration.ldapServerPort());
+ ldapConnectionConfig.setUseSsl(ambariLdapConfiguration.useSSL());
+
+ // TODO: set the other values as required
+ return ldapConnectionConfig;
+ }
+
+}
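The tests further down build the configuration from a plain property map before calling createLdapConnection, so a minimal usage sketch of the service above follows the same pattern. The host and port are illustrative, and this assumes, as the tests imply, that omitted properties such as the SSL flag default sensibly:

import java.util.Map;

import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.ambari.server.ldap.service.ad.DefaultLdapConnectionService;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;

import com.google.common.collect.Maps;

public class ConnectionServiceUsageSketch {
  public static void main(String[] args) throws Exception {
    Map<String, Object> props = Maps.newHashMap();
    props.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "localhost"); // illustrative
    props.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");

    LdapConnectionService connectionService = new DefaultLdapConnectionService();
    LdapNetworkConnection connection =
        connectionService.createLdapConnection(new AmbariLdapConfiguration(props));
    connection.bind(); // anonymous bind, as in the validator's checkConnection
    connection.close();
  }
}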
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
deleted file mode 100644
index a8839f1..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import javax.inject.Singleton;
-
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Converts between ambari specific ldap types and the 3rd party ldap library
- */
-@Singleton
-public class LdapConfigurationConverter {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(LdapConfigurationConverter.class);
-
- /**
- * Creates a {@link LdapConnectionConfig} instance based on the provided ambari specific configurations
- *
- * @param ambariAmbariLdapConfiguration the ambari specific ldap configuration instance
- * @return the connection configuration for the underlying ldap library
- */
- public LdapConnectionConfig getLdapConnectionConfig(AmbariLdapConfiguration ambariAmbariLdapConfiguration) {
- LOGGER.debug("Creating a configuration instance based on the ambari configuration: {}", ambariAmbariLdapConfiguration);
-
- LdapConnectionConfig ldapConnectionConfig = new LdapConnectionConfig();
- ldapConnectionConfig.setLdapHost(ambariAmbariLdapConfiguration.ldapServerHost());
- ldapConnectionConfig.setLdapPort(ambariAmbariLdapConfiguration.ldapServerPort());
- ldapConnectionConfig.setUseSsl(ambariAmbariLdapConfiguration.useSSL());
-
- //todo set the other values as required
- return ldapConnectionConfig;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
deleted file mode 100644
index 0f57099..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ad;
-
-import static org.junit.Assert.assertNotNull;
-
-import java.util.Map;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.cursor.SearchCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.message.Response;
-import org.apache.directory.api.ldap.model.message.SearchRequest;
-import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
-import org.apache.directory.api.ldap.model.message.SearchResultEntry;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.apache.directory.ldap.client.api.search.FilterBuilder;
-import org.apache.directory.shared.ldap.constants.SchemaConstants;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Maps;
-
-public class AdLdapConfigurationValidatorServiceTest {
- private static final Logger LOGGER = LoggerFactory.getLogger(AdLdapConfigurationValidatorService.class);
- private static final String TEST_USER = "Jocika10";
-
- LdapConfigurationValidatorService ldapConfigurationValidatorService = new AdLdapConfigurationValidatorService();
-
-
- @Test
- public void testCheckAttributes() throws Exception {
-
- // WHEN
- LdapConnectionConfig config = new LdapConnectionConfig();
- config.setLdapHost("localhost");
- config.setLdapPort(389);
- LdapConnection connection = new LdapNetworkConnection(config);
-
- // THEN
- connection.anonymousBind();
-
-
- EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
-
- for (Entry entry : cursor) {
- assertNotNull(entry);
- System.out.println(entry);
- }
-
- cursor.close();
-
- }
-
- @Test
- public void testCheckUserAttributes() throws Exception {
- Map<String, Object> ldapPropsMap = Maps.newHashMap();
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), true);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "localhost");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=dev,dc=local");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
-
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
-
-
- try {
- LOGGER.info("Authenticating user {} against the LDAP server ...", TEST_USER);
- LdapConfigurationConverter ldapConfigurationConverter = new LdapConfigurationConverter();
-
- LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
- LdapNetworkConnection connection = new LdapNetworkConnection(connectionConfig);
-
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), TEST_USER))
- .toString();
-
- SearchRequest searchRequest = new SearchRequestImpl();
- searchRequest.setBase(new Dn(ambariLdapConfiguration.baseDn()));
- searchRequest.setFilter(filter);
- searchRequest.setScope(SearchScope.SUBTREE);
-
- LOGGER.info("loking up user: {} based on the filtr: {}", TEST_USER, filter);
-
- connection.bind();
- SearchCursor searchCursor = connection.search(searchRequest);
-
- while (searchCursor.next()) {
- Response response = searchCursor.get();
-
- // process the SearchResultEntry
- if (response instanceof SearchResultEntry) {
- Entry resultEntry = ((SearchResultEntry) response).getEntry();
- System.out.println(resultEntry);
- }
- }
-
- searchCursor.close();
-
- } catch (Exception e) {
- throw new AmbariException("Error during user authentication check", e);
- }
-
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/d8813ffa/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
new file mode 100644
index 0000000..5c9d304
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.message.Response;
+import org.apache.directory.api.ldap.model.message.SearchRequest;
+import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
+import org.apache.directory.api.ldap.model.message.SearchResultEntry;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.apache.directory.shared.ldap.constants.SchemaConstants;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+
+public class DefaultLdapConfigurationValidatorServiceTest {
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
+ private static final String TEST_USER = "einstein";
+
+ LdapConfigurationValidatorService ldapConfigurationValidatorService = new DefaultLdapConfigurationValidatorService();
+
+
+ @Test
+ public void testCheckAttributes() throws Exception {
+
+ // WHEN
+ LdapConnectionConfig config = new LdapConnectionConfig();
+ config.setLdapHost("localhost");
+ config.setLdapPort(389);
+ LdapConnection connection = new LdapNetworkConnection(config);
+
+ // THEN
+ connection.anonymousBind();
+
+
+ EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
+
+ for (Entry entry : cursor) {
+ assertNotNull(entry);
+ System.out.println(entry);
+ }
+
+ cursor.close();
+
+ }
+
+ @Test
+ public void testCheckUserAttributes() throws Exception {
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), false);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+
+
+ try {
+ LOGGER.info("Authenticating user {} against the LDAP server ...", TEST_USER);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection connection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), TEST_USER))
+ .toString();
+
+ SearchRequest searchRequest = new SearchRequestImpl();
+ searchRequest.setBase(new Dn(ambariLdapConfiguration.baseDn()));
+ searchRequest.setFilter(filter);
+ searchRequest.setScope(SearchScope.SUBTREE);
+
+ LOGGER.info("loking up user: {} based on the filtr: {}", TEST_USER, filter);
+
+ connection.bind();
+ SearchCursor searchCursor = connection.search(searchRequest);
+
+ while (searchCursor.next()) {
+ Response response = searchCursor.get();
+
+ // process the SearchResultEntry
+ if (response instanceof SearchResultEntry) {
+ Entry resultEntry = ((SearchResultEntry) response).getEntry();
+ System.out.println(resultEntry);
+ }
+ }
+
+ searchCursor.close();
+
+ } catch (Exception e) {
+ throw new AmbariException("Error during user authentication check", e);
+ }
+
+ }
+
+ @Test
+ public void testRetrieveGroupsForUser() throws Exception {
+ // GIVEN
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_SEARCH_BASE.propertyName(), "dc=example,dc=com");
+
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, "uid=einstein,dc=example,dc=com", ambariLdapConfiguration);
+ }
+}
\ No newline at end of file
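For completeness, a stand-alone sketch of the group lookup that checkGroupAttributes performs, again pointed at the public forumsys test server used by the test above. The objectClass, member attribute and group name attribute mirror the groupOfUniqueNames schema the test configures; all values are test fixtures, not production settings:

import org.apache.directory.api.ldap.model.cursor.SearchCursor;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.message.Response;
import org.apache.directory.api.ldap.model.message.SearchRequest;
import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
import org.apache.directory.api.ldap.model.message.SearchResultEntry;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.apache.directory.ldap.client.api.search.FilterBuilder;

public class GroupSearchSketch {
  public static void main(String[] args) throws Exception {
    LdapConnectionConfig config = new LdapConnectionConfig();
    config.setLdapHost("ldap.forumsys.com"); // public test server, as in the test above
    config.setLdapPort(389);

    try (LdapNetworkConnection connection = new LdapNetworkConnection(config)) {
      connection.bind();

      // (&(objectClass=groupOfUniqueNames)(uniqueMember=uid=einstein,dc=example,dc=com))
      String filter = FilterBuilder.and(
          FilterBuilder.equal("objectClass", "groupOfUniqueNames"),
          FilterBuilder.equal("uniqueMember", "uid=einstein,dc=example,dc=com")
      ).toString();

      SearchRequest searchRequest = new SearchRequestImpl();
      searchRequest.setFilter(filter);
      searchRequest.setBase(new Dn("dc=example,dc=com"));
      searchRequest.setScope(SearchScope.SUBTREE);
      searchRequest.addAttributes("cn"); // only fetch the group name attribute

      SearchCursor searchCursor = connection.search(searchRequest);
      while (searchCursor.next()) {
        Response response = searchCursor.get();
        if (response instanceof SearchResultEntry) {
          Entry entry = ((SearchResultEntry) response).getEntry();
          System.out.println("Group: " + entry.get("cn").get().getString());
        }
      }
      searchCursor.close();
    }
  }
}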
[09/57] [abbrv] ambari git commit: AMBARI-21892. Unable to proceed to
Choose services step (akovalenko)
Posted by lp...@apache.org.
AMBARI-21892. Unable to proceed to Choose services step (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8de9b06d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8de9b06d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8de9b06d
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 8de9b06d93765f81487f5dc27f2b175bf475b0cb
Parents: 680f114
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Wed Sep 6 17:25:14 2017 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Wed Sep 6 17:26:32 2017 +0300
----------------------------------------------------------------------
ambari-web/app/controllers/installer.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8de9b06d/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index b17cc1f..ea07b92 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -289,7 +289,7 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
var isStacksExistInDb = stacks && stacks.length;
if (isStacksExistInDb) {
stacks.forEach(function (_stack) {
- var stack = data.items.findProperty('VersionDefinition.repository_version', _stack.repository_version);
+ var stack = data.items.findProperty('VersionDefinition.id', _stack.id);
if (stack) {
stack.VersionDefinition.is_selected = _stack.is_selected;
}
[03/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if
unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/ranger_kms_unsupported_db_flavor.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/ranger_kms_unsupported_db_flavor.json b/ambari-server/src/test/python/common-services/configs/ranger_kms_unsupported_db_flavor.json
new file mode 100644
index 0000000..22a3a44
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/ranger_kms_unsupported_db_flavor.json
@@ -0,0 +1,802 @@
+{
+ "localComponents": [
+ "SECONDARY_NAMENODE",
+ "HDFS_CLIENT",
+ "DATANODE",
+ "NAMENODE",
+ "ZOOKEEPER_SERVER",
+ "ZOOKEEPER_CLIENT",
+ "RANGER_USERSYNC",
+ "RANGER_ADMIN",
+ "RANGER_TAGSYNC",
+ "RANGER_KMS_SERVER"
+ ],
+ "configuration_attributes": {
+ "ranger-hdfs-audit": {},
+ "ssl-client": {},
+ "ranger-admin-site": {},
+ "ranger-hdfs-policymgr-ssl": {},
+ "tagsync-application-properties": {},
+ "ranger-env": {},
+ "usersync-log4j": {},
+ "admin-properties": {},
+ "ranger-ugsync-site": {},
+ "hdfs-site": {
+ "final": {
+ "dfs.datanode.data.dir": "true",
+ "dfs.namenode.http-address": "true",
+ "dfs.datanode.failed.volumes.tolerated": "true",
+ "dfs.support.append": "true",
+ "dfs.namenode.name.dir": "true",
+ "dfs.webhdfs.enabled": "true"
+ }
+ },
+ "ranger-tagsync-site": {},
+ "zoo.cfg": {},
+ "hadoop-policy": {},
+ "hdfs-log4j": {},
+ "ranger-hdfs-plugin-properties": {},
+ "core-site": {
+ "final": {
+ "fs.defaultFS": "true"
+ }
+ },
+ "hadoop-env": {},
+ "zookeeper-log4j": {},
+ "ssl-server": {},
+ "ranger-site": {},
+ "admin-log4j": {},
+ "tagsync-log4j": {},
+ "ranger-hdfs-security": {},
+ "usersync-properties": {},
+ "zookeeper-env": {},
+ "cluster-env": {},
+ "dbks-site": {},
+ "kms-env": {},
+ "kms-log4j": {},
+ "kms-properties": {},
+ "kms-site": {},
+ "ranger-kms-security": {},
+ "ranger-kms-site": {},
+ "ranger-kms-policymgr-ssl": {},
+ "ranger-kms-audit": {}
+ },
+ "public_hostname": "c6401.ambari.apache.org",
+ "commandId": "9-1",
+ "hostname": "c6401.ambari.apache.org",
+ "kerberosCommandParams": [],
+ "serviceName": "RANGER_KMS",
+ "role": "RANGER_KMS_SERVER",
+ "forceRefreshConfigTagsBeforeExecution": [],
+ "requestId": 9,
+ "agentConfigParams": {
+ "agent": {
+ "parallel_execution": 0
+ }
+ },
+ "clusterName": "c1",
+ "commandType": "EXECUTION_COMMAND",
+ "taskId": 64,
+ "roleParams": {},
+ "configurationTags": {
+ "ranger-hdfs-audit": {
+ "tag": "version1466427664617"
+ },
+ "ssl-client": {
+ "tag": "version1"
+ },
+ "ranger-admin-site": {
+ "tag": "version1466427664621"
+ },
+ "ranger-hdfs-policymgr-ssl": {
+ "tag": "version1466427664617"
+ },
+ "tagsync-application-properties": {
+ "tag": "version1466427664621"
+ },
+ "ranger-env": {
+ "tag": "version1466427664621"
+ },
+ "usersync-log4j": {
+ "tag": "version1466427664621"
+ },
+ "admin-properties": {
+ "tag": "version1466427664621"
+ },
+ "ranger-ugsync-site": {
+ "tag": "version1466427664621"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "ranger-tagsync-site": {
+ "tag": "version1466427664621"
+ },
+ "zoo.cfg": {
+ "tag": "version1"
+ },
+ "hadoop-policy": {
+ "tag": "version1"
+ },
+ "hdfs-log4j": {
+ "tag": "version1"
+ },
+ "ranger-hdfs-plugin-properties": {
+ "tag": "version1466427664617"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hadoop-env": {
+ "tag": "version1"
+ },
+ "zookeeper-log4j": {
+ "tag": "version1"
+ },
+ "ssl-server": {
+ "tag": "version1"
+ },
+ "ranger-site": {
+ "tag": "version1466427664621"
+ },
+ "admin-log4j": {
+ "tag": "version1466427664621"
+ },
+ "tagsync-log4j": {
+ "tag": "version1466427664621"
+ },
+ "ranger-hdfs-security": {
+ "tag": "version1466427664617"
+ },
+ "usersync-properties": {
+ "tag": "version1466427664621"
+ },
+ "zookeeper-env": {
+ "tag": "version1"
+ },
+ "cluster-env": {
+ "tag": "version1"
+ },
+ "dbks-site": {
+ "tag": "version1"
+ },
+ "kms-env": {
+ "tag": "version1"
+ },
+ "kms-log4j": {
+ "tag": "version1"
+ },
+ "kms-properties": {
+ "tag": "version1"
+ },
+ "kms-site": {
+ "tag": "version1"
+ },
+ "ranger-kms-security": {
+ "tag": "version1"
+ },
+ "ranger-kms-site": {
+ "tag": "version1"
+ },
+ "ranger-kms-policymgr-ssl": {
+ "tag": "version1"
+ },
+ "ranger-kms-audit": {
+ "tag": "version1"
+ }
+ },
+ "roleCommand": "START",
+ "hostLevelParams": {
+ "agent_stack_retry_on_unavailability": "false",
+ "stack_name": "HDP",
+ "custom_mysql_jdbc_name": "mysql-connector-java.jar",
+ "previous_custom_mysql_jdbc_name": "mysql-connector-java-old.jar",
+ "host_sys_prepped": "false",
+ "ambari_db_rca_username": "mapred",
+ "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
+ "agent_stack_retry_count": "5",
+ "stack_version": "2.5",
+ "jdk_name": "jdk-8u60-linux-x64.tar.gz",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "repository_version_id": "1",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "not_managed_hdfs_path_list": "[\"/tmp\"]",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "java_version": "8",
+ "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-776\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
+ "package_list": "[{\"name\":\"ranger_${stack_version}-admin\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"ranger_${stack_version}-usersync\",\"condition\":\"\",\"skipUpgrade\":false},{\"name\":\"ranger_${stack_version}-tagsync\",\"condition\":\"should_install_ranger_tagsync\",\"skipUpgrade\":false},{\"name\":\"ambari-logsearch-solr-client\",\"condition\":\"should_install_logsearch_solr_client\",\"skipUpgrade\":false}]",
+ "db_name": "ambari",
+ "group_list": "[\"ranger\",\"hadoop\",\"users\"]",
+ "agentCacheDir": "/var/lib/ambari-agent/cache",
+ "ambari_db_rca_password": "mapred",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "oracle_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//ojdbc6.jar",
+ "db_driver_filename": "mysql-connector-java.jar",
+ "user_list": "[\"zookeeper\",\"ambari-qa\",\"ranger\",\"hdfs\"]",
+ "clientsToUpdateConfigs": "[\"*\"]"
+ },
+ "commandParams": {
+ "service_package_folder": "common-services/RANGER/0.4.0/package",
+ "script": "scripts/ranger_usersync.py",
+ "hooks_folder": "HDP/2.0.6/hooks",
+ "version": "2.5.0.0-777",
+ "max_duration_for_retries": "0",
+ "command_retry_enabled": "false",
+ "command_timeout": "600",
+ "script_type": "PYTHON"
+ },
+ "forceRefreshConfigTags": [],
+ "stageId": 1,
+ "clusterHostInfo": {
+ "snamenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "ambari_server_use_ssl": [
+ "false"
+ ],
+ "all_ping_ports": [
+ "8670"
+ ],
+ "ranger_tagsync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_usersync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "slave_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "namenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "ambari_server_port": [
+ "8080"
+ ],
+ "ranger_admin_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_racks": [
+ "/default-rack"
+ ],
+ "all_ipv4_ips": [
+ "172.22.125.4"
+ ],
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_kms_server_hosts": [
+ "c6401.ambari.apache.org"
+ ]
+ },
+ "configurations": {
+ "ranger-hdfs-audit": {
+ "xasecure.audit.destination.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "xasecure.audit.destination.solr.urls": "",
+ "xasecure.audit.destination.solr.batch.filespool.dir": "/var/log/hadoop/hdfs/audit/solr/spool",
+ "xasecure.audit.destination.hdfs.batch.filespool.dir": "/var/log/hadoop/hdfs/audit/hdfs/spool",
+ "xasecure.audit.destination.hdfs": "true",
+ "xasecure.audit.destination.solr": "true",
+ "xasecure.audit.provider.summary.enabled": "false",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "xasecure.audit.is.enabled": "true"
+ },
+ "ssl-client": {
+ "ssl.client.truststore.reload.interval": "10000",
+ "ssl.client.keystore.password": "bigdata",
+ "ssl.client.truststore.type": "jks",
+ "ssl.client.keystore.location": "/etc/security/clientKeys/keystore.jks",
+ "ssl.client.truststore.location": "/etc/security/clientKeys/all.jks",
+ "ssl.client.truststore.password": "bigdata",
+ "ssl.client.keystore.type": "jks"
+ },
+ "ranger-admin-site": {
+ "ranger.admin.kerberos.cookie.domain": "",
+ "ranger.kms.service.user.hdfs": "hdfs",
+ "ranger.spnego.kerberos.principal": "",
+ "ranger.ldap.ad.url": "{{ranger_ug_ldap_url}}",
+ "ranger.plugins.hive.serviceuser": "hive",
+ "ranger.lookup.kerberos.keytab": "",
+ "ranger.plugins.kms.serviceuser": "kms",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "ranger.sso.browser.useragent": "Mozilla,chrome",
+ "ranger.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/ranger01",
+ "ranger.plugins.hbase.serviceuser": "hbase",
+ "ranger.plugins.hdfs.serviceuser": "hdfs",
+ "ranger.ldap.group.searchbase": "{{ranger_ug_ldap_group_searchbase}}",
+ "ranger.ldap.user.dnpattern": "uid={0},ou=users,dc=xasecure,dc=net",
+ "ranger.plugins.knox.serviceuser": "knox",
+ "ranger.ldap.base.dn": "dc=example,dc=com",
+ "ranger.sso.publicKey": "",
+ "ranger.admin.kerberos.cookie.path": "/",
+ "ranger.service.https.attrib.clientAuth": "want",
+ "ranger.jpa.jdbc.user": "{{ranger_db_user}}",
+ "ranger.ldap.ad.user.searchfilter": "(sAMAccountName={0})",
+ "ranger.ldap.group.roleattribute": "cn",
+ "ranger.plugins.kafka.serviceuser": "kafka",
+ "ranger.admin.kerberos.principal": "",
+ "ranger.ldap.ad.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.credential.provider.path": "/etc/ranger/admin/rangeradmin.jceks",
+ "ranger.ldap.referral": "ignore",
+ "ranger.service.http.port": "6080",
+ "ranger.ldap.user.searchfilter": "(uid={0})",
+ "ranger.plugins.atlas.serviceuser": "atlas",
+ "ranger.truststore.password": "changeit",
+ "ranger.ldap.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.password": "NONE",
+ "ranger.audit.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "ranger.lookup.kerberos.principal": "",
+ "ranger.service.https.port": "6182",
+ "ranger.plugins.storm.serviceuser": "storm",
+ "ranger.externalurl": "{{ranger_external_url}}",
+ "ranger.truststore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.kms.service.user.hive": "",
+ "ranger.https.attrib.keystore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.jpa.jdbc.dialect": "{{jdbc_dialect}}",
+ "ranger.service.host": "{{ranger_host}}",
+ "ranger.service.https.attrib.keystore.keyalias": "rangeradmin",
+ "ranger.service.https.attrib.keystore.pass": "xasecure",
+ "ranger.unixauth.remote.login.enabled": "true",
+ "ranger.jpa.jdbc.credential.alias": "rangeradmin",
+ "ranger.ldap.ad.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.username": "ranger_solr",
+ "ranger.sso.enabled": "false",
+ "ranger.audit.solr.urls": "",
+ "ranger.ldap.ad.domain": "",
+ "ranger.plugins.yarn.serviceuser": "yarn",
+ "ranger.audit.source.type": "solr",
+ "ranger.ldap.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.ldap.url": "{{ranger_ug_ldap_url}}",
+ "ranger.authentication.method": "UNIX",
+ "ranger.service.http.enabled": "true",
+ "ranger.ldap.group.searchfilter": "{{ranger_ug_ldap_group_searchfilter}}",
+ "ranger.ldap.ad.referral": "ignore",
+ "ranger.ldap.ad.base.dn": "dc=example,dc=com",
+ "ranger.jpa.jdbc.password": "_",
+ "ranger.spnego.kerberos.keytab": "",
+ "ranger.sso.providerurl": "",
+ "ranger.unixauth.service.hostname": "{{ugsync_host}}",
+ "ranger.admin.kerberos.keytab": "",
+ "ranger.admin.kerberos.token.valid.seconds": "30",
+ "ranger.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.unixauth.service.port": "5151"
+ },
+ "ranger-hdfs-policymgr-ssl": {
+ "xasecure.policymgr.clientssl.keystore": "/usr/hdp/current/hadoop-client/conf/ranger-plugin-keystore.jks",
+ "xasecure.policymgr.clientssl.truststore.password": "changeit",
+ "xasecure.policymgr.clientssl.keystore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.truststore": "/usr/hdp/current/hadoop-client/conf/ranger-plugin-truststore.jks",
+ "xasecure.policymgr.clientssl.truststore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.keystore.password": "myKeyFilePassword"
+ },
+ "tagsync-application-properties": {
+ "atlas.kafka.entities.group.id": "ranger_entities_consumer",
+ "atlas.kafka.zookeeper.connect": "c6401.ambari.apache.org:2181",
+ "atlas.kafka.bootstrap.servers": "localhost:6667"
+ },
+ "ranger-env": {
+ "ranger_solr_shards": "1",
+ "ranger_solr_config_set": "ranger_audits",
+ "ranger_user": "ranger",
+ "xml_configurations_supported": "true",
+ "ranger-atlas-plugin-enabled": "No",
+ "ranger-hbase-plugin-enabled": "No",
+ "ranger-yarn-plugin-enabled": "No",
+ "bind_anonymous": "false",
+ "ranger_admin_username": "amb_ranger_admin",
+ "admin_password": "admin",
+ "is_solrCloud_enabled": "true",
+ "ranger-storm-plugin-enabled": "No",
+ "ranger-hdfs-plugin-enabled": "No",
+ "ranger_group": "ranger",
+ "ranger-knox-plugin-enabled": "No",
+ "ranger_admin_log_dir": "/var/log/ranger/admin",
+ "ranger-kafka-plugin-enabled": "No",
+ "ranger_privelege_user_jdbc_url": "jdbc:mysql://c6401.ambari.apache.org:3306",
+ "ranger-hive-plugin-enabled": "No",
+ "xasecure.audit.destination.solr": "true",
+ "ranger_pid_dir": "/var/run/ranger",
+ "xasecure.audit.destination.hdfs": "true",
+ "admin_username": "admin",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "create_db_dbuser": "true",
+ "ranger_solr_collection_name": "ranger_audits",
+ "ranger_admin_password": "P1!qLEQwP24KVlWY",
+ "ranger_usersync_log_dir": "/var/log/ranger/usersync"
+ },
+ "usersync-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nlog4j.rootLogger = info,logFile\n\n# logFile\nlog4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.logFile.file=${logdir}/usersync.log\nl
og4j.appender.logFile.datePattern='.'yyyy-MM-dd\nlog4j.appender.logFile.layout=org.apache.log4j.PatternLayout\nlog4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n\n\n# console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.Target=System.out\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n"
+ },
+ "admin-properties": {
+ "db_user": "rangeradmin01",
+ "DB_FLAVOR": "MYSQL",
+ "db_password": "rangeradmin01",
+ "db_root_user": "root",
+ "policymgr_external_url": "http://c6401.ambari.apache.org:6080",
+ "db_name": "ranger01",
+ "db_host": "c6401.ambari.apache.org",
+ "db_root_password": "vagrant",
+ "SQL_CONNECTOR_JAR": "{{driver_curl_target}}"
+ },
+ "ranger-ugsync-site": {
+ "ranger.usersync.ldap.binddn": "",
+ "ranger.usersync.policymgr.username": "rangerusersync",
+ "ranger.usersync.policymanager.mockrun": "false",
+ "ranger.usersync.group.searchbase": "",
+ "ranger.usersync.ldap.bindalias": "testldapalias",
+ "ranger.usersync.truststore.file": "/usr/hdp/current/ranger-usersync/conf/mytruststore.jks",
+ "ranger.usersync.port": "5151",
+ "ranger.usersync.pagedresultssize": "500",
+ "ranger.usersync.group.memberattributename": "",
+ "ranger.usersync.kerberos.principal": "",
+ "ranger.usersync.source.impl.class": "org.apache.ranger.unixusersync.process.UnixUserGroupBuilder",
+ "ranger.usersync.ldap.referral": "ignore",
+ "ranger.usersync.group.searchfilter": "",
+ "ranger.usersync.ldap.user.objectclass": "person",
+ "ranger.usersync.logdir": "{{usersync_log_dir}}",
+ "ranger.usersync.ldap.user.searchfilter": "",
+ "ranger.usersync.ldap.groupname.caseconversion": "none",
+ "ranger.usersync.ldap.ldapbindpassword": "",
+ "ranger.usersync.unix.minUserId": "500",
+ "ranger.usersync.policymanager.maxrecordsperapicall": "1000",
+ "ranger.usersync.group.nameattribute": "",
+ "ranger.usersync.policymgr.alias": "ranger.usersync.policymgr.password",
+ "ranger.usersync.keystore.file": "/usr/hdp/current/ranger-usersync/conf/unixauthservice.jks",
+ "ranger.usersync.user.searchenabled": "false",
+ "ranger.usersync.group.usermapsyncenabled": "true",
+ "ranger.usersync.ldap.bindkeystore": "",
+ "ranger.usersync.ldap.user.groupnameattribute": "memberof, ismemberof",
+ "ranger.usersync.kerberos.keytab": "",
+ "ranger.usersync.passwordvalidator.path": "./native/credValidator.uexe",
+ "ranger.usersync.group.objectclass": "",
+ "ranger.usersync.ldap.user.searchscope": "sub",
+ "ranger.usersync.unix.password.file": "/etc/passwd",
+ "ranger.usersync.ldap.user.nameattribute": "",
+ "ranger.usersync.pagedresultsenabled": "true",
+ "ranger.usersync.policymanager.baseURL": "{{ranger_external_url}}",
+ "ranger.usersync.group.search.first.enabled": "false",
+ "ranger.usersync.group.searchenabled": "false",
+ "ranger.usersync.sink.impl.class": "org.apache.ranger.unixusersync.process.PolicyMgrUserGroupBuilder",
+ "ranger.usersync.ssl": "true",
+ "ranger.usersync.ldap.url": "",
+ "ranger.usersync.ldap.searchBase": "dc=hadoop,dc=apache,dc=org",
+ "ranger.usersync.policymgr.keystore": "/usr/hdp/current/ranger-usersync/conf/ugsync.jceks",
+ "ranger.usersync.ldap.user.searchbase": "",
+ "ranger.usersync.ldap.username.caseconversion": "none",
+ "ranger.usersync.credstore.filename": "/usr/hdp/current/ranger-usersync/conf/ugsync.jceks",
+ "ranger.usersync.keystore.password": "UnIx529p",
+ "ranger.usersync.unix.group.file": "/etc/group",
+ "ranger.usersync.filesource.file": "/tmp/usergroup.txt",
+ "ranger.usersync.group.searchscope": "",
+ "ranger.usersync.truststore.password": "changeit",
+ "ranger.usersync.enabled": "true",
+ "ranger.usersync.sleeptimeinmillisbetweensynccycle": "60000",
+ "ranger.usersync.filesource.text.delimiter": ","
+ },
+ "hdfs-site": {
+ "dfs.namenode.checkpoint.period": "21600",
+ "dfs.namenode.avoid.write.stale.datanode": "true",
+ "dfs.namenode.startup.delay.block.deletion.sec": "3600",
+ "dfs.namenode.checkpoint.txns": "1000000",
+ "dfs.content-summary.limit": "5000",
+ "dfs.support.append": "true",
+ "dfs.datanode.address": "0.0.0.0:50010",
+ "dfs.cluster.administrators": " hdfs",
+ "dfs.namenode.audit.log.async": "true",
+ "dfs.datanode.balance.bandwidthPerSec": "6250000",
+ "dfs.namenode.safemode.threshold-pct": "1",
+ "dfs.namenode.checkpoint.edits.dir": "${dfs.namenode.checkpoint.dir}",
+ "dfs.namenode.rpc-address": "c6401.ambari.apache.org:8020",
+ "dfs.permissions.enabled": "true",
+ "dfs.client.read.shortcircuit": "true",
+ "dfs.https.port": "50470",
+ "dfs.namenode.https-address": "c6401.ambari.apache.org:50470",
+ "nfs.file.dump.dir": "/tmp/.hdfs-nfs",
+ "dfs.blocksize": "134217728",
+ "dfs.blockreport.initialDelay": "120",
+ "dfs.journalnode.edits.dir": "/hadoop/hdfs/journalnode",
+ "dfs.namenode.fslock.fair": "false",
+ "dfs.datanode.max.transfer.threads": "4096",
+ "dfs.heartbeat.interval": "3",
+ "dfs.replication": "3",
+ "dfs.namenode.handler.count": "50",
+ "dfs.namenode.checkpoint.dir": "/grid/0/hadoop/hdfs/namesecondary",
+ "fs.permissions.umask-mode": "022",
+ "dfs.namenode.stale.datanode.interval": "30000",
+ "dfs.datanode.ipc.address": "0.0.0.0:8010",
+ "dfs.datanode.failed.volumes.tolerated": "0",
+ "dfs.datanode.data.dir": "/grid/0/hadoop/hdfs/data",
+ "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",
+ "dfs.webhdfs.enabled": "true",
+ "dfs.encrypt.data.transfer.cipher.suites": "AES/CTR/NoPadding",
+ "dfs.namenode.accesstime.precision": "0",
+ "dfs.datanode.https.address": "0.0.0.0:50475",
+ "dfs.namenode.write.stale.datanode.ratio": "1.0f",
+ "dfs.namenode.secondary.http-address": "c6401.ambari.apache.org:50090",
+ "nfs.exports.allowed.hosts": "* rw",
+ "dfs.datanode.http.address": "0.0.0.0:50075",
+ "dfs.datanode.du.reserved": "33011188224",
+ "dfs.client.read.shortcircuit.streams.cache.size": "4096",
+ "dfs.http.policy": "HTTP_ONLY",
+ "dfs.block.access.token.enable": "true",
+ "dfs.client.retry.policy.enabled": "false",
+ "dfs.namenode.name.dir.restore": "true",
+ "dfs.permissions.superusergroup": "hdfs",
+ "dfs.journalnode.https-address": "0.0.0.0:8481",
+ "dfs.journalnode.http-address": "0.0.0.0:8480",
+ "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+ "dfs.namenode.avoid.read.stale.datanode": "true",
+ "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
+ "dfs.datanode.data.dir.perm": "750",
+ "dfs.encryption.key.provider.uri": "",
+ "dfs.replication.max": "50",
+ "dfs.namenode.name.dir": "/grid/0/hadoop/hdfs/namenode"
+ },
+ "ranger-tagsync-site": {
+ "ranger.tagsync.atlas.to.ranger.service.mapping": "",
+ "ranger.tagsync.dest.ranger.ssl.config.filename": "/usr/hdp/current/ranger-tagsync/conf/mytruststore.jks",
+ "ranger.tagsync.source.file.check.interval.millis": "",
+ "ranger.tagsync.logdir": "/var/log/ranger/tagsync",
+ "ranger.tagsync.source.atlasrest.download.interval.millis": "",
+ "ranger.tagsync.keystore.filename": "/usr/hdp/current/ranger-tagsync/conf/rangertagsync.jceks",
+ "ranger.tagsync.source.atlasrest.endpoint": "",
+ "ranger.tagsync.dest.ranger.username": "rangertagsync",
+ "ranger.tagsync.kerberos.principal": "",
+ "ranger.tagsync.dest.ranger.endpoint": "{{ranger_external_url}}",
+ "ranger.tagsync.atlas.custom.resource.mappers": "",
+ "ranger.tagsync.kerberos.keytab": "",
+ "ranger.tagsync.source.atlas": "false",
+ "ranger.tagsync.source.atlasrest": "false",
+ "ranger.tagsync.source.file": "false",
+ "ranger.tagsync.source.file.filename": ""
+ },
+ "zoo.cfg": {
+ "clientPort": "2181",
+ "autopurge.purgeInterval": "24",
+ "syncLimit": "5",
+ "dataDir": "/grid/0/hadoop/zookeeper",
+ "initLimit": "10",
+ "tickTime": "2000",
+ "autopurge.snapRetainCount": "30"
+ },
+ "hadoop-policy": {
+ "security.job.client.protocol.acl": "*",
+ "security.job.task.protocol.acl": "*",
+ "security.datanode.protocol.acl": "*",
+ "security.namenode.protocol.acl": "*",
+ "security.client.datanode.protocol.acl": "*",
+ "security.inter.tracker.protocol.acl": "*",
+ "security.refresh.usertogroups.mappings.protocol.acl": "hadoop",
+ "security.client.protocol.acl": "*",
+ "security.refresh.policy.protocol.acl": "hadoop",
+ "security.admin.operations.protocol.acl": "hadoop",
+ "security.inter.datanode.protocol.acl": "*"
+ },
+ "hdfs-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n\n\n# Define some default values that can be overridden by system properties\n# To change daemon root logger use hadoop_root_logger in hadoop-env\nhadoop.root.logger=I
NFO,console\nhadoop.log.dir=.\nhadoop.log.file=hadoop.log\n\n\n# Define the root logger to the system property \"hadoop.root.logger\".\nlog4j.rootLogger=${hadoop.root.logger}, EventCounter\n\n# Logging Threshold\nlog4j.threshhold=ALL\n\n#\n# Daily Rolling File Appender\n#\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n# Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n# Pattern format: Date LogLevel LoggerName LogMessage\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\n# Debugging Pattern format\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if you want to use this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.conso
le.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n#\n# TaskLog Appender\n#\n\n#Default values\nhadoop.tasklog.taskid=null\nhadoop.tasklog.iscleanup=false\nhadoop.tasklog.noKeepSplits=4\nhadoop.tasklog.totalLogFileSize=100\nhadoop.tasklog.purgeLogSplits=true\nhadoop.tasklog.logsRetainHours=12\n\nlog4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender\nlog4j.appender.TLA.taskId=${hadoop.tasklog.taskid}\nlog4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}\nlog4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}\n\nlog4j.appender.TLA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\n\n#\n#Security audit appender\n#\nhadoop.security.logger=INFO,console\nhadoop.security.log.maxfilesize=256MB\nhadoop.security.log.maxbackupindex=20\nlog4j.category.SecurityLogger=${hadoop.security.logger}\nh
adoop.security.log.file=SecurityAuth.audit\nlog4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\nlog4j.appender.DRFAS.DatePattern=.yyyy-MM-dd\n\nlog4j.appender.RFAS=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}\nlog4j.appender.RFAS.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n\nlog4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}\nlog4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}\n\n#\n# hdfs audit logging\n#\nhdfs.audit.logger=INFO,console\nlog4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}\nlog4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false\
nlog4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log\nlog4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# NameNode metrics logging.\n# The default is to retain two namenode-metrics.log files up to 64MB each.\n#\nnamenode.metrics.logger=INFO,NullAppender\nlog4j.logger.NameNodeMetricsLog=${namenode.metrics.logger}\nlog4j.additivity.NameNodeMetricsLog=false\nlog4j.appender.NNMETRICSRFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.NNMETRICSRFA.File=${hadoop.log.dir}/namenode-metrics.log\nlog4j.appender.NNMETRICSRFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.NNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n\nlog4j.appender.NNMETRICSRFA.MaxBackupIndex=1\nlog4j.appender.NNMETRICSRFA.MaxFileSize=64MB\n\n#\n# mapred audit logging\n#\nmapred.audit
.logger=INFO,console\nlog4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}\nlog4j.additivity.org.apache.hadoop.mapred.AuditLogger=false\nlog4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log\nlog4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout\nlog4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n\nlog4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd\n\n#\n# Rolling File Appender\n#\n\nlog4j.appender.RFA=org.apache.log4j.RollingFileAppender\nlog4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}\n\n# Logfile size and and 30-day backups\nlog4j.appender.RFA.MaxFileSize=256MB\nlog4j.appender.RFA.MaxBackupIndex=10\n\nlog4j.appender.RFA.layout=org.apache.log4j.PatternLayout\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n\nlog4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n\n# Custom Logging levels\n\n
hadoop.metrics.log.level=INFO\n#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG\n#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG\n#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG\nlog4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}\n\n# Jets3t library\nlog4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR\n\n#\n# Null Appender\n# Trap security logger on the hadoop client side\n#\nlog4j.appender.NullAppender=org.apache.log4j.varia.NullAppender\n\n#\n# Event Counter Appender\n# Sends counts of logging messages at different severity levels to Hadoop Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter\n\n# Removes \"deprecated\" messages\nlog4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN\n\n#\n# HDFS block state change log from block manager\n#\n# Uncomment the following to suppress normal block state change\n# messages from BlockManager in NameNode.\n#log4j.logger.BlockStateChange=W
ARN"
+ },
+ "ranger-hdfs-plugin-properties": {
+ "hadoop.rpc.protection": "authentication",
+ "ranger-hdfs-plugin-enabled": "No",
+ "REPOSITORY_CONFIG_USERNAME": "hadoop",
+ "policy_user": "ambari-qa",
+ "common.name.for.certificate": "",
+ "REPOSITORY_CONFIG_PASSWORD": "hadoop"
+ },
+ "core-site": {
+ "hadoop.proxyuser.root.hosts": "*",
+ "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
+ "fs.trash.interval": "360",
+ "ha.failover-controller.active-standby-elector.zk.op.retries": "120",
+ "hadoop.http.authentication.simple.anonymous.allowed": "true",
+ "hadoop.security.authentication": "simple",
+ "hadoop.proxyuser.root.groups": "*",
+ "ipc.client.connection.maxidletime": "30000",
+ "hadoop.security.key.provider.path": "",
+ "mapreduce.jobtracker.webinterface.trusted": "false",
+ "hadoop.security.authorization": "false",
+ "net.topology.script.file.name": "/etc/hadoop/conf/topology_script.py",
+ "ipc.server.tcpnodelay": "true",
+ "ipc.client.connect.max.retries": "50",
+ "hadoop.security.auth_to_local": "DEFAULT",
+ "io.file.buffer.size": "131072",
+ "hadoop.proxyuser.hdfs.hosts": "*",
+ "hadoop.proxyuser.hdfs.groups": "*",
+ "ipc.client.idlethreshold": "8000",
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020",
+ "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec"
+ },
+ "hadoop-env": {
+ "keyserver_port": "",
+ "proxyuser_group": "users",
+ "hdfs_user_nproc_limit": "65536",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "hdfs_user_nofile_limit": "128000",
+ "hdfs_user": "hdfs",
+ "keyserver_host": " ",
+ "namenode_opt_maxnewsize": "128m",
+ "namenode_opt_maxpermsize": "256m",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hadoop Configuration Directory\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to H
ADOOP_OPTS when specified\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\n\n{% if java_version < 8 %}\nSHARED_HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{na
menode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT\"\nexport HADOOP_NAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\\\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtno
de_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\\\" ${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS\"\n\n{% else %}\nSHARED_HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_h
eapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT\"\nexport HADOOP_NAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\\\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\\\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\\\" ${HADOOP_SECONDARYNAMENO
DE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n{% endif %}\n\nHADOOP_NFS3_OPTS=\"-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\n\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The scheduling pr
iority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Add database libraries\nJAVA_JDBC_LIBS=\"\"\nif [ -d \"/usr/share/java\" ]; then\n for jarFile in `ls /usr/share/java | grep -E \"(mysql|ojdbc|postgresql|sqljdbc)\" 2>/dev/null`\n do\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\n done\nfi\n\n# Add libraries to the hadoop classpath - some may not need a colon as they already include it\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n# Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}\n\nexport HADOOP_OPTS=\"-Dhdp.version=$HDP_VERSION $HADOOP_OPTS\"\n\n{% if is_datanode_max_locked_memory_set %}\n# Fix temporary bug, when ulimit from conf files is not picked up, without full relogin. \n# Makes sense to fix only when runing DN as root \nif [ \"$command\" == \"datanode\" ] && [ \"$EUID\" -eq 0 ] && [ -n \"$HADOOP_SECURE_
DN_USER\" ]; then\n ulimit -l {{datanode_max_locked_memory}}\nfi\n{% endif %}",
+ "namenode_heapsize": "1024m",
+ "namenode_opt_newsize": "128m",
+ "nfsgateway_heapsize": "1024",
+ "dtnode_heapsize": "1024m",
+ "hadoop_root_logger": "INFO,RFA",
+ "hadoop_heapsize": "1024",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop",
+ "namenode_opt_permsize": "128m",
+ "hdfs_tmp_dir": "/tmp"
+ },
+ "zookeeper-log4j": {
+ "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\n\n#\n# ZooKeeper Logging Configuration\n#\n\n# DEFAULT: console appender only\nlog4j.rootLogger=INFO, CONSOLE\n\n# Example with rolling log file\n#log4j.root
Logger=DEBUG, CONSOLE, ROLLINGFILE\n\n# Example with rolling log file and tracing\n#log4j.rootLogger=TRACE, CONSOLE, ROLLINGFILE, TRACEFILE\n\n#\n# Log INFO level and above messages to the console\n#\nlog4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\nlog4j.appender.CONSOLE.Threshold=INFO\nlog4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n\n\n#\n# Add ROLLINGFILE to rootLogger to get log file output\n# Log DEBUG level and above messages to a log file\nlog4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender\nlog4j.appender.ROLLINGFILE.Threshold=DEBUG\nlog4j.appender.ROLLINGFILE.File=zookeeper.log\n\n# Max log file size of 10MB\nlog4j.appender.ROLLINGFILE.MaxFileSize=10MB\n# uncomment the next line to limit number of backup files\n#log4j.appender.ROLLINGFILE.MaxBackupIndex=10\n\nlog4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout\nlog4j.appender.ROLLINGFILE.
layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n\n\n\n#\n# Add TRACEFILE to rootLogger to get log file output\n# Log DEBUG level and above messages to a log file\nlog4j.appender.TRACEFILE=org.apache.log4j.FileAppender\nlog4j.appender.TRACEFILE.Threshold=TRACE\nlog4j.appender.TRACEFILE.File=zookeeper_trace.log\n\nlog4j.appender.TRACEFILE.layout=org.apache.log4j.PatternLayout\n### Notice we are including log4j's NDC here (%x)\nlog4j.appender.TRACEFILE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L][%x] - %m%n"
+ },
+ "ssl-server": {
+ "ssl.server.keystore.location": "/etc/security/serverKeys/keystore.jks",
+ "ssl.server.keystore.keypassword": "bigdata",
+ "ssl.server.truststore.location": "/etc/security/serverKeys/all.jks",
+ "ssl.server.keystore.password": "bigdata",
+ "ssl.server.truststore.password": "bigdata",
+ "ssl.server.truststore.type": "jks",
+ "ssl.server.keystore.type": "jks",
+ "ssl.server.truststore.reload.interval": "10000"
+ },
+ "ranger-site": {},
+ "admin-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nlog4j.rootLogger = warn,xa_log_appender\n\n\n# xa_logger\nlog4j.appender.xa_log_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.xa_log_appender.
file=${logdir}/xa_portal.log\nlog4j.appender.xa_log_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.xa_log_appender.append=true\nlog4j.appender.xa_log_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.xa_log_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n\n# xa_log_appender : category and additivity\nlog4j.category.org.springframework=warn,xa_log_appender\nlog4j.additivity.org.springframework=false\n\nlog4j.category.org.apache.ranger=info,xa_log_appender\nlog4j.additivity.org.apache.ranger=false\n\nlog4j.category.xa=info,xa_log_appender\nlog4j.additivity.xa=false\n\n# perf_logger\nlog4j.appender.perf_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.perf_appender.file=${logdir}/ranger_admin_perf.log\nlog4j.appender.perf_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.perf_appender.append=true\nlog4j.appender.perf_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.perf_appender.layout.ConversionPattern=%d [%t] %m%n\n
\n\n# sql_appender\nlog4j.appender.sql_appender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.sql_appender.file=${logdir}/xa_portal_sql.log\nlog4j.appender.sql_appender.datePattern='.'yyyy-MM-dd\nlog4j.appender.sql_appender.append=true\nlog4j.appender.sql_appender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.sql_appender.layout.ConversionPattern=%d [%t] %-5p %C{6} (%F:%L) - %m%n\n\n# sql_appender : category and additivity\nlog4j.category.org.hibernate.SQL=warn,sql_appender\nlog4j.additivity.org.hibernate.SQL=false\n\nlog4j.category.jdbc.sqlonly=fatal,sql_appender\nlog4j.additivity.jdbc.sqlonly=false\n\nlog4j.category.jdbc.sqltiming=warn,sql_appender\nlog4j.additivity.jdbc.sqltiming=false\n\nlog4j.category.jdbc.audit=fatal,sql_appender\nlog4j.additivity.jdbc.audit=false\n\nlog4j.category.jdbc.resultset=fatal,sql_appender\nlog4j.additivity.jdbc.resultset=false\n\nlog4j.category.jdbc.connection=fatal,sql_appender\nlog4j.additivity.jdbc.connection=false"
+ },
+ "tagsync-log4j": {
+ "content": "\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nlog4j.rootLogger = info,logFile\n\n# logFile\nlog4j.appender.logFile=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.logFile.file=${logdir}/tagsync.log\n
log4j.appender.logFile.datePattern='.'yyyy-MM-dd\nlog4j.appender.logFile.layout=org.apache.log4j.PatternLayout\nlog4j.appender.logFile.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n\n\n# console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.Target=System.out\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %L %m%n"
+ },
+ "ranger-hdfs-security": {
+ "ranger.plugin.hdfs.policy.source.impl": "org.apache.ranger.admin.client.RangerAdminRESTClient",
+ "ranger.plugin.hdfs.service.name": "{{repo_name}}",
+ "ranger.plugin.hdfs.policy.cache.dir": "/etc/ranger/{{repo_name}}/policycache",
+ "ranger.plugin.hdfs.policy.pollIntervalMs": "30000",
+ "ranger.plugin.hdfs.policy.rest.url": "{{policymgr_mgr_url}}",
+ "ranger.plugin.hdfs.policy.rest.ssl.config.file": "/etc/hadoop/conf/ranger-policymgr-ssl.xml",
+ "xasecure.add-hadoop-authorization": "true"
+ },
+ "usersync-properties": {},
+ "zookeeper-env": {
+ "zk_log_dir": "/var/log/zookeeper",
+ "content": "\nexport JAVA_HOME={{java64_home}}\nexport ZOOKEEPER_HOME={{zk_home}}\nexport ZOO_LOG_DIR={{zk_log_dir}}\nexport ZOOPIDFILE={{zk_pid_file}}\nexport SERVER_JVMFLAGS={{zk_server_heapsize}}\nexport JAVA=$JAVA_HOME/bin/java\nexport CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*\n\n{% if security_enabled %}\nexport SERVER_JVMFLAGS=\"$SERVER_JVMFLAGS -Djava.security.auth.login.config={{zk_server_jaas_file}}\"\nexport CLIENT_JVMFLAGS=\"$CLIENT_JVMFLAGS -Djava.security.auth.login.config={{zk_client_jaas_file}}\"\n{% endif %}",
+ "zk_server_heapsize": "1024m",
+ "zk_pid_dir": "/var/run/zookeeper",
+ "zk_user": "zookeeper"
+ },
+ "cluster-env": {
+ "security_enabled": "false",
+ "override_uid": "true",
+ "fetch_nonlocal_groups": "true",
+ "one_dir_per_partition": "true",
+ "repo_ubuntu_template": "{{package_type}} {{base_url}} {{components}}",
+ "ignore_groupsusers_create": "false",
+ "alerts_repeat_tolerance": "1",
+ "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab",
+ "kerberos_domain": "EXAMPLE.COM",
+ "manage_dirs_on_root": "true",
+ "recovery_lifetime_max_count": "1024",
+ "recovery_type": "AUTO_START",
+ "ignore_bad_mounts": "false",
+ "recovery_window_in_minutes": "60",
+ "user_group": "hadoop",
+ "stack_name": "HDP",
+ "stack_root": "{\"HDP\": \"/usr/hdp\"}",
+ "stack_tools": "{\n \"HDP\": { \"stack_selector\": [\"hdp-select\", \"/usr/bin/hdp-select\", \"hdp-select\"],\n \"conf_selector\": [\"conf-select\", \"/usr/bin/conf-select\", \"conf-select\"]\n}\n}",
+ "recovery_retry_interval": "5",
+ "recovery_enabled": "true",
+ "recovery_max_count": "6",
+ "repo_suse_rhel_template": "[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0",
+ "managed_hdfs_resource_property_names": "",
+ "smokeuser": "ambari-qa"
+ },
+ "dbks-site": {
+ "ranger.ks.jpa.jdbc.credential.provider.path": "/etc/ranger/kms/rangerkms.jceks",
+ "ranger.ks.kerberos.keytab": "/etc/security/keytabs/rangerkms.service.keytab",
+ "ranger.ks.hsm.partition.password": "_",
+ "ranger.ks.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.ks.jpa.jdbc.credential.alias": "ranger.ks.jdbc.password",
+ "ranger.ks.kerberos.principal": "rangerkms12/_HOST@EXAMPLE.COM",
+ "ranger.db.encrypt.key.password": "_",
+ "ranger.ks.hsm.enabled": "false",
+ "ranger.ks.jpa.jdbc.password": "_",
+ "ranger.ks.masterkey.credential.alias": "ranger.ks.masterkey.password",
+ "ranger.ks.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/rangerkms01",
+ "hadoop.kms.blacklist.DECRYPT_EEK": "hdfs",
+ "ranger.ks.jdbc.sqlconnectorjar": "{{ews_lib_jar_path}}",
+ "ranger.ks.jpa.jdbc.user": "{{db_user}}",
+ "ranger.ks.hsm.partition.password.alias": "ranger.kms.hsm.partition.password",
+ "ranger.ks.hsm.type": "LunaProvider",
+ "ranger.ks.hsm.partition.name": "par19",
+ "ranger.ks.jpa.jdbc.dialect": "{{jdbc_dialect}}"
+ },
+ "kms-env": {
+ "kms_group": "kms",
+ "kms_log_dir": "/var/log/ranger/kms",
+ "hsm_partition_password": "",
+ "kms_user": "kms",
+ "create_db_user": "true",
+ "kms_port": "9292"
+ },
+ "kms-log4j": {
+ "content": "\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. See accompanying LICENSE file.\n#\n\n# If the Java System property 'kms.log.dir' is not defined at KMS start up time\n# Setup sets its value to '${kms.home}/logs'\n\nlog4j.appender.kms=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kms.DatePattern='.'yyyy-MM-dd\nlog4j.appender.kms.File=${kms.log.dir}/kms.log\nlog4j.appender.kms.Append=true\nlog4j.appender.kms.layout=org.apache.log4j.PatternLayout\nlog4j
.appender.kms.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n\n\nlog4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kms-audit.DatePattern='.'yyyy-MM-dd\nlog4j.appender.kms-audit.File=${kms.log.dir}/kms-audit.log\nlog4j.appender.kms-audit.Append=true\nlog4j.appender.kms-audit.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n\n\nlog4j.logger.kms-audit=INFO, kms-audit\nlog4j.additivity.kms-audit=false\n\nlog4j.rootLogger=ALL, kms\nlog4j.logger.org.apache.hadoop.conf=ERROR\nlog4j.logger.org.apache.hadoop=INFO\nlog4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF"
+ },
+ "kms-properties": {
+ "REPOSITORY_CONFIG_USERNAME": "keyadmin",
+ "db_user": "rangerkms01",
+ "DB_FLAVOR": "UNSUPPORTED",
+ "db_password": "rangerkms01",
+ "KMS_MASTER_KEY_PASSWD": "StrongPassword01",
+ "db_root_user": "root",
+ "db_name": "rangerkms01",
+ "db_host": "c6401.ambari.apache.org",
+ "db_root_password": "vagrant",
+ "SQL_CONNECTOR_JAR": "{{driver_curl_target}}",
+ "REPOSITORY_CONFIG_PASSWORD": "keyadmin"
+ },
+ "kms-site": {
+ "hadoop.kms.proxyuser.ranger.hosts": "*",
+ "hadoop.kms.authentication.type": "simple",
+ "hadoop.kms.proxyuser.ranger.groups": "*",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.path": "/hadoop-kms/hadoop-auth-signature-secret",
+ "hadoop.kms.security.authorization.manager": "org.apache.ranger.authorization.kms.authorizer.RangerKmsAuthorizer",
+ "hadoop.kms.authentication.kerberos.name.rules": "DEFAULT",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "hadoop.kms.current.key.cache.timeout.ms": "30000",
+ "hadoop.kms.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "hadoop.kms.audit.aggregation.window.ms": "10000",
+ "hadoop.kms.proxyuser.ranger.users": "*",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type": "kerberos",
+ "hadoop.kms.key.provider.uri": "dbks://http@localhost:9292/kms",
+ "hadoop.security.keystore.JavaKeyStoreProvider.password": "none",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
+ "hadoop.kms.authentication.signer.secret.provider": "random",
+ "hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string": "#HOSTNAME#:#PORT#,...",
+ "hadoop.kms.cache.enable": "true",
+ "hadoop.kms.cache.timeout.ms": "600000",
+ "hadoop.kms.authentication.kerberos.principal": "*"
+ },
+ "ranger-kms-audit": {
+ "xasecure.audit.destination.solr.zookeepers": "c6401.ambari.apache.org:2181/ranger_audits",
+ "xasecure.audit.destination.solr.urls": "",
+ "xasecure.audit.destination.solr.batch.filespool.dir": "/var/log/ranger/kms/audit/solr/spool",
+ "xasecure.audit.destination.hdfs.batch.filespool.dir": "/var/log/ranger/kms/audit/hdfs/spool",
+ "xasecure.audit.destination.hdfs": "true",
+ "xasecure.audit.destination.solr": "true",
+ "xasecure.audit.provider.summary.enabled": "false",
+ "xasecure.audit.destination.hdfs.dir": "hdfs://c6401.ambari.apache.org:8020/ranger/audit",
+ "xasecure.audit.is.enabled": "true"
+ },
+ "ranger-kms-policymgr-ssl": {
+ "xasecure.policymgr.clientssl.keystore": "/usr/hdp/current/ranger-kms/conf/ranger-plugin-keystore.jks",
+ "xasecure.policymgr.clientssl.truststore.password": "changeit",
+ "xasecure.policymgr.clientssl.keystore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.truststore": "/usr/hdp/current/ranger-kms/conf/ranger-plugin-truststore.jks",
+ "xasecure.policymgr.clientssl.truststore.credential.file": "jceks://file{{credential_file}}",
+ "xasecure.policymgr.clientssl.keystore.password": "myKeyFilePassword"
+ },
+ "ranger-kms-security": {
+ "ranger.plugin.kms.policy.pollIntervalMs": "30000",
+ "ranger.plugin.kms.service.name": "{{repo_name}}",
+ "ranger.plugin.kms.policy.source.impl": "org.apache.ranger.admin.client.RangerAdminRESTClient",
+ "ranger.plugin.kms.policy.rest.ssl.config.file": "/etc/ranger/kms/conf/ranger-policymgr-ssl.xml",
+ "ranger.plugin.kms.policy.cache.dir": "/etc/ranger/{{repo_name}}/policycache",
+ "ranger.plugin.kms.policy.rest.url": "{{policymgr_mgr_url}}"
+ },
+ "ranger-kms-site": {
+ "ranger.service.https.port": "9393",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "xa.webapp.dir": "./webapp",
+ "ranger.service.host": "{{kms_host}}",
+ "ranger.service.shutdown.port": "7085",
+ "ranger.contextName": "/kms",
+ "ranger.service.http.port": "{{kms_port}}"
+ }
+ }
+}
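
For context, the fixture above sets "DB_FLAVOR": "UNSUPPORTED" in kms-properties, which is exactly the input the new validation from AMBARI-21882 rejects. Below is a minimal Python sketch of that kind of check, under stated assumptions: validate_db_flavor and SUPPORTED_DB_FLAVORS are illustrative names, not the actual Ambari API, and the real service scripts raise resource_management's Fail rather than ValueError.

# Hypothetical sketch of the DB_FLAVOR validation exercised by the fixture
# above. Names are illustrative; the supported-flavor set mirrors the common
# Ranger KMS database choices but is an assumption, not the shipped list.

SUPPORTED_DB_FLAVORS = {"MYSQL", "ORACLE", "POSTGRES", "MSSQL", "SQLA"}

def validate_db_flavor(kms_properties):
    """Reject configurations whose DB_FLAVOR is not a supported database."""
    flavor = kms_properties.get("DB_FLAVOR", "").upper()
    if flavor not in SUPPORTED_DB_FLAVORS:
        raise ValueError(
            "Unsupported database type: %s. Supported types: %s"
            % (flavor, ", ".join(sorted(SUPPORTED_DB_FLAVORS))))
    return flavor

# The fixture above drives the negative path:
try:
    validate_db_flavor({"DB_FLAVOR": "UNSUPPORTED"})
except ValueError as e:
    print(e)  # Unsupported database type: UNSUPPORTED. Supported types: ...
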
[06/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
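
The hive_unsupported_jdbc_type.json fixture below sets javax.jdo.option.ConnectionDriverName to com.mysql.jdbc.UnsupportedDriver, the value the new check rejects. A minimal sketch of that kind of driver-class validation follows, assuming hypothetical names (KNOWN_DRIVERS, driver_to_db_type) rather than the actual Ambari service-script API:

# Hypothetical sketch: infer the database type from the configured JDBC
# driver class and fail on anything unrecognized. The driver-class-to-type
# mapping lists well-known JDBC drivers and is illustrative only.

KNOWN_DRIVERS = {
    "com.mysql.jdbc.Driver": "mysql",
    "org.postgresql.Driver": "postgres",
    "oracle.jdbc.driver.OracleDriver": "oracle",
    "com.microsoft.sqlserver.jdbc.SQLServerDriver": "mssql",
}

def driver_to_db_type(hive_site):
    """Map the configured driver class to a database type or fail."""
    driver = hive_site.get("javax.jdo.option.ConnectionDriverName", "")
    try:
        return KNOWN_DRIVERS[driver]
    except KeyError:
        raise ValueError("Unsupported JDBC driver: %s" % driver)

# The fixture below drives the negative path:
try:
    driver_to_db_type(
        {"javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.UnsupportedDriver"})
except ValueError as e:
    print(e)  # Unsupported JDBC driver: com.mysql.jdbc.UnsupportedDriver
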
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/hive_unsupported_jdbc_type.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/hive_unsupported_jdbc_type.json b/ambari-server/src/test/python/common-services/configs/hive_unsupported_jdbc_type.json
new file mode 100644
index 0000000..d85dbfe
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/hive_unsupported_jdbc_type.json
@@ -0,0 +1,650 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "agent_stack_retry_count": "5",
+ "agent_stack_retry_on_unavailability": "false",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.6",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "db_name": "ambari",
+ "custom_mysql_jdbc_name": "mysql-connector-java.jar"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "SLIDER",
+ "role": "SLIDER",
+ "commandParams": {
+ "version": "2.5.0.0-1235",
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2"
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "hive-env" : {
+ "hcat_pid_dir": "/var/run/webhcat",
+ "hcat_user": "hcat",
+ "hive_ambari_database": "MySQL",
+ "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+ "hive_metastore_port": "9083",
+ "webhcat_user": "hcat",
+ "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n if [ -z \"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n else\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n fi\n fi\n\n# The heap size of the jvm stared by hive shell script can be controlled via:\n\nexport HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger heap size may be required when running queries over large number of files or partitions.\n# By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration Directory can
be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing extra ibraries required for hive compilation/execution can be controlled by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" ]; then\n export HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n export HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport METASTORE_PORT={{hive_metastore_port}}",
+ "hive_database_name": "hive",
+ "hive_database_type": "mysql",
+ "hive_pid_dir": "/var/run/hive",
+ "hive_log_dir": "/var/log/hive",
+ "hive_user": "hive",
+ "hcat_log_dir": "/var/log/webhcat",
+ "hive_database": "New MySQL Database",
+ "hive_security_authorization": "None"
+ },
+ "hive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.UnsupportedDriver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.authentication": "NOSASL",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true",
+ "hive.exec.scratchdir" : "/custompath/tmp/hive"
+ },
+ "slider-client": {
+ "slider.yarn.queue": "default"
+ },
+ "sqoop-site": {
+ "atlas.cluster.name": "c1",
+ "sqoop.job.data.publish.class": "org.apache.atlas.sqoop.hook.SqoopHook"
+ },
+ "mahout-env": {
+ "mahout_user": "mahout"
+ },
+ "hbase-env": {
+ "hbase_user": "hbase"
+ },
+ "yarn-env": {
+ "yarn_user": "yarn"
+ },
+ "mahout-log4j": {
+ "content": "\n #\n #\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing,\n # software distributed under the License is distributed on an\n # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n # KIND, either express or implied. See the License for the\n # specific language governing permissions a
nd limitations\n # under the License.\n #\n #\n #\n\n # Set everything to be logged to the console\n log4j.rootCategory=WARN, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n # Settings to quiet third party logs that are too verbose\n log4j.logger.org.eclipse.jetty=WARN\n log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=WARN\n log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=WARN"
+ },
+ "hadoop-env": {
+ "hdfs_user": "hdfs",
+ "hdfs_tmp_dir": "/tmp"
+ },
+ "core-site": {
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
+ },
+ "hdfs-site": {
+ "a": "b"
+ },
+ "yarn-site": {
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.address": "c6401.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "webhcat-site": {
+ "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
+ "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+ "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+ "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+ "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
+ },
+ "slider-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "slider-env": {
+ "content": "envproperties\nline2"
+ },
+ "gateway-site": {
+ "java.security.auth.login.config": "/etc/knox/conf/krb5JAASLogin.conf",
+ "gateway.hadoop.kerberos.secured": "false",
+ "gateway.gateway.conf.dir": "deployments",
+ "gateway.path": "gateway",
+ "sun.security.krb5.debug": "true",
+ "java.security.krb5.conf": "/etc/knox/conf/krb5.conf",
+ "gateway.port": "8443"
+ },
+
+ "users-ldif": {
+ "content": "\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the Lice
nse.\n\n version: 1\n\n # Please replace with site specific values\n dn: dc=hadoop,dc=apache,dc=org\n objectclass: organization\n objectclass: dcObject\n o: Hadoop\n dc: hadoop\n\n # Entry for a sample people container\n # Please replace with site specific values\n dn: ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: people\n\n # Entry for a sample end user\n # Please replace with site specific values\n dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Guest\n sn: User\n uid: guest\n userPassword:guest-password\n\n # entry for sample user admin\n dn:
uid=admin,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Admin\n sn: Admin\n uid: admin\n userPassword:admin-password\n\n # entry for sample user sam\n dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: sam\n sn: sam\n uid: sam\n userPassword:sam-password\n\n # entry for sample user tom\n dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: tom\n sn: tom\n uid: tom\n userPasswor
d:tom-password\n\n # create FIRST Level groups branch\n dn: ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: groups\n description: generic groups branch\n\n # create the analyst group under groups\n dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: analyst\n description:analyst group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n\n\n # create the scientist group under groups\n dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: scientist\n description: scientist group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org"
+ },
+
+ "topology": {
+ "content": "\n <topology>\n\n <gateway>\n\n <provider>\n <role>authentication</role>\n <name>ShiroProvider</name>\n <enabled>true</enabled>\n <param>\n <name>sessionTimeout</name>\n <value>30</value>\n </param>\n <param>\n <name>main.ldapRealm</name>\n <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n </param>\n <param>\n <name>main.ldapRealm.userDnTemplate</name>\n <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n </param>\n <param>\n <name>main.ldapRealm.contextFactory.url</name>\n <value>ldap://{{knox_host_name}}:33389</value>\n
</param>\n <param>\n <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n <value>simple</value>\n </param>\n <param>\n <name>urls./**</name>\n <value>authcBasic</value>\n </param>\n </provider>\n\n <provider>\n <role>identity-assertion</role>\n <name>Default</name>\n <enabled>true</enabled>\n </provider>\n\n </gateway>\n\n <service>\n <role>NAMENODE</role>\n <url>hdfs://{{namenode_host}}:{{namenode_rpc_port}}</url>\n </service>\n\n <service>\n <role>JOBTRACKER</role>\n <url>rpc://{{rm_host}}:{{jt_rpc_port}}</url>\n </service>\n\n <service>\n <role>WEBHDFS</role
>\n <url>http://{{namenode_host}}:{{namenode_http_port}}/webhdfs</url>\n </service>\n\n <service>\n <role>WEBHCAT</role>\n <url>http://{{webhcat_server_host}}:{{templeton_port}}/templeton</url>\n </service>\n\n <service>\n <role>OOZIE</role>\n <url>http://{{oozie_server_host}}:{{oozie_server_port}}/oozie</url>\n </service>\n\n <service>\n <role>WEBHBASE</role>\n <url>http://{{hbase_master_host}}:{{hbase_master_port}}</url>\n </service>\n\n <service>\n <role>HIVE</role>\n <url>http://{{hive_server_host}}:{{hive_http_port}}/{{hive_http_path}}</url>\n </service>\n\n <service>\n <role>RESOURCEMANAGER</role>\n <url>http://{{rm_host}}:{{rm_port}}/ws</url>\n </service>\n </topology>"
+ },
+
+ "ldap-log4j": {
+ "content": "\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License.\n #testing\n\n app.log.dir=${launcher.dir
}/../logs\n app.log.file=${launcher.name}.log\n\n log4j.rootLogger=ERROR, drfa\n log4j.logger.org.apache.directory.server.ldap.LdapServer=INFO\n log4j.logger.org.apache.directory=WARN\n\n log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n log4j.appender.drfa.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n"
+ },
+
+ "gateway-log4j": {
+ "content": "\n\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # \"License\"); you may not use this file except in compliance\n # with the License. You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License.\n\n app.log.dir=${launcher.dir}/../logs\n app.log.file=${launcher.name}.log\n
app.audit.file=${launcher.name}-audit.log\n\n log4j.rootLogger=ERROR, drfa\n\n log4j.logger.org.apache.hadoop.gateway=INFO\n #log4j.logger.org.apache.hadoop.gateway=DEBUG\n\n #log4j.logger.org.eclipse.jetty=DEBUG\n #log4j.logger.org.apache.shiro=DEBUG\n #log4j.logger.org.apache.http=DEBUG\n #log4j.logger.org.apache.http.client=DEBUG\n #log4j.logger.org.apache.http.headers=DEBUG\n #log4j.logger.org.apache.http.wire=DEBUG\n\n log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n log4j.appender.drfa.layout.ConversionPattern=%
d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n log4j.logger.audit=INFO, auditfile\n log4j.appender.auditfile=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.auditfile.File=${app.log.dir}/${app.audit.file}\n log4j.appender.auditfile.Append = true\n log4j.appender.auditfile.DatePattern = '.'yyyy-MM-dd\n log4j.appender.auditfile.layout = org.apache.hadoop.gateway.audit.log4j.layout.AuditLayout"
+ },
+ "knox-env": {
+ "knox_master_secret": "sa",
+ "knox_group": "knox",
+ "knox_pid_dir": "/var/run/knox",
+ "knox_user": "knox"
+ },
+ "kafka-env": {
+ "content": "\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin",
+ "kafka_user": "kafka",
+ "kafka_log_dir": "/var/log/kafka",
+ "kafka_pid_dir": "/var/run/kafka"
+ },
+ "kafka-log4j": {
+ "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\nkafka.logs.dir=logs\n\nlog4j.rootLogger=INFO, stdout\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache.log4j
.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log\nlog4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log\nlog4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-reques
t.log\nlog4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log\nlog4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log\nlog4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\n# Turn on all our debugging info\n#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender\n#log4j.logge
r.kafka.client.ClientUtils=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender\n#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG\nlog4j.logger.kafka=INFO, kafkaAppender\nlog4j.logger.kafka.network.RequestChannel$=WARN, requestAppender\nlog4j.additivity.kafka.network.RequestChannel$=false\n\n#log4j.logger.kafka.network.Processor=TRACE, requestAppender\n#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender\n#log4j.additivity.kafka.server.KafkaApis=false\nlog4j.logger.kafka.request.logger=WARN, requestAppender\nlog4j.additivity.kafka.request.logger=false\n\nlog4j.logger.kafka.controller=TRACE, controllerAppender\nlog4j.additivity.kafka.controller=false\n\nlog4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender\nlog4j.additivity.kafka.log.LogCleaner=false\n\nlog4j.logger.state.change.logger=TRACE, stateChangeAppender\nlog4j.additivity.state.change.logger=false"
+ },
+ "kafka-broker": {
+ "log.segment.bytes": "1073741824",
+ "socket.send.buffer.bytes": "102400",
+ "num.network.threads": "3",
+ "log.flush.scheduler.interval.ms": "3000",
+ "kafka.ganglia.metrics.host": "localhost",
+ "zookeeper.session.timeout.ms": "6000",
+ "replica.lag.time.max.ms": "10000",
+ "num.io.threads": "8",
+ "kafka.ganglia.metrics.group": "kafka",
+ "replica.lag.max.messages": "4000",
+ "port": "6667",
+ "log.retention.bytes": "-1",
+ "fetch.purgatory.purge.interval.requests": "10000",
+ "producer.purgatory.purge.interval.requests": "10000",
+ "default.replication.factor": "1",
+ "replica.high.watermark.checkpoint.interval.ms": "5000",
+ "zookeeper.connect": "c6402.ambari.apache.org:2181",
+ "controlled.shutdown.retry.backoff.ms": "5000",
+ "num.partitions": "1",
+ "log.flush.interval.messages": "10000",
+ "replica.fetch.min.bytes": "1",
+ "queued.max.requests": "500",
+ "controlled.shutdown.max.retries": "3",
+ "replica.fetch.wait.max.ms": "500",
+ "controlled.shutdown.enable": "false",
+ "log.roll.hours": "168",
+ "log.cleanup.interval.mins": "10",
+ "replica.socket.receive.buffer.bytes": "65536",
+ "zookeeper.connection.timeout.ms": "6000",
+ "replica.fetch.max.bytes": "1048576",
+ "num.replica.fetchers": "1",
+ "socket.request.max.bytes": "104857600",
+ "message.max.bytes": "1000000",
+ "zookeeper.sync.time.ms": "2000",
+ "socket.receive.buffer.bytes": "102400",
+ "controller.message.queue.size": "10",
+ "log.flush.interval.ms": "3000",
+ "log.dirs": "/tmp/log/dir",
+ "controller.socket.timeout.ms": "30000",
+ "replica.socket.timeout.ms": "30000",
+ "auto.create.topics.enable": "true",
+ "log.index.size.max.bytes": "10485760",
+ "kafka.ganglia.metrics.port": "8649",
+ "log.index.interval.bytes": "4096",
+ "log.retention.hours": "168"
+ },
+ "spark-defaults": {
+ "spark.yarn.applicationMaster.waitTries": "10",
+ "spark.history.kerberos.keytab": "none",
+ "spark.yarn.preserve.staging.files": "false",
+ "spark.yarn.submit.file.replication": "3",
+ "spark.history.kerberos.principal": "none",
+ "spark.yarn.driver.memoryOverhead": "384",
+ "spark.yarn.queue": "default",
+ "spark.yarn.containerLauncherMaxThreads": "25",
+ "spark.yarn.scheduler.heartbeat.interval-ms": "5000",
+ "spark.history.ui.port": "18080",
+ "spark.yarn.max.executor.failures": "3",
+ "spark.driver.extraJavaOptions": "",
+ "spark.history.provider": "org.apache.spark.deploy.yarn.history.YarnHistoryProvider",
+ "spark.yarn.am.extraJavaOptions": "",
+ "spark.yarn.executor.memoryOverhead": "384"
+ },
+ "spark-javaopts-properties": {
+ "content": " "
+ },
+ "spark-log4j-properties": {
+ "content": "\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO"
+ },
+ "spark-env": {
+ "content": "\n#!/usr/bin/env bash\n\n# This file is sourced when running various Spark programs.\n# Copy it as spark-env.sh and edit that to configure Spark for your site.\n\n# Options read in YARN client mode\n#SPARK_EXECUTOR_INSTANCES=\"2\" #Number of workers to start (Default: 2)\n#SPARK_EXECUTOR_CORES=\"1\" #Number of cores for the workers (Default: 1).\n#SPARK_EXECUTOR_MEMORY=\"1G\" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\n#SPARK_DRIVER_MEMORY=\"512 Mb\" #Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\n#SPARK_YARN_APP_NAME=\"spark\" #The name of your application (Default: Spark)\n#SPARK_YARN_QUEUE=\"~@~Xdefault~@~Y\" #The hadoop queue to use for allocation requests (Default: @~Xdefault~@~Y)\n#SPARK_YARN_DIST_FILES=\"\" #Comma separated list of files to be distributed with the job.\n#SPARK_YARN_DIST_ARCHIVES=\"\" #Comma separated list of archives to be distributed with the job.\n\n# Generic options for the daemons used in the standalone deploy mode\n\n# Alt
ernate conf dir. (Default: ${SPARK_HOME}/conf)\nexport SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf\n\n# Where log files are stored.(Default:${SPARK_HOME}/logs)\n#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs\nexport SPARK_LOG_DIR={{spark_log_dir}}\n\n# Where the pid file is stored. (Default: /tmp)\nexport SPARK_PID_DIR={{spark_pid_dir}}\n\n# A string representing this instance of spark.(Default: $USER)\nSPARK_IDENT_STRING=$USER\n\n# The scheduling priority for daemons. (Default: 0)\nSPARK_NICENESS=0\n\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\nif [ -d \"/etc/tez/conf/\" ]; then\n export TEZ_CONF_DIR=/etc/tez/conf\nelse\n export TEZ_CONF_DIR=\nfi",
+ "spark_pid_dir": "/var/run/spark",
+ "spark_log_dir": "/var/log/spark",
+ "spark_group": "spark",
+ "spark_user": "spark"
+ },
+ "spark2-env": {
+ "content": "\n#!/usr/bin/env bash\n\n# This file is sourced when running various Spark programs.\n# Copy it as spark-env.sh and edit that to configure Spark for your site.\n\n# Options read in YARN client mode\n#SPARK_EXECUTOR_INSTANCES=\"2\" #Number of workers to start (Default: 2)\n#SPARK_EXECUTOR_CORES=\"1\" #Number of cores for the workers (Default: 1).\n#SPARK_EXECUTOR_MEMORY=\"1G\" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\n#SPARK_DRIVER_MEMORY=\"512 Mb\" #Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\n#SPARK_YARN_APP_NAME=\"spark\" #The name of your application (Default: Spark)\n#SPARK_YARN_QUEUE=\"~@~Xdefault~@~Y\" #The hadoop queue to use for allocation requests (Default: @~Xdefault~@~Y)\n#SPARK_YARN_DIST_FILES=\"\" #Comma separated list of files to be distributed with the job.\n#SPARK_YARN_DIST_ARCHIVES=\"\" #Comma separated list of archives to be distributed with the job.\n\n# Generic options for the daemons used in the standalone deploy mode\n\n# Alt
ernate conf dir. (Default: ${SPARK_HOME}/conf)\nexport SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf\n\n# Where log files are stored.(Default:${SPARK_HOME}/logs)\n#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs\nexport SPARK_LOG_DIR={{spark_log_dir}}\n\n# Where the pid file is stored. (Default: /tmp)\nexport SPARK_PID_DIR={{spark_pid_dir}}\n\n# A string representing this instance of spark.(Default: $USER)\nSPARK_IDENT_STRING=$USER\n\n# The scheduling priority for daemons. (Default: 0)\nSPARK_NICENESS=0\n\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\nif [ -d \"/etc/tez/conf/\" ]; then\n export TEZ_CONF_DIR=/etc/tez/conf\nelse\n export TEZ_CONF_DIR=\nfi",
+ "spark_pid_dir": "/var/run/spark",
+ "spark_log_dir": "/var/log/spark",
+ "spark_group": "spark",
+ "spark_user": "spark"
+ },
+ "spark-metrics-properties": {
+ "content": "\n# syntax: [instance].sink|source.[name].[options]=[value]\n\n# This file configures Spark's internal metrics system. The metrics system is\n# divided into instances which correspond to internal components.\n# Each instance can be configured to report its metrics to one or more sinks.\n# Accepted values for [instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and \"applications\". A wild card \"*\" can be used as an instance name, in\n# which case all instances will inherit the supplied property.\n#\n# Within an instance, a \"source\" specifies a particular set of grouped metrics.\n# there are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will\n# collect a Spark component's internal state. Each instance is paired with a\n# Spark source that is added automatically.\n# 2. Common sources, like JvmSource, which will collect low level state.\n# These can be added through configuration options and are then
loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The sink|source field specifies whether the property relates to a sink or\n# source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The [options] field is the specific property of this source or sink. The\n# source or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a new sink, set the \"class\" option to a fully qualified class\n# name (see examples below).\n# 2. Some sinks involve a polling period. The minimum allowed polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by more specific properties.\n# For example, master.sink.console.period takes precedence over\n# *.sink.console.period.\n# 4. A metrics specific configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you want to\
n# customize metrics system. You can also put the file in ${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet is added by default as a sink in master, worker and client\n# driver, you can send http request \"/metrics/json\" to get a snapshot of all the\n# registered metrics in json format. For master, requests \"/metrics/master/json\" and\n# \"/metrics/applications/json\" can be sent seperately to get metrics snapshot of\n# instance master and applications. MetricsServlet may not be configured by self.\n#\n\n## List of available sinks and their properties.\n\n# org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n\n# org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: Default: Description:\n# host N
ONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode ('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# path VARIES* Path prefix from the web server root\n# sample false Whether to show entire set of samples for histograms ('false' or 'true')\n#\n# * Default path is /metrics/json for all instances except the master. The master has two paths:\n# /metrics/aplications/json # App information\n# /metrics/master/json # Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll period\n# prefix EMPTY STRING Prefix to prepend to metric name\n\n##
Examples\n# Enable JmxSink for all instances by class name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable ConsoleSink for all instances by class name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# Polling period for ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# Master instance overlap polling period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance overlap polling period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable jvm source for instance master, worker, driver and executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSo
urce\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
+ },
+ "spark-metrics-properties": {
+ "content": "\n# syntax: [instance].sink|source.[name].[options]=[value]\n\n# This file configures Spark's internal metrics system. The metrics system is\n# divided into instances which correspond to internal components.\n# Each instance can be configured to report its metrics to one or more sinks.\n# Accepted values for [instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and \"applications\". A wild card \"*\" can be used as an instance name, in\n# which case all instances will inherit the supplied property.\n#\n# Within an instance, a \"source\" specifies a particular set of grouped metrics.\n# there are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will\n# collect a Spark component's internal state. Each instance is paired with a\n# Spark source that is added automatically.\n# 2. Common sources, like JvmSource, which will collect low level state.\n# These can be added through configuration options and are then
loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The sink|source field specifies whether the property relates to a sink or\n# source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The [options] field is the specific property of this source or sink. The\n# source or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a new sink, set the \"class\" option to a fully qualified class\n# name (see examples below).\n# 2. Some sinks involve a polling period. The minimum allowed polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by more specific properties.\n# For example, master.sink.console.period takes precedence over\n# *.sink.console.period.\n# 4. A metrics specific configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you want to\
n# customize metrics system. You can also put the file in ${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet is added by default as a sink in master, worker and client\n# driver, you can send http request \"/metrics/json\" to get a snapshot of all the\n# registered metrics in json format. For master, requests \"/metrics/master/json\" and\n# \"/metrics/applications/json\" can be sent seperately to get metrics snapshot of\n# instance master and applications. MetricsServlet may not be configured by self.\n#\n\n## List of available sinks and their properties.\n\n# org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n\n# org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: Default: Description:\n# host N
ONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode ('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# path VARIES* Path prefix from the web server root\n# sample false Whether to show entire set of samples for histograms ('false' or 'true')\n#\n# * Default path is /metrics/json for all instances except the master. The master has two paths:\n# /metrics/aplications/json # App information\n# /metrics/master/json # Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll period\n# prefix EMPTY STRING Prefix to prepend to metric name\n\n##
Examples\n# Enable JmxSink for all instances by class name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable ConsoleSink for all instances by class name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# Polling period for ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# Master instance overlap polling period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance overlap polling period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable jvm source for instance master, worker, driver and executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSo
urce\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
+ },
+ "livy-log4j-properties": {
+ "content": "\n # Set everything to be logged to the console\n log4j.rootCategory=INFO, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n log4j.logger.org.eclipse.jetty=WARN"
+ },
+ "livy2-log4j-properties": {
+ "content": "\n # Set everything to be logged to the console\n log4j.rootCategory=INFO, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n log4j.logger.org.eclipse.jetty=WARN"
+ },
+ "livy-conf": {
+ "livy.server.port": "8998",
+ "livy.server.csrf_protection.enabled": "true",
+ "livy.environment": "production",
+ "livy.impersonation.enabled": "true",
+ "livy.server.session.timeout": "3600000"
+ },
+ "livy2-conf": {
+ "livy.server.port": "8999",
+ "livy.server.csrf_protection.enabled": "true",
+ "livy.environment": "production",
+ "livy.impersonation.enabled": "true",
+ "livy.server.session.timeout": "3600000"
+ },
+ "livy-spark-blacklist": {
+ "content": "\n #\n # Configuration override / blacklist. Defines a list of properties that users are not allowed\n # to override when starting Spark sessions.\n #\n # This file takes a list of property names (one per line). Empty lines and lines starting with \"#\"\n # are ignored.\n #"
+ },
+ "livy2-spark-blacklist": {
+ "content": "\n #\n # Configuration override / blacklist. Defines a list of properties that users are not allowed\n # to override when starting Spark sessions.\n #\n # This file takes a list of property names (one per line). Empty lines and lines starting with \"#\"\n # are ignored.\n #"
+ },
+ "livy-env": {
+ "livy_group": "livy",
+ "spark_home": "/usr/hdp/current/spark-client",
+ "content": "\n #!/usr/bin/env bash\n\n # - SPARK_HOME Spark which you would like to use in livy\n # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.\n # - LIVY_LOG_DIR Where log files are stored. (Default: ${LIVY_HOME}/logs)\n # - LIVY_PID_DIR Where the pid file is stored. (Default: /tmp)\n # - LIVY_SERVER_JAVA_OPTS Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)\n export SPARK_HOME=/usr/hdp/current/spark-client\n export HADOOP_CONF_DIR=/etc/hadoop/conf\n export LIVY_LOG_DIR={{livy_log_dir}}\n export LIVY_PID_DIR={{livy_pid_dir}}\n export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"",
+ "livy_pid_dir": "/var/run/livy",
+ "livy_log_dir": "/var/log/livy",
+ "livy_user": "livy"
+ },
+ "livy2-env": {
+ "livy2_group": "livy",
+ "spark_home": "/usr/hdp/current/spark2-client",
+ "content": "\n #!/usr/bin/env bash\n\n # - SPARK_HOME Spark which you would like to use in livy\n # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.\n # - LIVY_LOG_DIR Where log files are stored. (Default: ${LIVY_HOME}/logs)\n # - LIVY_PID_DIR Where the pid file is stored. (Default: /tmp)\n # - LIVY_SERVER_JAVA_OPTS Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)\n export SPARK_HOME=/usr/hdp/current/spark2-client\n export HADOOP_CONF_DIR=/etc/hadoop/conf\n export LIVY_LOG_DIR={{livy_log_dir}}\n export LIVY_PID_DIR={{livy_pid_dir}}\n export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"",
+ "livy2_pid_dir": "/var/run/livy2",
+ "livy2_log_dir": "/var/log/livy2",
+ "livy2_user": "livy"
+ },
+ "infra-solr-env": {
+ "infra_solr_znode": "/infra-solr",
+ "infra_solr_user": "solr",
+ "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
+ },
+ "infra-solr-client-log4j" : {
+ "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
+ "content" : "content"
+ },
+ "application-properties": {
+ "atlas.cluster.name" : "c2",
+ "atlas.rest.address": "http://c6401.ambari.apache.org:21000",
+ "atlas.graph.storage.backend": "berkeleyje",
+ "atlas.graph.storage.directory": "data/berkley",
+ "atlas.graph.index.search.backend": "solr5",
+ "atlas.graph.index.search.directory": "data/es",
+ "atlas.graph.index.search.elasticsearch.client-only": false,
+ "atlas.graph.index.search.elasticsearch.local-mode": true,
+ "atlas.lineage.hive.table.type.name": "Table",
+ "atlas.lineage.hive.column.type.name": "Column",
+ "atlas.lineage.hive.table.column.name": "columns",
+ "atlas.lineage.hive.process.type.name": "LoadProcess",
+ "atlas.lineage.hive.process.inputs.name": "inputTables",
+ "atlas.lineage.hive.process.outputs.name": "outputTables",
+ "atlas.enableTLS": false,
+ "atlas.authentication.method": "simple",
+ "atlas.authentication.principal": "atlas",
+ "atlas.authentication.keytab": "/etc/security/keytabs/atlas.service.keytab",
+ "atlas.http.authentication.enabled": false,
+ "atlas.http.authentication.type": "simple",
+ "atlas.http.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
+ "atlas.http.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+ "atlas.http.authentication.kerberos.name.rules": "DEFAULT",
+ "atlas.server.http.port" : "21000",
+ "atlas.notification.embedded" : false,
+ "atlas.kafka.bootstrap.servers" : "c6401.ambari.apache.org:6667",
+ "atlas.kafka.data" : "/usr/hdp/current/atlas-server/data/kafka",
+ "atlas.kafka.entities.group.id" : "entities",
+ "atlas.kafka.hook.group.id" : "atlas",
+ "atlas.kafka.zookeeper.connect" : "c6401.ambari.apache.org:2181"
+ },
+ "atlas-env": {
+ "content": "# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java64_home}}\n# any additional java opts you want to set. This will apply to both client and server operations\nexport METADATA_OPTS={{metadata_opts}}\n# metadata configuration directory\nexport METADATA_CONF={{conf_dir}}\n# Where log files are stored. Defatult is logs directory under the base install location\nexport METADATA_LOG_DIR={{log_dir}}\n# additional classpath entries\nexport METADATACPPATH={{metadata_classpath}}\n# data dir\nexport METADATA_DATA_DIR={{data_dir}}\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\nexport METADATA_EXPANDED_WEBAPP_DIR={{expanded_war_dir}}",
+ "metadata_user": "atlas",
+ "metadata_port": 21000,
+ "metadata_pid_dir": "/var/run/atlas",
+ "metadata_log_dir": "/var/log/atlas",
+ "metadata_data_dir": "/var/lib/atlas/data",
+ "metadata_expanded_war_dir": "/var/lib/atlas/server/webapp"
+ },
+ "atlas-log4j": {
+ "content": "<property><name>content</name><description>Custom log4j.properties</description><value></value></property>",
+ "atlas_log_level": "debug",
+ "audit_log_level": "OFF"
+ },
+ "atlas-solrconfig": {
+ "content": "<property><name>content</name><description>Custom solrconfig properties</description><value></value></property>"
+ },
+ "zeppelin-env": {
+ "zeppelin.server.kerberos.keytab": "",
+ "shiro_ini_content": "\n[users]\n# List of users with their password allowed to access Zeppelin.\n# To use a different strategy (LDAP / Database / ...) check the shiro doc at http://shiro.apache.org/configuration.html#Configuration-INISections\n#admin = password1\n#user1 = password2, role1, role2\n#user2 = password3, role3\n#user3 = password4, role2\n\n# Sample LDAP configuration, for user Authentication, currently tested for single Realm\n[main]\n#ldapRealm = org.apache.shiro.realm.ldap.JndiLdapRealm\n#ldapRealm.userDnTemplate = uid={0},cn=users,cn=accounts,dc=hortonworks,dc=com\n#ldapRealm.contextFactory.url = ldap://ldaphost:389\n#ldapRealm.contextFactory.authenticationMechanism = SIMPLE\n#sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager\n#securityManager.sessionManager = $sessionManager\n# 86,400,000 milliseconds = 24 hour\n#securityManager.sessionManager.globalSessionTimeout = 86400000\nshiro.loginUrl = /api/login\n\n[urls]\n# anon means the acce
ss is anonymous.\n# authcBasic means Basic Auth Security\n# To enforce security, comment the line below and uncomment the next one\n/api/version = anon\n/** = anon\n#/** = authc",
+ "zeppelin.spark.jar.dir": "/apps/zeppelin",
+ "zeppelin.executor.mem": "512m",
+ "zeppelin_pid_dir": "/var/run/zeppelin",
+ "zeppelin.executor.instances": "2",
+ "log4j_properties_content": "\nlog4j.rootLogger = INFO, dailyfile\nlog4j.appender.stdout = org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout = org.apache.log4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n\nlog4j.appender.dailyfile.DatePattern=.yyyy-MM-dd\nlog4j.appender.dailyfile.Threshold = INFO\nlog4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.dailyfile.File = ${zeppelin.log.file}\nlog4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout\nlog4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n",
+ "zeppelin.server.kerberos.principal": "",
+ "zeppelin_user": "zeppelin",
+ "zeppelin_env_content": "\n# Spark master url. eg. spark://master_addr:7077. Leave empty if you want to use local mode\nexport MASTER=yarn-client\nexport SPARK_YARN_JAR={{spark_jar}}\n\n\n# Where log files are stored. PWD by default.\nexport ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}\n\n# The pid files are stored. /tmp by default.\nexport ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}\n\n\nexport JAVA_HOME={{java64_home}}\n\n# Additional jvm options. for example, export ZEPPELIN_JAVA_OPTS=\"-Dspark.executor.memory=8g -Dspark.cores.max=16\"\nexport ZEPPELIN_JAVA_OPTS=\"-Dhdp.version={{full_stack_version}} -Dspark.executor.memory={{executor_mem}} -Dspark.executor.instances={{executor_instances}} -Dspark.yarn.queue={{spark_queue}}\"\n\n\n# Zeppelin jvm mem options Default -Xmx1024m -XX:MaxPermSize=512m\n# export ZEPPELIN_MEM\n\n# zeppelin interpreter process jvm mem options. Defualt = ZEPPELIN_MEM\n# export ZEPPELIN_INTP_MEM\n\n# zeppelin interpreter process jvm options. Default = ZEPPELIN_JA
VA_OPTS\n# export ZEPPELIN_INTP_JAVA_OPTS\n\n# Where notebook saved\n# export ZEPPELIN_NOTEBOOK_DIR\n\n# Id of notebook to be displayed in homescreen. ex) 2A94M5J1Z\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN\n\n# hide homescreen notebook from list when this value set to \"true\". default \"false\"\n# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE\n\n# Bucket where notebook saved\n# export ZEPPELIN_NOTEBOOK_S3_BUCKET\n\n# User in bucket where notebook saved. For example bucket/user/notebook/2A94M5J1Z/note.json\n# export ZEPPELIN_NOTEBOOK_S3_USER\n\n# A string representing this instance of zeppelin. $USER by default\n# export ZEPPELIN_IDENT_STRING\n\n# The scheduling priority for daemons. Defaults to 0.\n# export ZEPPELIN_NICENESS\n\n\n#### Spark interpreter configuration ####\n\n## Use provided spark installation ##\n## defining SPARK_HOME makes Zeppelin run spark interpreter process using spark-submit\n##\n# (required) When it is defined, load it instead of Zeppelin embedded Spark libraries\n
export SPARK_HOME={{spark_home}}\n\n# (optional) extra options to pass to spark submit. eg) \"--driver-memory 512M --executor-memory 1G\".\n# export SPARK_SUBMIT_OPTIONS\n\n## Use embedded spark binaries ##\n## without SPARK_HOME defined, Zeppelin still able to run spark interpreter process using embedded spark binaries.\n## however, it is not encouraged when you can define SPARK_HOME\n##\n# Options read in YARN client mode\n# yarn-site.xml is located in configuration directory in HADOOP_CONF_DIR.\nexport HADOOP_CONF_DIR=/etc/hadoop/conf\n\n# Pyspark (supported with Spark 1.2.1 and above)\n# To configure pyspark, you need to set spark distribution's path to 'spark.home' property in Interpreter setting screen in Zeppelin GUI\n# path to the python command. must be the same path on the driver(Zeppelin) and all workers.\n# export PYSPARK_PYTHON\n\nexport PYTHONPATH=\"${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.8.2.1-src.zip\"\nexport SPARK_YARN_USER_ENV=\"PYTHONPATH=${PYTHONPAT
H}\"\n\n## Spark interpreter options ##\n##\n# Use HiveContext instead of SQLContext if set true. true by default.\n# export ZEPPELIN_SPARK_USEHIVECONTEXT\n\n# Execute multiple SQL concurrently if set true. false by default.\n# export ZEPPELIN_SPARK_CONCURRENTSQL\n\n# Max number of SparkSQL result to display. 1000 by default.\n# export ZEPPELIN_SPARK_MAXRESULT",
+ "zeppelin_log_dir": "/var/log/zeppelin",
+ "zeppelin_group": "zeppelin"
+ },
+ "zeppelin-config": {
+ "zeppelin.server.port": "9995",
+ "zeppelin.server.ssl.port": "9995",
+ "zeppelin.ssl.truststore.password": "change me",
+ "zeppelin.interpreters": "org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+ "zeppelin.interpreter.group.order": "spark,angular,jdbc,livy,md,sh",
+ "zeppelin.ssl.truststore.path": "conf/truststore",
+ "zeppelin.notebook.dir": "notebook",
+ "zeppelin.ssl.keystore.password": "change me",
+ "zeppelin.ssl.keystore.path": "conf/keystore",
+ "zeppelin.server.addr": "0.0.0.0",
+ "zeppelin.ssl.client.auth": "false",
+ "zeppelin.notebook.homescreen": " ",
+ "zeppelin.interpreter.dir": "interpreter",
+ "zeppelin.ssl.keystore.type": "JKS",
+ "zeppelin.notebook.s3.user": "user",
+ "zeppelin.ssl.key.manager.password": "change me",
+ "zeppelin.anonymous.allowed": "true",
+ "zeppelin.ssl.truststore.type": "JKS",
+ "zeppelin.ssl": "false",
+ "zeppelin.notebook.storage": "org.apache.zeppelin.notebook.repo.VFSNotebookRepo",
+ "zeppelin.websocket.max.text.message.size": "1024000",
+ "zeppelin.interpreter.connect.timeout": "30000",
+ "zeppelin.notebook.s3.bucket": "zeppelin",
+ "zeppelin.notebook.homescreen.hide": "false",
+ "zeppelin.server.allowed.origins": "*"
+ },
+ "zoo.cfg": {
+ "clientPort": "2181"
+ },
+ "ranger-hbase-plugin-properties": {
+ "ranger-hbase-plugin-enabled":"yes"
+ },
+ "ranger-hive-plugin-properties": {
+ "ranger-hive-plugin-enabled":"yes"
+ },
+ "ranger-env": {
+ "xml_configurations_supported" : "true"
+ },
+ "tagsync-application-properties": {
+ "atlas.kafka.hook.group.id": "atlas",
+ "atlas.kafka.zookeeper.connect": "os-mv-31-dev-4.novalocal:2181",
+ "atlas.kafka.acks": "1",
+ "atlas.kafka.entities.group.id": "ranger_entities_consumer",
+ "atlas.kafka.data": "/usr/hdp/current/atlas-server/data/kafka",
+ "atlas.kafka.bootstrap.servers": "localhost:2181",
+ "atlas.notification.embedded": "false"
+ },
+ "ranger-tagsync-site": {
+ "ranger.tagsync.sink.impl.class": "org.apache.ranger.tagsync.sink.tagadmin.TagAdminRESTSink",
+ "ranger.tagsync.atlasrestsource.endpoint": "",
+ "ranger.tagsync.tagadmin.rest.ssl.config.file": "/usr/hdp/current/ranger-tagsync/conf/mytruststore.jks",
+ "ranger.tagsync.logdir": "/var/log/ranger/tagsync",
+ "ranger.tagsync.filesource.filename": "/usr/hdp/current/ranger-tagsync/conf/etc/ranger/data/tags.json",
+ "ranger.tagsync.enabled": "true",
+ "ranger.tagsync.tagadmin.rest.url": "{{ranger_external_url}}",
+ "ranger.tagsync.atlasrestsource.download.interval": "",
+ "ranger.tagsync.filesource.modtime.check.interval": "60000",
+ "ranger.tagsync.tagadmin.password": "rangertagsync",
+ "ranger.tagsync.source.impl.class": "file",
+ "ranger.tagsync.source.atlas.custom.resource.mappers": "",
+ "ranger.tagsync.tagadmin.alias": "tagsync.tagadmin",
+ "ranger.tagsync.tagadmin.keystore": "/usr/hdp/current/ranger-tagsync/conf/rangertagsync.jceks",
+ "ranger.tagsync.atlas.to.service.mapping": ""
+ },
+ "druid-env": {
+ "druid_log_dir" : "/var/log/druid",
+ "druid_pid_dir" : "/var/run/druid",
+ "content" : "#!/bin/bash\n # Set DRUID specific environment variables here.\n# The java implementation to use\nexport JAVA_HOME={{java8_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport DRUID_PID_DIR={{druid_pid_dir}}\nexport DRUID_LOG_DIR={{druid_log_dir}}\nexport DRUID_CONF_DIR={{druid_conf_dir}}\nexport DRUID_LIB_DIR={{druid_home}}/lib",
+ "druid.coordinator.jvm.heap.memory" : 1024,
+ "druid.coordinator.jvm.direct.memory": 2048,
+ "druid.coordinator.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.broker.jvm.heap.memory" : 1024,
+ "druid.broker.jvm.direct.memory": 2048,
+ "druid.broker.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.middlemanager.jvm.heap.memory" : 1024,
+ "druid.middlemanager.jvm.direct.memory": 2048,
+ "druid.middlemanager.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.historical.jvm.heap.memory" : 1024,
+ "druid.historical.jvm.direct.memory": 2048,
+ "druid.historical.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.overlord.jvm.heap.memory" : 1024,
+ "druid.overlord.jvm.direct.memory": 2048,
+ "druid.overlord.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid.router.jvm.heap.memory" : 1024,
+ "druid.router.jvm.direct.memory": 2048,
+ "druid.router.jvm.opts": "-Duser.timezone=UTC -Dfile.encoding=UTF-8",
+ "druid_user": "druid"
+ },
+ "druid-common" : {
+ "druid.metadata.storage.type" : "mysql",
+ "druid.metadata.storage.connector.connectURI" : "jdbc:mysql://my-db-host:3306/druid?createDatabaseIfNotExist=true",
+ "druid.metadata.storage.connector.user" : "druid",
+ "druid.metadata.storage.connector.password" : "diurd",
+ "druid.storage.type" : "hdfs",
+ "druid.storage.storageDirectory" : "/user/druid/data",
+ "druid.indexer.logs.type": "hdfs",
+ "druid.indexer.logs.directory": "/user/druid/logs",
+ "druid.extensions.pullList": "[\"custom-druid-extension\"]",
+ "druid.extensions.repositoryList": "[\"http://custom-mvn-repo/public/release\"]",
+ "druid.extensions.loadList": "[\"mysql-metadata-storage\", \"druid-datasketches\"]",
+ "druid.security.extensions.loadList": "[\"druid-kerberos\"]"
+ },
+ "druid-historical" : {
+ "druid.segmentCache.infoDir" : "/apps/druid/segmentCache/info_dir",
+ "druid.segmentCache.locations" :"[{\"path\":\"/apps/druid/segmentCache\",\"maxSize\":300000000000}]"
+ },
+ "druid-coordinator" : {
+ "druid.service" : "druid/coordinator"
+ },
+ "druid-overlord" : {
+ "druid.service" : "druid/overlord"
+ },
+ "druid-broker" : {
+ "druid.service" : "druid/broker"
+ },
+ "druid-middlemanager" : {
+ "druid.service" : "druid/middlemanager",
+ "druid.indexer.task.hadoopWorkingPath" : "/tmp/druid-indexing",
+ "druid.indexer.task.baseTaskDir" : "/apps/druid/tasks"
+ },
+ "druid-router" : {
+ "druid.service" : "druid/router"
+ },
+ "druid-log4j" : {
+ "content" : "<![CDATA[<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n ~ Licensed to the Apache Software Foundation (ASF) under one\n ~ or more contributor license agreements. See the NOTICE file\n ~ distributed with this work for additional information\n ~ regarding copyright ownership. The ASF licenses this file\n ~ to you under the Apache License, Version 2.0 (the\n ~ \"License\"); you may not use this file except in compliance\n ~ with the License. You may obtain a copy of the License at\n ~\n ~ http://www.apache.org/licenses/LICENSE-2.0\n ~\n ~ Unless required by applicable law or agreed to in writing, software\n ~ distributed under the License is distributed on an \"AS IS\" BASIS,\n ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n ~ See the License for the specific language governing permissions and\n ~ limitations under the License.\n -->\n <Configuration>\n <Appenders>\n <Console name=\"Console\
" target=\"SYSTEM_OUT\">\n <PatternLayout pattern=\"%d{ISO8601} %p [%t] %c - %m%n\"/>\n </Console>\n </Appenders>\n <Loggers>\n <Logger name=\"com.metamx\" level=\"{{metamx_log_level}}\"/>\n <Logger name=\"io.druid\" level=\"{{druid_log_level}}\"/>\n <Root level=\"{{root_log_level}}\">\n <AppenderRef ref=\"Console\"/>\n </Root>\n </Loggers>\n </Configuration>\n ]]>\n"
+ },
+ "druid-logrotate" : {
+ "content" : "<![CDATA[\n {{druid_log_dir}}/*.log {\n copytruncate\n rotate 7\n daily\n nocompress\n missingok\n notifempty\n create 660 druid users\n dateext\n dateformat -%Y-%m-%d-%s\n }\n ]]>\n"
+ },
+ "druid-superset" : {
+ "SUPERSET_DATABASE_TYPE" : "sqllite"
+ }
+ },
+ "configuration_attributes": {
+ "sqoop-site": {},
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "yarn-site": {
+ "final": {
+ "is_supported_yarn_ranger": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ },
+ "knox-env": {},
+ "gateway-site": {},
+ "users-ldif": {},
+ "kafka-env": {},
+ "kafka-log4j": {},
+ "kafka-broker": {},
+ "metadata-env": {},
+ "atlas-hbase-site": {},
+ "tagsync-application-properties": {},
+ "ranger-tagsync-site": {}
+ },
+ "configurationTags": {
+ "slider-client": {
+ "tag": "version1"
+ },
+ "slider-log4j": {
+ "tag": "version1"
+ },
+ "slider-env": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "gateway-site": {
+ "tag": "version1"
+ },
+ "topology": {
+ "tag": "version1"
+ },
+ "users-ldif": {
+ "tag": "version1"
+ },
+ "kafka-env": {
+ "tag": "version1"
+ },
+ "kafka-log4j": {
+ "tag": "version1"
+ },
+ "kafka-broker": {
+ "tag": "version1"
+ },
+ "metadata-env": {
+ "tag": "version1"
+ },
+ "tagsync-application-properties": {
+ "tag": "version1"
+ },
+ "ranger-tagsync-site": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "knox_gateway_hosts": [
+ "jaimin-knox-1.c.pramod-thangali.internal"
+ ],
+ "kafka_broker_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "infra_solr_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_tagsync_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "atlas_server_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zeppelin_master_hosts": [
+ "c6401.ambari.apache.org"
+ ]
+ }
+}
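The fixture above keeps all service settings under "configurations", attribute metadata under "configuration_attributes" and "configurationTags", and host placement under "clusterHostInfo". A minimal sketch of pulling values out of such a file with Jackson; the file name and the Jackson dependency are assumptions for illustration, not part of the fixture:

    import java.io.File;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class CommandJsonProbe {
      public static void main(String[] args) throws Exception {
        // Parse the command JSON as a generic tree; no POJO binding needed.
        JsonNode root = new ObjectMapper().readTree(new File("sqoop_unsupported_jdbc_driver.json"));
        // A nested service config value under "configurations".
        System.out.println(root.path("configurations").path("kafka-broker").path("port").asText()); // 6667
        // First broker host from "clusterHostInfo".
        System.out.println(root.path("clusterHostInfo").path("kafka_broker_hosts").path(0).asText());
      }
    }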
[21/57] [abbrv] ambari git commit: AMBARI-21898. Property provider
in-memory maps are refreshed too slowly after config updates. (swagle)
Posted by lp...@apache.org.
AMBARI-21898. Property provider in-memory maps are refreshed too slowly after config updates. (swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5e242c9c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5e242c9c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5e242c9c
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 5e242c9ce099e7a72934974052ad56dc25cdb7ae
Parents: d7a787b
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Sep 7 11:58:47 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Sep 7 11:58:47 2017 -0700
----------------------------------------------------------------------
.../internal/AbstractProviderModule.java | 107 ++++++++-----------
.../org/apache/ambari/server/state/Cluster.java | 5 +
.../server/state/cluster/ClusterImpl.java | 13 ++-
3 files changed, 58 insertions(+), 67 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5e242c9c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 77549f5..1cd2d10 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -51,7 +51,6 @@ import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.PropertyProvider;
import org.apache.ambari.server.controller.spi.ProviderModule;
-import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceProvider;
import org.apache.ambari.server.controller.spi.SystemException;
@@ -67,6 +66,9 @@ import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,10 +86,6 @@ public abstract class AbstractProviderModule implements ProviderModule,
private static final int PROPERTY_REQUEST_CONNECT_TIMEOUT = 5000;
private static final int PROPERTY_REQUEST_READ_TIMEOUT = 10000;
- private static final String CLUSTER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "cluster_name");
- private static final String HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("HostRoles", "cluster_name");
- private static final String HOST_COMPONENT_HOST_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("HostRoles", "host_name");
- private static final String HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("HostRoles", "component_name");
private static final String GANGLIA_SERVER = "GANGLIA_SERVER";
private static final String METRIC_SERVER = "METRICS_COLLECTOR";
private static final String PROPERTIES_CATEGORY = "properties";
@@ -220,6 +218,11 @@ public abstract class AbstractProviderModule implements ProviderModule,
*/
private final Map<Resource.Type, List<PropertyProvider>> propertyProviders = new HashMap<>();
+ /*
+ * TODO: Instantiation for the concrete impl of this class is not done through
+ * the dependency injector (Guice), so none of these field initializations
+ * are going to work unless the refactoring is complete.
+ */
@Inject
AmbariManagementController managementController;
@@ -244,6 +247,8 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Inject
protected AmbariEventPublisher eventPublisher;
+ @Inject
+ private Clusters clusters;
/**
* The map of host components.
@@ -258,8 +263,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
/**
* JMX ports read from the configs
*/
- private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>> >jmxPortMap =
- Collections.synchronizedMap(new HashMap<String, ConcurrentMap<String, ConcurrentMap<String, String>>>());
+ private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>>> jmxPortMap = new ConcurrentHashMap<>(1);
private volatile boolean initialized = false;
@@ -293,6 +297,10 @@ public abstract class AbstractProviderModule implements ProviderModule,
if (null == metricsCollectorHAManager && null != managementController) {
metricsCollectorHAManager = managementController.getMetricsCollectorHAManager();
}
+
+ if (null == clusters && null != managementController) {
+ clusters = managementController.getClusters();
+ }
}
@@ -516,17 +524,17 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Override
public String getPort(String clusterName, String componentName, String hostName, boolean httpsEnabled) throws SystemException {
- // Parent map need not be synchronized
- ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts = jmxPortMap.get(clusterName);
- if (clusterJmxPorts == null) {
+ ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts;
+ // Still need a double-check to ensure single initialization
+ if (!jmxPortMap.containsKey(clusterName)) {
synchronized (jmxPortMap) {
- clusterJmxPorts = jmxPortMap.get(clusterName);
- if (clusterJmxPorts == null) {
+ if (!jmxPortMap.containsKey(clusterName)) {
clusterJmxPorts = new ConcurrentHashMap<>();
jmxPortMap.put(clusterName, clusterJmxPorts);
}
}
}
+ clusterJmxPorts = jmxPortMap.get(clusterName);
Service.Type service = componentServiceMap.get(componentName);
if (service != null) {
@@ -858,49 +866,33 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
}
+ // TODO: Fix for multi-service feature support (trunk)
+ // Called from a synchronized block!
private void initProviderMaps() throws SystemException {
- ResourceProvider provider = getResourceProvider(Resource.Type.Cluster);
-
- Set<String> propertyIds = new HashSet<>();
- propertyIds.add(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID);
-
- Map<String, String> requestInfoProperties = new HashMap<>();
- requestInfoProperties.put(ClusterResourceProvider.GET_IGNORE_PERMISSIONS_PROPERTY_ID, "true");
-
- Request request = PropertyHelper.getReadRequest(propertyIds,
- requestInfoProperties, null, null, null);
-
- try {
- jmxPortMap.clear();
- Set<Resource> clusters = provider.getResources(request, null);
-
- clusterHostComponentMap = new HashMap<>();
- clusterGangliaCollectorMap = new HashMap<>();
- for (Resource cluster : clusters) {
+ jmxPortMap.clear();
+ clusterHostComponentMap = new HashMap<>();
+ clusterGangliaCollectorMap = new HashMap<>();
- String clusterName = (String) cluster.getPropertyValue(CLUSTER_NAME_PROPERTY_ID);
-
- // initialize the host component map and Ganglia server from the known hosts components...
- provider = getResourceProvider(Resource.Type.HostComponent);
-
- request = PropertyHelper.getReadRequest(HOST_COMPONENT_HOST_NAME_PROPERTY_ID,
- HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
-
- Predicate predicate = new PredicateBuilder().property(HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID).
- equals(clusterName).toPredicate();
+ Map<String, Cluster> clusterMap = clusters.getClusters();
+ if (MapUtils.isEmpty(clusterMap)) {
+ return;
+ }
- Set<Resource> hostComponents = provider.getResources(request, predicate);
- Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
+ for (Cluster cluster : clusterMap.values()) {
+ String clusterName = cluster.getClusterName();
+ Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
- if (hostComponentMap == null) {
- hostComponentMap = new HashMap<>();
- clusterHostComponentMap.put(clusterName, hostComponentMap);
- }
+ if (hostComponentMap == null) {
+ hostComponentMap = new HashMap<>();
+ clusterHostComponentMap.put(clusterName, hostComponentMap);
+ }
- for (Resource hostComponent : hostComponents) {
- String componentName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
- String hostName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
+ List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts();
+ if (!CollectionUtils.isEmpty(serviceComponentHosts)) {
+ for (ServiceComponentHost sch : serviceComponentHosts) {
+ String componentName = sch.getServiceComponentName();
+ String hostName = sch.getHostName();
hostComponentMap.put(componentName, hostName);
@@ -910,26 +902,11 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
if (componentName.equals(METRIC_SERVER)) {
// If current collector host is null or if the host or the host component not live
- // Update clusterMetricCollectorMap.
+ // Update clusterMetricCollectorMap.
metricsCollectorHAManager.addCollectorHost(clusterName, hostName);
}
}
}
- } catch (UnsupportedPropertyException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught UnsupportedPropertyException while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
- } catch (NoSuchResourceException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught NoSuchResourceException exception while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
- } catch (NoSuchParentResourceException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught NoSuchParentResourceException exception while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5e242c9c/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 9597ba1..90dd611 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -119,6 +119,11 @@ public interface Cluster {
List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName);
/**
+ * Get all ServiceComponentHosts for this cluster.
+ */
+ List<ServiceComponentHost> getServiceComponentHosts();
+
+ /**
* Get all hosts associated with this cluster.
*
* @return collection of hosts that are associated with this cluster
http://git-wip-us.apache.org/repos/asf/ambari/blob/5e242c9c/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 3953184..8f1a882 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -554,8 +554,17 @@ public class ClusterImpl implements Cluster {
throw new ServiceComponentHostNotFoundException(getClusterName(),
serviceName, serviceComponentName, hostname);
}
- return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(
- hostname);
+ return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(hostname);
+ }
+
+ public List<ServiceComponentHost> getServiceComponentHosts() {
+ List<ServiceComponentHost> serviceComponentHosts = new ArrayList<>();
+ if (!serviceComponentHostsByHost.isEmpty()) {
+ for (List<ServiceComponentHost> schList : serviceComponentHostsByHost.values()) {
+ serviceComponentHosts.addAll(schList);
+ }
+ }
+ return Collections.unmodifiableList(serviceComponentHosts);
}
@Override
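The heart of this patch is two changes: the JMX port cache moves from a synchronized-HashMap wrapper to a ConcurrentHashMap guarded by a containsKey double-check, and initProviderMaps() walks Clusters/ServiceComponentHost state directly instead of issuing internal resource-provider requests (dropping the three UnsupportedProperty/NoSuchResource catch blocks along the way). A standalone sketch of the same init-once guarantee, using computeIfAbsent as an equivalent to the explicit double-check; an illustration, not Ambari code:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class JmxPortCache {
      // Mirrors the patched field: one nested map of component ports per cluster.
      private final ConcurrentMap<String, ConcurrentMap<String, String>> jmxPortMap =
          new ConcurrentHashMap<>();

      // computeIfAbsent creates the per-cluster map atomically and exactly once,
      // matching the containsKey double-check inside synchronized in the patch.
      ConcurrentMap<String, String> portsForCluster(String clusterName) {
        return jmxPortMap.computeIfAbsent(clusterName, k -> new ConcurrentHashMap<>());
      }
    }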
[10/57] [abbrv] ambari git commit: AMBARI-21891. Fix
AmbariServerTest. (vbrodetskyi)
Posted by lp...@apache.org.
AMBARI-21891. Fix AmbariServerTest. (vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f3232d20
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f3232d20
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f3232d20
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: f3232d20436bf3944ae0b8359e71d840a6d132ac
Parents: 8de9b06
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Sep 6 17:27:51 2017 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Sep 6 17:29:02 2017 +0300
----------------------------------------------------------------------
.../ambari/server/checks/DatabaseConsistencyCheckHelper.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f3232d20/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index d52ea1f..6fef3b8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -314,7 +314,7 @@ public class DatabaseConsistencyCheckHelper {
} else if (tableRowCount != -1 && tableRowCount < TABLE_ROW_COUNT_LIMIT) {
LOG.info(String.format("The database table %s currently has %d rows and is within normal limits (%d)", tableName, tableRowCount, TABLE_ROW_COUNT_LIMIT));
} else {
- throw new SQLException();
+ warning("Unable to get size for table {}!", tableName);
}
} catch (SQLException ex) {
error(String.format("Failed to get %s row count: ", tableName), ex);
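The one-line fix downgrades an undeterminable row count from a thrown SQLException, which aborted the whole consistency check, to a logged warning. A rough standalone sketch of the surrounding row-count logic; the JDBC shape and the limit value are assumptions for illustration:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class RowCountCheck {
      static final long TABLE_ROW_COUNT_LIMIT = 3000; // assumed value

      // Returns -1 when the count cannot be read; the caller then logs a
      // warning instead of throwing, which is the behavioral change here.
      static long tableRowCount(Connection conn, String tableName) {
        String sql = "SELECT COUNT(*) FROM " + tableName;
        try (Statement st = conn.createStatement(); ResultSet rs = st.executeQuery(sql)) {
          return rs.next() ? rs.getLong(1) : -1L;
        } catch (SQLException e) {
          return -1L;
        }
      }
    }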
[53/57] [abbrv] ambari git commit: AMBARI-21307 Draft
implementation for the user-related LDAP config attribute validation
Posted by lp...@apache.org.
AMBARI-21307 Draft implementation for the user-related LDAP config attribute validation
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20f1ad27
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20f1ad27
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20f1ad27
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 20f1ad27961d1d7ab1343e2c4cd6f4a37f22e014
Parents: d5b3d29
Author: lpuskas <lp...@apache.org>
Authored: Tue Jul 18 17:48:07 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:01 2017 +0200
----------------------------------------------------------------------
ambari-project/pom.xml | 1 +
ambari-server/pom.xml | 7 +-
.../AmbariConfigurationRequestSwagger.java | 1 -
.../services/ldap/AmbariConfigurationDTO.java | 67 +++++++
.../ldap/LdapCheckConfigurationRequest.java | 47 +++++
.../api/services/ldap/LdapOperationRequest.java | 18 ++
.../api/services/ldap/LdapRequestInfo.java | 61 +++++++
.../api/services/ldap/LdapRestService.java | 132 ++++++++++++++
.../ambari/server/controller/AmbariServer.java | 3 +-
.../server/controller/ControllerModule.java | 1 +
.../server/ldap/AmbariLdapConfiguration.java | 129 ++++++++++++++
.../server/ldap/LdapConfigurationFactory.java | 21 +++
.../ldap/LdapConfigurationValidatorService.java | 52 ++++++
.../apache/ambari/server/ldap/LdapModule.java | 37 ++++
.../ldap/service/AmbariLdapException.java | 33 ++++
.../server/ldap/service/AmbariLdapFacade.java | 107 +++++++++++
.../ambari/server/ldap/service/LdapFacade.java | 52 ++++++
.../server/ldap/service/LdapSearchService.java | 18 ++
.../ad/AdLdapConfigurationValidatorService.java | 177 +++++++++++++++++++
.../service/ad/LdapConfigurationConverter.java | 50 ++++++
.../api/services/ldap/LDAPServiceTest.java | 85 +++++++++
...AdLdapConfigurationValidatorServiceTest.java | 129 ++++++++++++++
22 files changed, 1224 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-project/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-project/pom.xml b/ambari-project/pom.xml
index 00ba1bc..e4d4423 100644
--- a/ambari-project/pom.xml
+++ b/ambari-project/pom.xml
@@ -31,6 +31,7 @@
<ambari.dir>${project.parent.basedir}</ambari.dir>
<powermock.version>1.6.3</powermock.version>
<jetty.version>8.1.19.v20160209</jetty.version>
+ <ldap-api.version>1.0.0</ldap-api.version>
<checkstyle.version>6.19</checkstyle.version> <!-- last version that does not require Java 8 -->
<swagger.version>1.5.10</swagger.version>
<swagger.maven.plugin.version>3.1.4</swagger.maven.plugin.version>
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 29a820a..c0f30be 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1287,7 +1287,6 @@
<dependency>
<groupId>org.apache.directory.shared</groupId>
<artifactId>shared-ldap</artifactId>
- <scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
@@ -1686,8 +1685,12 @@
<artifactId>jna</artifactId>
<version>4.2.2</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.directory.api</groupId>
+ <artifactId>api-all</artifactId>
+ <version>${ldap-api.version}</version>
+ </dependency>
</dependencies>
-
<pluginRepositories>
<pluginRepository>
<id>oss.sonatype.org</id>
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
index d6714f9..5e8094e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationRequestSwagger.java
@@ -16,7 +16,6 @@ package org.apache.ambari.server.api.services;
import java.util.Map;
import org.apache.ambari.server.controller.ApiModel;
-import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;
import io.swagger.annotations.ApiModelProperty;
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
new file mode 100644
index 0000000..1b134fe
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+public class AmbariConfigurationDTO {
+ private String type;
+ private Set<Map<String, Object>> data = Collections.emptySet();
+ private String versionTag;
+ private Integer version;
+ private long createdTs;
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Set<Map<String, Object>> getData() {
+ return data;
+ }
+
+ public void setData(Set<Map<String, Object>> data) {
+ this.data = data;
+ }
+
+ public String getVersionTag() {
+ return versionTag;
+ }
+
+ public void setVersionTag(String versionTag) {
+ this.versionTag = versionTag;
+ }
+
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
+ public long getCreatedTs() {
+ return createdTs;
+ }
+
+ public void setCreatedTs(long createdTs) {
+ this.createdTs = createdTs;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
new file mode 100644
index 0000000..188f1b9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+
+import com.google.gson.annotations.SerializedName;
+
+public class LdapCheckConfigurationRequest implements LdapOperationRequest {
+
+ @SerializedName("AmbariConfiguration")
+ private AmbariConfigurationDTO ambariConfiguration;
+
+ @SerializedName("RequestInfo")
+ private LdapRequestInfo requestInfo;
+
+ public LdapCheckConfigurationRequest() {
+ }
+
+
+ public AmbariConfigurationDTO getAmbariConfiguration() {
+ return ambariConfiguration;
+ }
+
+ public void setAmbariConfiguration(AmbariConfigurationDTO ambariConfiguration) {
+ this.ambariConfiguration = ambariConfiguration;
+ }
+
+ public LdapRequestInfo getRequestInfo() {
+ return requestInfo;
+ }
+
+ public void setRequestInfo(LdapRequestInfo requestInfo) {
+ this.requestInfo = requestInfo;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
new file mode 100644
index 0000000..06f6c40
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
@@ -0,0 +1,18 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+public interface LdapOperationRequest {
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRequestInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRequestInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRequestInfo.java
new file mode 100644
index 0000000..eeecfee
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRequestInfo.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import java.util.Map;
+
+import org.apache.ambari.server.controller.RequestPostRequest;
+
+/**
+ * Bean holding request information specific to LDAP requests.
+ */
+public class LdapRequestInfo implements RequestPostRequest.RequestInfo {
+
+ // no-arg constructor facilitating JSON serialization
+ public LdapRequestInfo() {
+ }
+
+ private String action;
+
+ private Map<String, Object> parameters;
+
+ @Override
+ public String getAction() {
+ return action;
+ }
+
+ public void setAction(String action) {
+ this.action = action;
+ }
+
+ public void setParameters(Map<String, Object> parameters) {
+ this.parameters = parameters;
+ }
+
+ @Override
+ public String getCommand() {
+ return null;
+ }
+
+ @Override
+ public RequestPostRequest.OperationLevel getOperationLevel() {
+ return null;
+ }
+
+ @Override
+ public Map<String, Object> getParameters() {
+ return parameters;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
new file mode 100644
index 0000000..33b10fa
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.ambari.annotations.ApiIgnore;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.api.services.BaseService;
+import org.apache.ambari.server.api.services.Result;
+import org.apache.ambari.server.api.services.ResultImpl;
+import org.apache.ambari.server.api.services.ResultStatus;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationFactory;
+import org.apache.ambari.server.ldap.service.LdapFacade;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Endpoint designated to LDAP specific operations.
+ */
+@StaticallyInject
+@Path("/ldap")
+public class LdapRestService extends BaseService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LdapRestService.class);
+
+ @Inject
+ private static LdapFacade ldapFacade;
+
+ @Inject
+ private static LdapConfigurationFactory ldapConfigurationFactory;
+
+ @POST
+ @ApiIgnore // until documented
+ @Path("/action") // todo this needs to be moved under the resource
+ @Consumes(MediaType.APPLICATION_JSON)
+ public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+
+ Result result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.OK));
+ try {
+
+ validateRequest(ldapCheckConfigurationRequest);
+
+ AmbariLdapConfiguration ambariLdapConfiguration = ldapConfigurationFactory.createLdapConfiguration(
+ ldapCheckConfigurationRequest.getAmbariConfiguration().getData().iterator().next());
+
+ switch (ldapCheckConfigurationRequest.getRequestInfo().getAction()) {
+ case "test-connection":
+
+ LOGGER.info("Testing connection to the LDAP server ...");
+ ldapFacade.checkConnection(ambariLdapConfiguration);
+
+ break;
+ case "test-attributes":
+
+ LOGGER.info("Testing LDAP attributes ....");
+ ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
+
+ break;
+ case "detect-attributes":
+
+ LOGGER.info("Detecting LDAP attributes ...");
+ ldapFacade.detectAttributes(ambariLdapConfiguration);
+
+ break;
+ default:
+ LOGGER.warn("No action provided ...");
+ throw new IllegalArgumentException("No request action provided");
+ }
+
+ } catch (Exception e) {
+ result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.BAD_REQUEST, e));
+ }
+
+ return Response.status(result.getStatus().getStatusCode()).entity(getResultSerializer().serialize(result)).build();
+ }
+
+ private void validateRequest(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+ String errMsg;
+
+ if (null == ldapCheckConfigurationRequest) {
+ errMsg = "No ldap configuraiton request provided";
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+
+ if (null == ldapCheckConfigurationRequest.getRequestInfo()) {
+ errMsg = String.format("No request information provided. Request: [%s]", ldapCheckConfigurationRequest);
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+
+ if (null == ldapCheckConfigurationRequest.getAmbariConfiguration()
+ || ldapCheckConfigurationRequest.getAmbariConfiguration().getData().size() != 1) {
+ errMsg = String.format("No / Invalid configuration data provided. Request: [%s]", ldapCheckConfigurationRequest);
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+ }
+}
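
For illustration, a request body for the endpoint above might look like the following sketch. The "ldap-config" type and the configuration keys are taken from the test fixtures and from AmbariLdapConfiguration below; the host, DN and credential values are placeholders, and the exact URL prefix depends on how the JAX-RS root is mounted:

POST /ldap/action
{
  "AmbariConfiguration": {
    "type": "ldap-config",
    "data": [{
      "ambari.ldap.server.host": "ldap.example.com",
      "ambari.ldap.server.port": "389",
      "ambari.ldap.bindanonymously": "true",
      "ambari.ldap.base.dn": "dc=example,dc=com",
      "ambari.ldap.user.object.class": "person",
      "ambari.ldap.user.name.attribute": "uid"
    }]
  },
  "RequestInfo": {
    "action": "test-attributes",
    "parameters": {
      "ldap.test.user.name": "jdoe",
      "ldap.test.user.password": "secret"
    }
  }
}

Note that validateRequest() insists on exactly one entry in the "data" array, and the supported actions are "test-connection", "test-attributes" and "detect-attributes".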
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 8988be0..6ceed4a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -77,6 +77,7 @@ import org.apache.ambari.server.controller.internal.ViewPermissionResourceProvid
import org.apache.ambari.server.controller.metrics.ThreadPoolEnabledPropertyProvider;
import org.apache.ambari.server.controller.utilities.KerberosChecker;
import org.apache.ambari.server.controller.utilities.KerberosIdentityCleaner;
+import org.apache.ambari.server.ldap.LdapModule;
import org.apache.ambari.server.metrics.system.MetricsService;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.PersistenceType;
@@ -1061,7 +1062,7 @@ public class AmbariServer {
public static void main(String[] args) throws Exception {
logStartup();
- Injector injector = Guice.createInjector(new ControllerModule(), new AuditLoggerModule());
+ Injector injector = Guice.createInjector(new ControllerModule(), new AuditLoggerModule(), new LdapModule());
AmbariServer server = null;
try {
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index edabcdb..bf790a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -504,6 +504,7 @@ public class ControllerModule extends AbstractModule {
install(new FactoryModuleBuilder().implement(CollectionPersisterService.class, CsvFilePersisterService.class).build(CollectionPersisterServiceFactory.class));
install(new FactoryModuleBuilder().build(ConfigureClusterTaskFactory.class));
+
}
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
new file mode 100644
index 0000000..519f400
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.ldap;
+
+import java.util.Map;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.assistedinject.Assisted;
+
+/**
+ * This class is an immutable representation of all the LDAP related configuration entries.
+ */
+@Singleton
+public class AmbariLdapConfiguration {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariLdapConfiguration.class);
+
+ /**
+ * Constants representing supported LDAP related property names
+ */
+ public enum LdapConfigProperty {
+ LDAP_CONFIGURED("ambari.ldap.configured"),
+ AUTOMATIC_ATTRIBUTE_DETECTION("ambari.ldap.automatic.attribute.detection"),
+
+ USE_SSL("ambari.ldap.usessl"),
+ LDAP_SERVER_HOST("ambari.ldap.server.host"),
+ LDAP_SERVER_PORT("ambari.ldap.server.port"),
+ BASE_DN("ambari.ldap.base.dn"),
+
+ BIND_ANONIMOUSLY("ambari.ldap.bindanonymously"),
+ MANAGER_DN("ambari.ldap.managerdn"),
+ MANAGER_PASSWORD("ambari.ldap.managerpassword"),
+ USER_OBJECT_CLASS("ambari.ldap.user.object.class"),
+ USER_NAME_ATTRIBUTE("ambari.ldap.user.name.attribute"),
+ USER_SEARCH_BASE("ambari.ldap.user.search.base"),
+
+ GROUP_OBJECT_CLASS("ambari.ldap.group.object.class"),
+ GROUP_NAME_ATTRIBUTE("ambari.ldap.group.name.attribute"),
+ GROUP_MEMBER_ATTRIBUTE("ambari.ldap.group.member.attribute"),
+ GROUP_SEARCH_BASE("ambari.ldap.group.search.base"),
+ DN_ATTRIBUTE("authentication.ldap.dnAttribute");
+
+ private String propertyName;
+
+ LdapConfigProperty(String propertyName) {
+ this.propertyName = propertyName;
+ }
+
+ public String propertyName() {
+ return this.propertyName;
+ }
+ }
+
+ private final Map<String, Object> configurationMap;
+
+ private Object configurationValue(LdapConfigProperty ldapConfigProperty) {
+ Object value = null;
+ if (configurationMap.containsKey(ldapConfigProperty.propertyName)) {
+ value = configurationMap.get(ldapConfigProperty.propertyName);
+ } else {
+ LOGGER.warn("Ldap configuration property [{}] hasn't been set", ldapConfigProperty.propertyName());
+ }
+
+ return value;
+ }
+
+ @Inject
+ public AmbariLdapConfiguration(@Assisted Map<String, Object> configuration) {
+ this.configurationMap = configuration;
+ }
+
+
+ public String ldapServerHost() {
+ return (String) configurationValue(LdapConfigProperty.LDAP_SERVER_HOST);
+ }
+
+ public int ldapServerPort() {
+ return Integer.valueOf((String) configurationValue(LdapConfigProperty.LDAP_SERVER_PORT));
+ }
+
+ public boolean useSSL() {
+ return Boolean.valueOf((String) configurationValue(LdapConfigProperty.USE_SSL));
+ }
+
+ public boolean bindAnonimously() {
+ return Boolean.valueOf((String) configurationValue(LdapConfigProperty.BIND_ANONIMOUSLY));
+ }
+
+ public String managerDn() {
+ return (String) configurationValue(LdapConfigProperty.MANAGER_DN);
+ }
+
+ public String managerPassword() {
+ return (String) configurationValue(LdapConfigProperty.MANAGER_PASSWORD);
+ }
+
+ public boolean automaticAttributeDetection() {
+ return Boolean.valueOf((String) configurationValue(LdapConfigProperty.AUTOMATIC_ATTRIBUTE_DETECTION));
+ }
+
+ public String baseDn() {
+ return (String) configurationValue(LdapConfigProperty.BASE_DN);
+ }
+
+ public String userObjectClass() {
+ return (String) configurationValue(LdapConfigProperty.USER_OBJECT_CLASS);
+ }
+
+ public String userNameAttribute() {
+ return (String) configurationValue(LdapConfigProperty.USER_NAME_ATTRIBUTE);
+ }
+
+}
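
A minimal sketch of exercising this class directly, outside the injector (the factory-created path is shown with LdapModule below; the property values here are placeholders):

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.server.ldap.AmbariLdapConfiguration;

public class AmbariLdapConfigurationExample {
  public static void main(String[] args) {
    // keys come from the LdapConfigProperty enum; values arrive as Strings
    Map<String, Object> props = new HashMap<>();
    props.put("ambari.ldap.server.host", "ldap.example.com");
    props.put("ambari.ldap.server.port", "389");
    props.put("ambari.ldap.usessl", "false");

    AmbariLdapConfiguration configuration = new AmbariLdapConfiguration(props);

    // the typed accessors convert the raw String values on access
    System.out.println(configuration.ldapServerHost()); // ldap.example.com
    System.out.println(configuration.ldapServerPort()); // 389
    System.out.println(configuration.useSSL());         // false
  }
}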
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
new file mode 100644
index 0000000..bcd6e39
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap;
+
+import java.util.Map;
+
+public interface LdapConfigurationFactory {
+ AmbariLdapConfiguration createLdapConfiguration(Map<String, Object> configuration);
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
new file mode 100644
index 0000000..4667721
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+
+/**
+ * Collection of operations for validating ldap configuration.
+ * It's intended to decouple the application from implementations built on different ldap libraries.
+ */
+public interface LdapConfigurationValidatorService {
+
+ /**
+ * Tests the connection based on the provided configuration.
+ *
+ * @param configuration the ambari ldap configuration instance
+ * @throws AmbariLdapException if the connection is not possible
+ */
+ void checkConnection(AmbariLdapConfiguration configuration) throws AmbariLdapException;
+
+ /**
+ * Checks whether the group related LDAP attributes in the configuration are correct.
+ *
+ * @param configuration the configuration instance holding the available properties
+ * @throws AmbariException if the attributes are not valid
+ */
+ void checkGroupAttributes(AmbariLdapConfiguration configuration) throws AmbariException;
+
+ /**
+ * Tries to connect to the LDAP server with the given credentials.
+ * Primarily used for testing the user before performing other operations (e.g. attribute detection).
+ *
+ * @param username the username
+ * @param password the password
+ * @param configuration the available ldap configuration
+ * @throws AmbariException if the connection couldn't be established
+ */
+ void checkUserAttributes(String username, String password, AmbariLdapConfiguration configuration) throws AmbariException;
+}
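
Since the contract is library-agnostic, alternative implementations are straightforward to supply. A hypothetical no-op stub, e.g. for unit tests that do not need a live LDAP server, only has to cover the three methods:

import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;

public class NoOpLdapConfigurationValidatorService implements LdapConfigurationValidatorService {

  @Override
  public void checkConnection(AmbariLdapConfiguration configuration) {
    // accept any configuration
  }

  @Override
  public void checkGroupAttributes(AmbariLdapConfiguration configuration) {
    // accept any configuration
  }

  @Override
  public void checkUserAttributes(String username, String password, AmbariLdapConfiguration configuration) {
    // accept any credentials
  }
}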
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
new file mode 100644
index 0000000..625ce8b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.server.ldap;
+
+import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
+import org.apache.ambari.server.ldap.service.LdapFacade;
+import org.apache.ambari.server.ldap.service.ad.AdLdapConfigurationValidatorService;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.assistedinject.FactoryModuleBuilder;
+
+/**
+ * GUICE configuration module for setting up LDAP related infrastructure.
+ */
+public class LdapModule extends AbstractModule {
+
+ @Override
+ protected void configure() {
+ bind(LdapFacade.class).to(AmbariLdapFacade.class);
+ bind(LdapConfigurationValidatorService.class).to(AdLdapConfigurationValidatorService.class);
+
+ install(new FactoryModuleBuilder().build(LdapConfigurationFactory.class));
+ }
+}
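
A sketch of the module in standalone use (AmbariServer installs it next to ControllerModule and AuditLoggerModule as shown above; the host and port values are placeholders):

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.LdapConfigurationFactory;
import org.apache.ambari.server.ldap.LdapModule;
import org.apache.ambari.server.ldap.service.LdapFacade;

import com.google.inject.Guice;
import com.google.inject.Injector;

public class LdapModuleExample {
  public static void main(String[] args) throws Exception {
    Injector injector = Guice.createInjector(new LdapModule());

    // the FactoryModuleBuilder-generated factory turns a raw property map
    // into an AmbariLdapConfiguration via assisted injection
    Map<String, Object> props = new HashMap<>();
    props.put("ambari.ldap.server.host", "localhost");
    props.put("ambari.ldap.server.port", "389");
    props.put("ambari.ldap.bindanonymously", "true");

    AmbariLdapConfiguration configuration =
        injector.getInstance(LdapConfigurationFactory.class).createLdapConfiguration(props);

    // resolves to AmbariLdapFacade per the binding above; attempts a real bind
    injector.getInstance(LdapFacade.class).checkConnection(configuration);
  }
}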
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapException.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapException.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapException.java
new file mode 100644
index 0000000..cb38acc
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapException.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service;
+
+public class AmbariLdapException extends Exception {
+ public AmbariLdapException() {
+ super();
+ }
+
+ public AmbariLdapException(String message) {
+ super(message);
+ }
+
+ public AmbariLdapException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public AmbariLdapException(Throwable cause) {
+ super(cause);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
new file mode 100644
index 0000000..abd028a
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.server.ldap.service;
+
+import java.util.Map;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Singleton
+public class AmbariLdapFacade implements LdapFacade {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariLdapFacade.class);
+
+ private enum Parameters {
+ TEST_USER_NAME("ldap.test.user.name"),
+ TEST_USER_PASSWORD("ldap.test.user.password");
+
+ private String parameterKey;
+
+ Parameters(String parameterKey) {
+ this.parameterKey = parameterKey;
+ }
+
+ private String getParameterKey() {
+ return parameterKey;
+ }
+
+ }
+
+ @Inject
+ private LdapConfigurationValidatorService ldapConfigurationValidatorService;
+
+ @Inject
+ public AmbariLdapFacade() {
+ }
+
+ @Override
+ public void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException {
+ try {
+ LOGGER.info("Validating LDAP connection related configuration based on: {}", ambariLdapConfiguration);
+ ldapConfigurationValidatorService.checkConnection(ambariLdapConfiguration);
+ } catch (AmbariLdapException e) {
+ LOGGER.error("Validating LDAP connection configuration failed", e);
+ throw new AmbariException("Validating LDAP connection configuration failed", e);
+ }
+ LOGGER.info("Validating LDAP connection related configuration: SUCCESS");
+ }
+
+
+ @Override
+ public void detectAttributes(AmbariLdapConfiguration ambariLdapConfiguration) {
+ LOGGER.info("Detecting LDAP configuration attributes ...");
+ throw new UnsupportedOperationException("Not yet implemented");
+ }
+
+ @Override
+ public void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ldapConfiguration) throws AmbariException {
+ String userName = getTestUserNameFromParameters(parameters);
+ String testUserPass = getTestUserPasswordFromParameters(parameters);
+
+ if (null == userName) {
+ throw new IllegalArgumentException("No test user available for testing LDAP attributes");
+ }
+
+ LOGGER.info("Testing LDAP attributes with test user: {}", userName);
+ ldapConfigurationValidatorService.checkUserAttributes(userName, testUserPass, ldapConfiguration);
+ }
+
+
+ private String getTestUserNameFromParameters(Map<String, Object> parameters) {
+ return (String) parameterValue(parameters, Parameters.TEST_USER_NAME);
+ }
+
+ private String getTestUserPasswordFromParameters(Map<String, Object> parameters) {
+ return (String) parameterValue(parameters, Parameters.TEST_USER_PASSWORD);
+ }
+
+ private Object parameterValue(Map<String, Object> parameters, Parameters parameter) {
+ Object value = null;
+ if (parameters.containsKey(parameter.getParameterKey())) {
+ value = parameters.get(parameter.getParameterKey());
+ } else {
+ LOGGER.warn("Parameter [{}] is missing from parameters", parameter.getParameterKey());
+ }
+ return value;
+ }
+}
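
Callers of checkLdapAttibutes() are expected to pass the test user's credentials under the keys defined by the Parameters enum above. A hedged sketch with a hypothetical helper (the credential values are placeholders):

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.service.LdapFacade;

public class AttributeCheckExample {

  // hypothetical helper wiring the credentials into the expected parameter keys
  static void checkWithTestUser(LdapFacade facade, AmbariLdapConfiguration configuration,
      String user, String password) throws AmbariException {
    Map<String, Object> parameters = new HashMap<>();
    parameters.put("ldap.test.user.name", user);          // Parameters.TEST_USER_NAME
    parameters.put("ldap.test.user.password", password);  // Parameters.TEST_USER_PASSWORD
    facade.checkLdapAttibutes(parameters, configuration);
  }
}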
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
new file mode 100644
index 0000000..38553f0
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service;
+
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+
+/**
+ * The contract defining all the operations required by the application when communicating with an arbitrary LDAP server.
+ * This interface is intended to decouple LDAP specific details from the application.
+ */
+public interface LdapFacade {
+
+ /**
+ * Tests the connection to the LDAP server based on the provided configuration.
+ *
+ * @param ambariLdapConfiguration the available ldap related configuration
+ * @throws AmbariException if the connection fails or other problems occur during the operation
+ */
+ void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException;
+
+
+ /**
+ * Runs the user and group attribute detection algorithms.
+ *
+ * @param ambariLdapConfiguration the ldap configuration the detection is based on
+ */
+ void detectAttributes(AmbariLdapConfiguration ambariLdapConfiguration);
+
+ /**
+ * Checks user and group related LDAP configuration attributes in the configuration object with the help of the provided parameters.
+ *
+ * @param parameters a map of property name and value pairs holding information to facilitate checking the attributes
+ * @param ambariLdapConfiguration configuration instance with available attributes
+ * @throws AmbariException if the attribute checking fails
+ */
+ void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException;
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapSearchService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapSearchService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapSearchService.java
new file mode 100644
index 0000000..f1abc8b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapSearchService.java
@@ -0,0 +1,18 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service;
+
+public interface LdapSearchService {
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
new file mode 100644
index 0000000..11e8655
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorService.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.apache.directory.shared.ldap.constants.SchemaConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Implementation of the validation logic using the Apache Directory API.
+ */
+@Singleton
+public class AdLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AdLdapConfigurationValidatorService.class);
+
+ @Inject
+ private LdapConfigurationConverter ldapConfigurationConverter;
+
+ /**
+ * Default constructor facilitating instantiation via dependency injection.
+ */
+ @Inject
+ public AdLdapConfigurationValidatorService() {
+ }
+
+ @Override
+ public void checkConnection(AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ try {
+ LOGGER.info("Testing the connection based on the configuration: {}", ambariLdapConfiguration);
+
+ LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
+ LdapNetworkConnection connection = new LdapNetworkConnection(connectionConfig);
+
+ if (ambariLdapConfiguration.bindAnonimously()) {
+ LOGGER.debug("Binding anonimously ...");
+ connection.bind();
+ } else {
+ LOGGER.debug("Binding with manager DN and manager password ...");
+ connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
+ }
+
+ if (connection.isConnected()) {
+ LOGGER.info("Successfully connected to the LDAP server.");
+ }
+
+ connection.close();
+
+ } catch (Exception e) {
+ LOGGER.warn("Could not bind to the LDAP server base don the provided configuration ...");
+ throw new AmbariLdapException(e);
+ }
+ }
+
+
+ /**
+ * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
+ * Attributes are considered correct if there is at least one entry found.
+ *
+ * Invalid attributes are signaled by throwing an exception.
+ *
+ * @param username the username
+ * @param password the password
+ * @param ambariLdapConfiguration configuration instance holding ldap configuration details
+ * @throws AmbariException if the attributes are not valid or any error occurs
+ */
+ @Override
+ public void checkUserAttributes(String username, String password, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariException {
+ LdapNetworkConnection connection = null;
+ EntryCursor entryCursor = null;
+ try {
+ LOGGER.info("Checking user attributes for user {} ...", username);
+
+ LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
+ connection = new LdapNetworkConnection(connectionConfig);
+
+
+ if (!ambariLdapConfiguration.bindAnonimously()) {
+ LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
+ connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
+ } else {
+ LOGGER.debug("Binding anonimously ...");
+ connection.bind();
+ }
+
+ if (!connection.isConnected()) {
+ LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
+ throw new IllegalStateException("The connection to the LDAP server is not alive");
+ }
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), username))
+ .toString();
+
+ LOGGER.info("Searching for the user: {} using the search filter: {}", username, filter);
+ entryCursor = connection.search(new Dn(ambariLdapConfiguration.baseDn()), filter, SearchScope.SUBTREE);
+
+ // collecting search result entries
+ List<Entry> users = Lists.newArrayList();
+ for (Entry entry : entryCursor) {
+ users.add(entry);
+ }
+
+ // there should be at least one user found
+ if (users.isEmpty()) {
+ String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
+ LOGGER.error(msg);
+ throw new Exception(msg);
+ }
+
+ LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
+
+ } catch (Exception e) {
+
+ LOGGER.error("Error while checking user attributes.");
+ throw new AmbariException("Error while checking user attributes", e);
+
+ } finally {
+
+ LOGGER.debug("Closing the connection and searchresult ...");
+
+ if (null != searchCursor) {
+ searchCursor.close();
+ }
+
+ if (null != connection) {
+ try {
+ connection.close();
+ } catch (IOException e) {
+ LOGGER.error("Exception occurred while closing the connection", e);
+ }
+ }
+
+ }
+ }
+
+ @Override
+ public void checkGroupAttributes(AmbariLdapConfiguration configuration) throws AmbariException {
+
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
new file mode 100644
index 0000000..a8839f1
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/LdapConfigurationConverter.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Converts between ambari specific ldap types and the types of the 3rd party ldap library.
+ */
+@Singleton
+public class LdapConfigurationConverter {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LdapConfigurationConverter.class);
+
+ /**
+ * Creates a {@link LdapConnectionConfig} instance based on the provided ambari specific configuration.
+ *
+ * @param ambariAmbariLdapConfiguration the ambari specific ldap configuration
+ * @return the connection configuration to be used by the Apache Directory API connections
+ */
+ public LdapConnectionConfig getLdapConnectionConfig(AmbariLdapConfiguration ambariAmbariLdapConfiguration) {
+ LOGGER.debug("Creating a configuration instance based on the ambari configuration: {}", ambariAmbariLdapConfiguration);
+
+ LdapConnectionConfig ldapConnectionConfig = new LdapConnectionConfig();
+ ldapConnectionConfig.setLdapHost(ambariAmbariLdapConfiguration.ldapServerHost());
+ ldapConnectionConfig.setLdapPort(ambariAmbariLdapConfiguration.ldapServerPort());
+ ldapConnectionConfig.setUseSsl(ambariAmbariLdapConfiguration.useSSL());
+
+ //todo set the other values as required
+ return ldapConnectionConfig;
+ }
+
+}
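
A short sketch of the converter in use, mirroring the test code below; per the todo above only host, port and the SSL flag are mapped so far (the values are placeholders):

import java.util.Map;

import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.service.ad.LdapConfigurationConverter;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;

import com.google.common.collect.Maps;

public class ConverterExample {
  public static void main(String[] args) throws Exception {
    Map<String, Object> props = Maps.newHashMap();
    props.put("ambari.ldap.server.host", "localhost");
    props.put("ambari.ldap.server.port", "389");
    props.put("ambari.ldap.usessl", "false");

    LdapConnectionConfig connectionConfig =
        new LdapConfigurationConverter().getLdapConnectionConfig(new AmbariLdapConfiguration(props));

    // anonymous bind, as the validator above does when no manager DN is set
    LdapNetworkConnection connection = new LdapNetworkConnection(connectionConfig);
    connection.bind();
    connection.close();
  }
}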
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
new file mode 100644
index 0000000..f20cd1f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+public class LDAPServiceTest {
+
+ private static String JSON_STRING = "{\n" +
+ " \"AmbariConfiguration\": {\n" +
+ " \"type\": \"ldap-config\",\n" +
+ " \"data\": [{\n" +
+ " \"authentication.ldap.primaryUrl\": \"localhost:33389\",\n" +
+ " \"authentication.ldap.secondaryUrl\": \"localhost:333\",\n" +
+ " \"authentication.ldap.baseDn\": \"dc=ambari,dc=apache,dc=org\"\n" +
+ " }]\n" +
+ " }\n" +
+ "}";
+
+ @Test
+ public void testJaxRsJsonTransformation() throws Exception {
+ // GIVEN
+ ObjectMapper objectMapper = new ObjectMapper();
+
+ Gson gsonJsonProvider = new GsonBuilder().create();
+
+
+ // WHEN
+ LdapCheckConfigurationRequest ldapCheckConfigurationRequest = gsonJsonProvider.fromJson(JSON_STRING, LdapCheckConfigurationRequest.class);
+ // LdapCheckConfigurationRequest ldapCheckConfigurationRequest = objectMapper.readValue(JSON_STRING, LdapCheckConfigurationRequest.class);
+
+ // THEN
+ Assert.assertNotNull(ldapCheckConfigurationRequest);
+
+ }
+
+
+ @Test
+ public void testLdapConnection() throws Exception {
+ // GIVEN
+ LdapConnection connection = new LdapNetworkConnection("localhost", 389);
+
+ // WHEN
+ connection.bind();
+ // THEN
+
+ }
+
+
+ @Test
+ public void testLdapConnectionConfigs() throws Exception {
+ // GIVEN
+ LdapConnectionConfig config = new LdapConnectionConfig();
+ config.setLdapHost("localhost");
+ config.setLdapPort(389);
+
+ // WHEN
+ LdapConnection connection = new LdapNetworkConnection(config);
+
+ // THEN
+ connection.anonymousBind();
+
+ Assert.assertNotNull(connection);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/20f1ad27/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
new file mode 100644
index 0000000..0f57099
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/AdLdapConfigurationValidatorServiceTest.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ad;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.message.Response;
+import org.apache.directory.api.ldap.model.message.SearchRequest;
+import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
+import org.apache.directory.api.ldap.model.message.SearchResultEntry;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.apache.directory.shared.ldap.constants.SchemaConstants;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+
+public class AdLdapConfigurationValidatorServiceTest {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AdLdapConfigurationValidatorService.class);
+ private static final String TEST_USER = "Jocika10";
+
+ LdapConfigurationValidatorService ldapConfigurationValidatorService = new AdLdapConfigurationValidatorService();
+
+
+ @Test
+ public void testCheckAttributes() throws Exception {
+
+ // WHEN
+ LdapConnectionConfig config = new LdapConnectionConfig();
+ config.setLdapHost("localhost");
+ config.setLdapPort(389);
+ LdapConnection connection = new LdapNetworkConnection(config);
+
+ // THEN
+ connection.anonymousBind();
+
+
+ EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
+
+ for (Entry entry : cursor) {
+ assertNotNull(entry);
+ System.out.println(entry);
+ }
+
+ cursor.close();
+
+ }
+
+ @Test
+ public void testCheckUserAttributes() throws Exception {
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), true);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "localhost");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=dev,dc=local");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+
+
+ try {
+ LOGGER.info("Authenticating user {} against the LDAP server ...", TEST_USER);
+ LdapConfigurationConverter ldapConfigurationConverter = new LdapConfigurationConverter();
+
+ LdapConnectionConfig connectionConfig = ldapConfigurationConverter.getLdapConnectionConfig(ambariLdapConfiguration);
+ LdapNetworkConnection connection = new LdapNetworkConnection(connectionConfig);
+
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), TEST_USER))
+ .toString();
+
+ SearchRequest searchRequest = new SearchRequestImpl();
+ searchRequest.setBase(new Dn(ambariLdapConfiguration.baseDn()));
+ searchRequest.setFilter(filter);
+ searchRequest.setScope(SearchScope.SUBTREE);
+
+ LOGGER.info("loking up user: {} based on the filtr: {}", TEST_USER, filter);
+
+ connection.bind();
+ SearchCursor searchCursor = connection.search(searchRequest);
+
+ while (searchCursor.next()) {
+ Response response = searchCursor.get();
+
+ // process the SearchResultEntry
+ if (response instanceof SearchResultEntry) {
+ Entry resultEntry = ((SearchResultEntry) response).getEntry();
+ System.out.println(resultEntry);
+ }
+ }
+
+ searchCursor.close();
+
+ } catch (Exception e) {
+ throw new AmbariException("Error during user authentication check", e);
+ }
+
+ }
+
+}
\ No newline at end of file
[15/57] [abbrv] ambari git commit: AMBARI-21898. Property provider
in-memory maps are refreshed too slowly after config updates. (swagle)
Posted by lp...@apache.org.
AMBARI-21898. Property provider in-memory maps are refreshed too slowly after config updates. (swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8cb94239
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8cb94239
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8cb94239
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 8cb942393ce15efb5f6fbc9f594287c30971c296
Parents: 98b0009
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Wed Sep 6 18:45:06 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Wed Sep 6 18:45:06 2017 -0700
----------------------------------------------------------------------
.../internal/AbstractProviderModule.java | 93 ++++++++------------
.../org/apache/ambari/server/state/Cluster.java | 5 ++
.../server/state/cluster/ClusterImpl.java | 13 ++-
3 files changed, 51 insertions(+), 60 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cb94239/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 77549f5..e0df487 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -67,6 +67,9 @@ import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -244,6 +247,8 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Inject
protected AmbariEventPublisher eventPublisher;
+ @Inject
+ private Clusters clusters;
/**
* The map of host components.
@@ -258,8 +263,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
/**
* JMX ports read from the configs
*/
- private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>> >jmxPortMap =
- Collections.synchronizedMap(new HashMap<String, ConcurrentMap<String, ConcurrentMap<String, String>>>());
+ private final Map<String, ConcurrentMap<String, ConcurrentMap<String, String>>> jmxPortMap = new ConcurrentHashMap<>(1);
private volatile boolean initialized = false;
@@ -517,16 +521,19 @@ public abstract class AbstractProviderModule implements ProviderModule,
@Override
public String getPort(String clusterName, String componentName, String hostName, boolean httpsEnabled) throws SystemException {
// Parent map need not be synchronized
- ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts = jmxPortMap.get(clusterName);
- if (clusterJmxPorts == null) {
+ ConcurrentMap<String, ConcurrentMap<String, String>> clusterJmxPorts;
+ // Still need double check to ensure single init
+ if (!jmxPortMap.containsKey(clusterName)) {
synchronized (jmxPortMap) {
- clusterJmxPorts = jmxPortMap.get(clusterName);
- if (clusterJmxPorts == null) {
+ if (!jmxPortMap.containsKey(clusterName)) {
clusterJmxPorts = new ConcurrentHashMap<>();
jmxPortMap.put(clusterName, clusterJmxPorts);
}
}
}
+
+ clusterJmxPorts = jmxPortMap.get(clusterName);
+
Service.Type service = componentServiceMap.get(componentName);
if (service != null) {
@@ -858,49 +865,34 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
}
+ // TODO: Fix for multi-service feature support (trunk)
+ // Called from a synchronized block!
private void initProviderMaps() throws SystemException {
- ResourceProvider provider = getResourceProvider(Resource.Type.Cluster);
-
- Set<String> propertyIds = new HashSet<>();
- propertyIds.add(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID);
-
- Map<String, String> requestInfoProperties = new HashMap<>();
- requestInfoProperties.put(ClusterResourceProvider.GET_IGNORE_PERMISSIONS_PROPERTY_ID, "true");
-
- Request request = PropertyHelper.getReadRequest(propertyIds,
- requestInfoProperties, null, null, null);
-
- try {
- jmxPortMap.clear();
- Set<Resource> clusters = provider.getResources(request, null);
-
- clusterHostComponentMap = new HashMap<>();
- clusterGangliaCollectorMap = new HashMap<>();
- for (Resource cluster : clusters) {
+ jmxPortMap.clear();
+ clusterHostComponentMap = new HashMap<>();
+ clusterGangliaCollectorMap = new HashMap<>();
- String clusterName = (String) cluster.getPropertyValue(CLUSTER_NAME_PROPERTY_ID);
-
- // initialize the host component map and Ganglia server from the known hosts components...
- provider = getResourceProvider(Resource.Type.HostComponent);
-
- request = PropertyHelper.getReadRequest(HOST_COMPONENT_HOST_NAME_PROPERTY_ID,
- HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
+ Map<String, Cluster> clusterMap = clusters.getClusters();
+ if (MapUtils.isEmpty(clusterMap)) {
+ return;
+ }
- Predicate predicate = new PredicateBuilder().property(HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID).
- equals(clusterName).toPredicate();
+ for (Cluster cluster : clusterMap.values()) {
+ String clusterName = cluster.getClusterName();
- Set<Resource> hostComponents = provider.getResources(request, predicate);
- Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
+ Map<String, String> hostComponentMap = clusterHostComponentMap.get(clusterName);
+ if (hostComponentMap == null) {
+ hostComponentMap = new HashMap<>();
+ clusterHostComponentMap.put(clusterName, hostComponentMap);
+ }
- if (hostComponentMap == null) {
- hostComponentMap = new HashMap<>();
- clusterHostComponentMap.put(clusterName, hostComponentMap);
- }
+ List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts();
- for (Resource hostComponent : hostComponents) {
- String componentName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
- String hostName = (String) hostComponent.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
+ if (!CollectionUtils.isEmpty(serviceComponentHosts)) {
+ for (ServiceComponentHost sch : serviceComponentHosts) {
+ String componentName = sch.getServiceComponentName();
+ String hostName = sch.getHostName();
hostComponentMap.put(componentName, hostName);
@@ -910,26 +902,11 @@ public abstract class AbstractProviderModule implements ProviderModule,
}
if (componentName.equals(METRIC_SERVER)) {
// If current collector host is null or if the host or the host component not live
- // Update clusterMetricCollectorMap.
+ // Update clusterMetricCollectorMap.
metricsCollectorHAManager.addCollectorHost(clusterName, hostName);
}
}
}
- } catch (UnsupportedPropertyException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught UnsupportedPropertyException while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
- } catch (NoSuchResourceException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught NoSuchResourceException exception while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
- } catch (NoSuchParentResourceException e) {
- if (LOG.isErrorEnabled()) {
- LOG.error("Caught NoSuchParentResourceException exception while trying to get the host mappings.", e);
- }
- throw new SystemException("An exception occurred while initializing the host mappings: " + e, e);
}
}
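The change above replaces the Collections.synchronizedMap-wrapped HashMap with a ConcurrentHashMap plus a double-checked containsKey guard for first-time initialization of a cluster's port map. A minimal sketch of the same pattern follows; the class and method names are illustrative only, and on Java 8+ computeIfAbsent expresses the guard in a single atomic call:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Illustrative sketch of the per-cluster map initialization used in getPort();
// the class and method names here are not taken from the patch.
public class JmxPortCache {

  private final Map<String, ConcurrentMap<String, String>> jmxPortMap =
      new ConcurrentHashMap<>();

  // Atomically create the inner map for a cluster on first access, replacing
  // the explicit double-checked containsKey/synchronized block.
  public ConcurrentMap<String, String> portsForCluster(String clusterName) {
    return jmxPortMap.computeIfAbsent(clusterName, k -> new ConcurrentHashMap<>());
  }
}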
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cb94239/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 9597ba1..90dd611 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -119,6 +119,11 @@ public interface Cluster {
List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName);
/**
+ * Get all ServiceComponentHosts for this cluster.
+ */
+ List<ServiceComponentHost> getServiceComponentHosts();
+
+ /**
* Get all hosts associated with this cluster.
*
* @return collection of hosts that are associated with this cluster
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cb94239/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 3953184..8f1a882 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -554,8 +554,17 @@ public class ClusterImpl implements Cluster {
throw new ServiceComponentHostNotFoundException(getClusterName(),
serviceName, serviceComponentName, hostname);
}
- return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(
- hostname);
+ return serviceComponentHosts.get(serviceName).get(serviceComponentName).get(hostname);
+ }
+
+ public List<ServiceComponentHost> getServiceComponentHosts() {
+ List<ServiceComponentHost> serviceComponentHosts = new ArrayList<>();
+ if (!serviceComponentHostsByHost.isEmpty()) {
+ for (List<ServiceComponentHost> schList : serviceComponentHostsByHost.values()) {
+ serviceComponentHosts.addAll(schList);
+ }
+ }
+ return Collections.unmodifiableList(serviceComponentHosts);
}
@Override
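The new Cluster.getServiceComponentHosts() aggregates every host's component list into a single unmodifiable list, which the rewritten initProviderMaps() iterates directly instead of issuing resource-provider queries. A short usage sketch, with cluster and hostComponentMap assumed to be in scope:

// Illustrative consumer of the new Cluster.getServiceComponentHosts() API,
// mirroring the loop added to initProviderMaps() above.
for (ServiceComponentHost sch : cluster.getServiceComponentHosts()) {
  hostComponentMap.put(sch.getServiceComponentName(), sch.getHostName());
}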
[54/57] [abbrv] ambari git commit: AMBARI-21307 refactoring - renamed,
moved classes, variables
Posted by lp...@apache.org.
AMBARI-21307 refactoring - renamed, moved classes, variables
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/12294a06
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/12294a06
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/12294a06
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 12294a063db244695d6a2a3a357def9714aa1ecc
Parents: 5826261
Author: lpuskas <lp...@apache.org>
Authored: Tue Aug 22 10:03:24 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:02 2017 +0200
----------------------------------------------------------------------
.../api/services/ldap/AmbariConfiguration.java | 87 +++++++
.../services/ldap/AmbariConfigurationDTO.java | 67 -----
.../ldap/LdapCheckConfigurationRequest.java | 47 ----
.../api/services/ldap/LdapConfigOperation.java | 43 ++++
.../services/ldap/LdapConfigurationRequest.java | 49 ++++
.../services/ldap/LdapConfigurationService.java | 53 +---
.../api/services/ldap/LdapOperationRequest.java | 18 --
.../server/ldap/LdapConfigurationService.java | 60 +++++
.../ldap/LdapConfigurationValidatorService.java | 60 -----
.../apache/ambari/server/ldap/LdapModule.java | 4 +-
.../server/ldap/service/AmbariLdapFacade.java | 10 +-
.../ads/DefaultLdapConfigurationService.java | 243 +++++++++++++++++++
...efaultLdapConfigurationValidatorService.java | 243 -------------------
.../api/services/ldap/LDAPServiceTest.java | 6 +-
.../DefaultLdapConfigurationServiceTest.java | 113 +++++++++
...ltLdapConfigurationValidatorServiceTest.java | 113 ---------
16 files changed, 618 insertions(+), 598 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfiguration.java
new file mode 100644
index 0000000..b5cc921
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfiguration.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Domain POJO representing generic ambari configuration data.
+ */
+public class AmbariConfiguration {
+
+ /**
+ * The type of the configuration, e.g. ldap-configuration
+ */
+ private String type;
+
+ /**
+ * Version tag
+ */
+ private String versionTag;
+
+ /**
+ * Version number
+ */
+ private Integer version;
+
+ /**
+ * Created timestamp
+ */
+ private long createdTs;
+
+ private Set<Map<String, Object>> data = Collections.emptySet();
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Set<Map<String, Object>> getData() {
+ return data;
+ }
+
+ public void setData(Set<Map<String, Object>> data) {
+ this.data = data;
+ }
+
+ public String getVersionTag() {
+ return versionTag;
+ }
+
+ public void setVersionTag(String versionTag) {
+ this.versionTag = versionTag;
+ }
+
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
+ public long getCreatedTs() {
+ return createdTs;
+ }
+
+ public void setCreatedTs(long createdTs) {
+ this.createdTs = createdTs;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
deleted file mode 100644
index 1b134fe..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/AmbariConfigurationDTO.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.services.ldap;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-
-public class AmbariConfigurationDTO {
- private String type;
- private Set<Map<String, Object>> data = Collections.emptySet();
- private String versionTag;
- private Integer version;
- private long createdTs;
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public Set<Map<String, Object>> getData() {
- return data;
- }
-
- public void setData(Set<Map<String, Object>> data) {
- this.data = data;
- }
-
- public String getVersionTag() {
- return versionTag;
- }
-
- public void setVersionTag(String versionTag) {
- this.versionTag = versionTag;
- }
-
- public Integer getVersion() {
- return version;
- }
-
- public void setVersion(Integer version) {
- this.version = version;
- }
-
- public long getCreatedTs() {
- return createdTs;
- }
-
- public void setCreatedTs(long createdTs) {
- this.createdTs = createdTs;
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
deleted file mode 100644
index 188f1b9..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapCheckConfigurationRequest.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.services.ldap;
-
-
-import com.google.gson.annotations.SerializedName;
-
-public class LdapCheckConfigurationRequest implements LdapOperationRequest {
-
- @SerializedName("AmbariConfiguration")
- private AmbariConfigurationDTO ambariConfiguration;
-
- @SerializedName("RequestInfo")
- private LdapRequestInfo requestInfo;
-
- public LdapCheckConfigurationRequest() {
- }
-
-
- public AmbariConfigurationDTO getAmbariConfiguration() {
- return ambariConfiguration;
- }
-
- public void setAmbariConfiguration(AmbariConfigurationDTO ambariConfiguration) {
- this.ambariConfiguration = ambariConfiguration;
- }
-
- public LdapRequestInfo getRequestInfo() {
- return requestInfo;
- }
-
- public void setRequestInfo(LdapRequestInfo requestInfo) {
- this.requestInfo = requestInfo;
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigOperation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigOperation.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigOperation.java
new file mode 100644
index 0000000..478d4ff
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigOperation.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+/**
+ * Enumeration for supported operations related to LDAP configuration.
+ */
+public enum LdapConfigOperation {
+ TEST_CONNECTION("test-connection"),
+ TEST_ATTRIBUTES("test-attributes"),
+ DETECT_ATTRIBUTES("detect-attributes");
+
+ private String actionStr;
+
+ LdapConfigOperation(String actionStr) {
+ this.actionStr = actionStr;
+ }
+
+ public static LdapConfigOperation fromAction(String action) {
+ for (LdapConfigOperation val : LdapConfigOperation.values()) {
+ if (val.action().equals(action)) {
+ return val;
+ }
+ }
+ throw new IllegalStateException("Action [ " + action + " ] is not supported");
+ }
+
+ public String action() {
+ return this.actionStr;
+ }
+}
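LdapConfigOperation maps the wire-level action string carried by the request onto a typed operation, and fromAction throws IllegalStateException for anything unrecognized. A brief usage sketch:

// Resolves the action string from the request to a typed operation; an
// unknown action surfaces as an IllegalStateException, as implemented above.
LdapConfigOperation op = LdapConfigOperation.fromAction("test-connection");
assert op == LdapConfigOperation.TEST_CONNECTION;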
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationRequest.java
new file mode 100644
index 0000000..2e478c4
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationRequest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Request object wrapping information for LDAP configuration related request calls.
+ */
+public class LdapConfigurationRequest {
+
+ @SerializedName("AmbariConfiguration")
+ private AmbariConfiguration ambariConfiguration;
+
+ @SerializedName("RequestInfo")
+ private LdapRequestInfo requestInfo;
+
+ public LdapConfigurationRequest() {
+ }
+
+ public AmbariConfiguration getAmbariConfiguration() {
+ return ambariConfiguration;
+ }
+
+ public void setAmbariConfiguration(AmbariConfiguration ambariConfiguration) {
+ this.ambariConfiguration = ambariConfiguration;
+ }
+
+ public LdapRequestInfo getRequestInfo() {
+ return requestInfo;
+ }
+
+ public void setRequestInfo(LdapRequestInfo requestInfo) {
+ this.requestInfo = requestInfo;
+ }
+}
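Given the @SerializedName annotations above and the checks in validateRequest() (a non-null RequestInfo and exactly one entry in the configuration data), a request body that Gson could bind to this class would look roughly as follows; the property key inside data is a placeholder for illustration, not a key defined by this patch:

{
  "AmbariConfiguration": {
    "type": "ldap-configuration",
    "data": [
      { "ambari.ldap.example.property": "value" }
    ]
  },
  "RequestInfo": {
    "action": "test-connection",
    "parameters": {}
  }
}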
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
index 52244bc..fc6bd41 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
@@ -74,40 +74,13 @@ public class LdapConfigurationService extends AmbariConfigurationService {
@Inject
private static LdapConfigurationFactory ldapConfigurationFactory;
- /**
- * Actions supported by this endpoint
- */
- private enum LdapAction {
- TEST_CONNECTION("test-connection"),
- TEST_ATTRIBUTES("test-attributes"),
- DETECT_ATTRIBUTES("detect-attributes");
-
- private String actionStr;
-
- LdapAction(String actionStr) {
- this.actionStr = actionStr;
- }
-
- public static LdapAction fromAction(String action) {
- for (LdapAction val : LdapAction.values()) {
- if (val.action().equals(action)) {
- return val;
- }
- }
- throw new IllegalStateException("Action [ " + action + " ] is not supported");
- }
-
- public String action() {
- return this.actionStr;
- }
- }
@POST
@ApiIgnore // until documented
@Path("/validate")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+ public Response validateConfiguration(LdapConfigurationRequest ldapConfigurationRequest) {
authorize();
@@ -116,12 +89,12 @@ public class LdapConfigurationService extends AmbariConfigurationService {
Result result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.OK));
try {
- validateRequest(ldapCheckConfigurationRequest);
+ validateRequest(ldapConfigurationRequest);
AmbariLdapConfiguration ambariLdapConfiguration = ldapConfigurationFactory.createLdapConfiguration(
- ldapCheckConfigurationRequest.getAmbariConfiguration().getData().iterator().next());
+ ldapConfigurationRequest.getAmbariConfiguration().getData().iterator().next());
- LdapAction action = LdapAction.fromAction(ldapCheckConfigurationRequest.getRequestInfo().getAction());
+ LdapConfigOperation action = LdapConfigOperation.fromAction(ldapConfigurationRequest.getRequestInfo().getAction());
switch (action) {
case TEST_CONNECTION:
@@ -133,7 +106,7 @@ public class LdapConfigurationService extends AmbariConfigurationService {
case TEST_ATTRIBUTES:
LOGGER.info("Testing LDAP attributes ....");
- groups = ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
+ groups = ldapFacade.checkLdapAttibutes(ldapConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
setResult(groups, result);
break;
@@ -161,24 +134,24 @@ public class LdapConfigurationService extends AmbariConfigurationService {
result.getResultTree().addChild(resource, "payload");
}
- private void validateRequest(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+ private void validateRequest(LdapConfigurationRequest ldapConfigurationRequest) {
String errMsg;
- if (null == ldapCheckConfigurationRequest) {
+ if (null == ldapConfigurationRequest) {
errMsg = "No ldap configuraiton request provided";
LOGGER.error(errMsg);
throw new IllegalArgumentException(errMsg);
}
- if (null == ldapCheckConfigurationRequest.getRequestInfo()) {
- errMsg = String.format("No request information provided. Request: [%s]", ldapCheckConfigurationRequest);
+ if (null == ldapConfigurationRequest.getRequestInfo()) {
+ errMsg = String.format("No request information provided. Request: [%s]", ldapConfigurationRequest);
LOGGER.error(errMsg);
throw new IllegalArgumentException(errMsg);
}
- if (null == ldapCheckConfigurationRequest.getAmbariConfiguration()
- || ldapCheckConfigurationRequest.getAmbariConfiguration().getData().size() != 1) {
- errMsg = String.format("No / Invalid configuration data provided. Request: [%s]", ldapCheckConfigurationRequest);
+ if (null == ldapConfigurationRequest.getAmbariConfiguration()
+ || ldapConfigurationRequest.getAmbariConfiguration().getData().size() != 1) {
+ errMsg = String.format("No / Invalid configuration data provided. Request: [%s]", ldapConfigurationRequest);
LOGGER.error(errMsg);
throw new IllegalArgumentException(errMsg);
}
@@ -202,7 +175,7 @@ public class LdapConfigurationService extends AmbariConfigurationService {
}
- Set<RoleAuthorization> requiredAuthorizations() {
+ private Set<RoleAuthorization> requiredAuthorizations() {
return Sets.newHashSet(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
deleted file mode 100644
index 06f6c40..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapOperationRequest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.services.ldap;
-
-public interface LdapOperationRequest {
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationService.java
new file mode 100644
index 0000000..69a641f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationService.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap;
+
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.directory.ldap.client.api.LdapConnection;
+
+/**
+ * Collection of operations for validating ldap configuration.
+ * It's intended to decouple implementations using different libraries.
+ */
+public interface LdapConfigurationService {
+
+ /**
+ * Tests the connection based on the provided configuration.
+ *
+ * @param ldapConnection connection instance
+ * @param configuration the ambari ldap configuration instance
+ * @throws AmbariLdapException if the connection is not possible
+ */
+ void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration configuration) throws AmbariLdapException;
+
+
+ /**
+ * Implements LDAP user-related configuration validation logic.
+ * Implementers communicate with the LDAP server (search, bind) to validate attributes in the provided configuration
+ * instance.
+ *
+ * @param ldapConnection connection instance used to connect to the LDAP server
+ * @param testUserName the test username
+ * @param testPassword the test password
+ * @param configuration the available ldap configuration
+ * @return The DN of the found user entry
+ * @throws AmbariLdapException if the connection couldn't be established
+ */
+ String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration configuration) throws AmbariLdapException;
+
+ /**
+ * Checks whether the group related LDAP attributes in the configuration are correct.
+ *
+ * @throws AmbariLdapException if the attributes are not valid
+ */
+ Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
+
+}
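The AmbariLdapFacade changes later in this commit chain these three operations together: check the connection, resolve the test user's DN, then collect that user's groups. A condensed sketch of the sequence, assuming the connection, configuration, and test credentials are already in scope:

// Condensed from the AmbariLdapFacade usage shown further down in this commit;
// connection, configuration, userName, and password are assumed in scope.
ldapConfigurationService.checkConnection(connection, configuration);
String userDn = ldapConfigurationService.checkUserAttributes(
    connection, userName, password, configuration);
Set<String> groups = ldapConfigurationService.checkGroupAttributes(
    connection, userDn, configuration);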
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
deleted file mode 100644
index 7efa3b7..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapConfigurationValidatorService.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap;
-
-import java.util.Set;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ldap.service.AmbariLdapException;
-import org.apache.directory.ldap.client.api.LdapConnection;
-
-/**
- * Collection of operations for validating ldap configuration.
- * It's intended to decouple implementations using different libraries.
- */
-public interface LdapConfigurationValidatorService {
-
- /**
- * Tests the connection based on the provided configuration.
- *
- * @param ldapConnection connection instance
- * @param configuration the ambari ldap configuration instance
- * @throws AmbariLdapException if the connection is not possible
- */
- void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration configuration) throws AmbariLdapException;
-
-
- /**
- * Implements LDAP user related configuration settings validation logic.
- * Implementers communicate with the LDAP server (search, bind) to validate attributes in the provided configuration
- * instance
- *
- * @param ldapConnection connection instance used to connect to the LDAP server
- * @param testUserName the test username
- * @param testPassword the test password
- * @param configuration the available ldap configuration
- * @return The DN of the found user entry
- * @throws AmbariException if the connection couldn't be estabilisheds
- */
- String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration configuration) throws AmbariLdapException;
-
- /**
- * Checks whether the group related LDAP attributes in the configuration are correct.
- *
- * @throws AmbariException if the attributes are not valid
- */
- Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
-
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
index a4ad2ee..3ae4587 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -18,7 +18,7 @@ package org.apache.ambari.server.ldap;
import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.ambari.server.ldap.service.LdapFacade;
-import org.apache.ambari.server.ldap.service.ads.DefaultLdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.ads.DefaultLdapConfigurationService;
import org.apache.ambari.server.ldap.service.ads.DefaultLdapConnectionService;
import com.google.inject.AbstractModule;
@@ -32,7 +32,7 @@ public class LdapModule extends AbstractModule {
@Override
protected void configure() {
bind(LdapFacade.class).to(AmbariLdapFacade.class);
- bind(LdapConfigurationValidatorService.class).to(DefaultLdapConfigurationValidatorService.class);
+ bind(LdapConfigurationService.class).to(DefaultLdapConfigurationService.class);
bind(LdapConnectionService.class).to(DefaultLdapConnectionService.class);
install(new FactoryModuleBuilder().build(LdapConfigurationFactory.class));
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
index eec47ce..683ed43 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
@@ -22,7 +22,7 @@ import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.LdapConfigurationService;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,7 +49,7 @@ public class AmbariLdapFacade implements LdapFacade {
}
@Inject
- private LdapConfigurationValidatorService ldapConfigurationValidatorService;
+ private LdapConfigurationService ldapConfigurationService;
@Inject
private LdapConnectionService ldapConnectionService;
@@ -63,7 +63,7 @@ public class AmbariLdapFacade implements LdapFacade {
try {
LOGGER.info("Validating LDAP connection related configuration based on: {}", ambariLdapConfiguration);
LdapConnection connection = ldapConnectionService.createLdapConnection(ambariLdapConfiguration);
- ldapConfigurationValidatorService.checkConnection(connection, ambariLdapConfiguration);
+ ldapConfigurationService.checkConnection(connection, ambariLdapConfiguration);
} catch (AmbariLdapException e) {
LOGGER.error("Validating LDAP connection configuration failed", e);
throw e;
@@ -90,10 +90,10 @@ public class AmbariLdapFacade implements LdapFacade {
LdapConnection ldapConnection = ldapConnectionService.createLdapConnection(ldapConfiguration);
LOGGER.info("Testing LDAP user attributes with test user: {}", userName);
- String userDn = ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, userName, testUserPass, ldapConfiguration);
+ String userDn = ldapConfigurationService.checkUserAttributes(ldapConnection, userName, testUserPass, ldapConfiguration);
LOGGER.info("Testing LDAP group attributes with test user dn: {}", userDn);
- Set<String> groups = ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, userDn, ldapConfiguration);
+ Set<String> groups = ldapConfigurationService.checkGroupAttributes(ldapConnection, userDn, ldapConfiguration);
return groups;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationService.java
new file mode 100644
index 0000000..abc9201
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationService.java
@@ -0,0 +1,243 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ads;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationService;
+import org.apache.ambari.server.ldap.service.AmbariLdapException;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.codec.decorators.SearchResultEntryDecorator;
+import org.apache.directory.api.ldap.model.constants.SchemaConstants;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.cursor.SearchCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.exception.LdapException;
+import org.apache.directory.api.ldap.model.message.Response;
+import org.apache.directory.api.ldap.model.message.SearchRequest;
+import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.search.FilterBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/**
+ * Implementation of the validation logic using the Apache Directory API.
+ */
+@Singleton
+public class DefaultLdapConfigurationService implements LdapConfigurationService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationService.class);
+
+ @Inject
+ private LdapConnectionService ldapConnectionService;
+
+ /**
+ * Default constructor facilitating instantiation.
+ */
+ @Inject
+ public DefaultLdapConfigurationService() {
+ }
+
+ @Override
+ public void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ try {
+ bind(ambariLdapConfiguration, ldapConnection);
+ } catch (LdapException e) {
+ LOGGER.error("Could not connect to the LDAP server", e);
+ throw new AmbariLdapException(e);
+ }
+ }
+
+
+ /**
+ * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
+ * Attributes are considered correct if there is at least one entry found.
+ *
+ * Invalid attributes are signaled by throwing an exception.
+ *
+ * @param testUserName the test username
+ * @param testPassword the test password
+ * @param ambariLdapConfiguration configuration instance holding ldap configuration details
+ * @return the DN of the test user
+ * @throws AmbariLdapException if the attributes are not valid or any error occurs
+ */
+ @Override
+ public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ String userDn = null;
+ try {
+ LOGGER.info("Checking user attributes for user {} r ...", testUserName);
+
+ // bind anonymously or with manager data
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), testUserName))
+ .toString();
+
+ LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
+ EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
+
+ // collecting search result entries
+ List<Entry> users = Lists.newArrayList();
+ for (Entry entry : entryCursor) {
+ users.add(entry);
+ userDn = entry.getDn().getNormName();
+ }
+
+ // there should be at least one user found
+ if (users.isEmpty()) {
+ String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
+ LOGGER.error(msg);
+ throw new Exception(msg);
+ }
+
+ LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+ closeResources(ldapConnection, searchCursor);
+ }
+ return userDn;
+ }
+
+
+ @Override
+ public Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
+ SearchCursor searchCursor = null;
+ Set<Response> groupResponses = Sets.newHashSet();
+
+ try {
+ LOGGER.info("Checking group attributes for user dn {} ...", userDn);
+
+ bind(ambariLdapConfiguration, ldapConnection);
+
+ // set up a filter based on the provided attributes
+ String filter = FilterBuilder.and(
+ FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.groupObjectClass()),
+ FilterBuilder.equal(ambariLdapConfiguration.groupMemberAttribute(), userDn)
+ ).toString();
+
+ LOGGER.info("Searching for the groups the user dn: {} is member of using the search filter: {}", userDn, filter);
+
+ // assemble a search request
+ SearchRequest searchRequest = new SearchRequestImpl();
+ searchRequest.setFilter(filter);
+ searchRequest.setBase(new Dn(ambariLdapConfiguration.groupSearchBase()));
+ searchRequest.setScope(SearchScope.SUBTREE);
+ searchRequest.addAttributes(ambariLdapConfiguration.groupMemberAttribute(), ambariLdapConfiguration.groupNameAttribute());
+
+ // perform the search
+ searchCursor = ldapConnection.search(searchRequest);
+
+ for (Response response : searchCursor) {
+ groupResponses.add(response);
+ }
+
+ } catch (Exception e) {
+
+ LOGGER.error("User attributes validation failed.", e);
+ throw new AmbariLdapException(e.getMessage(), e);
+
+ } finally {
+ closeResources(ldapConnection, searchCursor);
+ }
+
+ return processGroupResults(groupResponses, ambariLdapConfiguration);
+ }
+
+ /**
+ * Binds to the LDAP server (anonymously or with manager credentials)
+ *
+ * @param ambariLdapConfiguration configuration instance
+ * @param connection connection instance
+ * @throws LdapException if the bind operation fails
+ */
+ private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
+ LOGGER.info("Connecting to LDAP ....");
+ if (!ambariLdapConfiguration.bindAnonimously()) {
+ LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
+ connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
+ } else {
+ LOGGER.debug("Binding anonimously ...");
+ connection.bind();
+ }
+
+ if (!connection.isConnected()) {
+ LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
+ throw new IllegalStateException("The connection to the LDAP server is not alive");
+ }
+ LOGGER.info("Connected to LDAP.");
+ }
+
+
+ /**
+ * Extracts meaningful values from the search result.
+ *
+ * @param groupResponses the result entries returned by the search
+ * @param ambariLdapConfiguration holds the keys of the meaningful attributes
+ * @return a set with the group names the test user belongs to
+ */
+ private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
+ Set<String> groupStrSet = Sets.newHashSet();
+ for (Response response : groupResponses) {
+ Entry entry = ((SearchResultEntryDecorator) response).getEntry();
+ groupStrSet.add(entry.get(ambariLdapConfiguration.groupNameAttribute()).get().getString());
+ }
+
+ LOGGER.debug("Extracted group names from group search responses: {}", groupStrSet);
+ return groupStrSet;
+ }
+
+ private void closeResources(LdapConnection connection, SearchCursor searchCursor) {
+ LOGGER.debug("Housekeeping: closing the connection and the search cursor ...");
+
+ if (null != searchCursor) {
+ // this method is idempotent
+ searchCursor.close();
+ }
+
+ if (null != connection) {
+ try {
+ connection.close();
+ } catch (IOException e) {
+ LOGGER.error("Exception occurred while closing the connection", e);
+ }
+ }
+ }
+
+}
+
+
+
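Both check methods above assemble their search filters with the Apache Directory API's FilterBuilder. As a point of reference, with a user object class of person and a user name attribute of uid (example values, not defaults shipped by this patch), the user filter renders as a standard LDAP conjunction:

import org.apache.directory.ldap.client.api.search.FilterBuilder;

// Same FilterBuilder pattern as checkUserAttributes(), with literal example
// values; this yields the string "(&(objectClass=person)(uid=testuser))".
String filter = FilterBuilder.and(
    FilterBuilder.equal("objectClass", "person"),
    FilterBuilder.equal("uid", "testuser"))
    .toString();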
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
deleted file mode 100644
index 040983a..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorService.java
+++ /dev/null
@@ -1,243 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ads;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.AmbariLdapException;
-import org.apache.ambari.server.ldap.service.LdapConnectionService;
-import org.apache.directory.api.ldap.codec.decorators.SearchResultEntryDecorator;
-import org.apache.directory.api.ldap.model.constants.SchemaConstants;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.cursor.SearchCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.exception.LdapException;
-import org.apache.directory.api.ldap.model.message.Response;
-import org.apache.directory.api.ldap.model.message.SearchRequest;
-import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.search.FilterBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-/**
- * Implementation of the validation logic using the Apache Directory API.
- */
-@Singleton
-public class DefaultLdapConfigurationValidatorService implements LdapConfigurationValidatorService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
-
- @Inject
- private LdapConnectionService ldapConnectionService;
-
- /**
- * Facilitating the instantiation
- */
- @Inject
- public DefaultLdapConfigurationValidatorService() {
- }
-
- @Override
- public void checkConnection(LdapConnection ldapConnection, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- try {
- bind(ambariLdapConfiguration, ldapConnection);
- } catch (LdapException e) {
- LOGGER.error("Could not connect to the LDAP server", e);
- throw new AmbariLdapException(e);
- }
- }
-
-
- /**
- * Checks the user attributes provided in the configuration instance by issuing a search for a (known) test user in the LDAP.
- * Attributes are considered correct if there is at least one entry found.
- *
- * Invalid attributes are signaled by throwing an exception.
- *
- * @param testUserName the test username
- * @param testPassword the test password
- * @param ambariLdapConfiguration configuration instance holding ldap configuration details
- * @return the DN of the test user
- * @throws AmbariException if the attributes are not valid or any errors occurs
- */
- @Override
- public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- SearchCursor searchCursor = null;
- String userDn = null;
- try {
- LOGGER.info("Checking user attributes for user {} r ...", testUserName);
-
- // bind anonimously or with manager data
- bind(ambariLdapConfiguration, ldapConnection);
-
- // set up a filter based on the provided attributes
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), testUserName))
- .toString();
-
- LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
- EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
-
- // collecting search result entries
- List<Entry> users = Lists.newArrayList();
- for (Entry entry : entryCursor) {
- users.add(entry);
- userDn = entry.getDn().getNormName();
- }
-
- // there should be at least one user found
- if (users.isEmpty()) {
- String msg = String.format("There are no users found using the filter: [ %s ]. Try changing the attribute values", filter);
- LOGGER.error(msg);
- throw new Exception(msg);
- }
-
- LOGGER.info("Attibute validation succeeded. Filter: {}", filter);
-
- } catch (Exception e) {
-
- LOGGER.error("User attributes validation failed.", e);
- throw new AmbariLdapException(e.getMessage(), e);
-
- } finally {
- closeResources(ldapConnection, searchCursor);
- }
- return userDn;
- }
-
-
- @Override
- public Set<String> checkGroupAttributes(LdapConnection ldapConnection, String userDn, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- SearchCursor searchCursor = null;
- Set<Response> groupResponses = Sets.newHashSet();
-
- try {
- LOGGER.info("Checking group attributes for user dn {} ...", userDn);
-
- bind(ambariLdapConfiguration, ldapConnection);
-
- // set up a filter based on the provided attributes
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.groupObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.groupMemberAttribute(), userDn)
- ).toString();
-
- LOGGER.info("Searching for the groups the user dn: {} is member of using the search filter: {}", userDn, filter);
-
- // assemble a search request
- SearchRequest searchRequest = new SearchRequestImpl();
- searchRequest.setFilter(filter);
- searchRequest.setBase(new Dn(ambariLdapConfiguration.groupSearchBase()));
- searchRequest.setScope(SearchScope.SUBTREE);
- searchRequest.addAttributes(ambariLdapConfiguration.groupMemberAttribute(), ambariLdapConfiguration.groupNameAttribute());
-
- // perform the search
- searchCursor = ldapConnection.search(searchRequest);
-
- for (Response response : searchCursor) {
- groupResponses.add(response);
- }
-
- } catch (Exception e) {
-
- LOGGER.error("User attributes validation failed.", e);
- throw new AmbariLdapException(e.getMessage(), e);
-
- } finally {
- closeResources(ldapConnection, searchCursor);
- }
-
- return processGroupResults(groupResponses, ambariLdapConfiguration);
- }
-
- /**
- * Binds to the LDAP server (anonimously or wit manager credentials)
- *
- * @param ambariLdapConfiguration configuration instance
- * @param connection connection instance
- * @throws LdapException if the bind operation fails
- */
- private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
- LOGGER.info("Connecting to LDAP ....");
- if (!ambariLdapConfiguration.bindAnonimously()) {
- LOGGER.debug("Anonimous binding not supported, binding with the manager detailas...");
- connection.bind(ambariLdapConfiguration.managerDn(), ambariLdapConfiguration.managerPassword());
- } else {
- LOGGER.debug("Binding anonimously ...");
- connection.bind();
- }
-
- if (!connection.isConnected()) {
- LOGGER.error("Not connected to the LDAP server. Connection instance: {}", connection);
- throw new IllegalStateException("The connection to the LDAP server is not alive");
- }
- LOGGER.info("Connected to LDAP.");
- }
-
-
- /**
- * Extracts meaningful values from the search result.
- *
- * @param groupResponses the result entries returned by the search
- * @param ambariLdapConfiguration holds the keys of the meaningful attributes
- * @return a set with the group names the test user belongs to
- */
- private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
- Set<String> groupStrSet = Sets.newHashSet();
- for (Response response : groupResponses) {
- Entry entry = ((SearchResultEntryDecorator) response).getEntry();
- groupStrSet.add(entry.get(ambariLdapConfiguration.groupNameAttribute()).get().getString());
- }
-
- LOGGER.debug("Extracted group names from group search responses: {}", groupStrSet);
- return groupStrSet;
- }
-
- private void closeResources(LdapConnection connection, SearchCursor searchCursor) {
- LOGGER.debug("Housekeeping: closing the connection and the search cursor ...");
-
- if (null != searchCursor) {
- // this method is idempotent
- searchCursor.close();
- }
-
- if (null != connection) {
- try {
- connection.close();
- } catch (IOException e) {
- LOGGER.error("Exception occurred while closing the connection", e);
- }
- }
- }
-
-}
-
-
-
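For reference, the group lookup in the removed method above reduces to a single LDAP filter assembled with FilterBuilder. A minimal, standalone sketch of the same construction (the object class, member attribute, and user DN below are illustrative values, not taken from a live configuration):

    import org.apache.directory.ldap.client.api.search.FilterBuilder;

    public class GroupFilterSketch {
      public static void main(String[] args) {
        // Produces: (&(objectClass=groupOfUniqueNames)(uniqueMember=uid=einstein,dc=example,dc=com))
        String filter = FilterBuilder.and(
            FilterBuilder.equal("objectClass", "groupOfUniqueNames"),
            FilterBuilder.equal("uniqueMember", "uid=einstein,dc=example,dc=com")
        ).toString();
        System.out.println(filter);
      }
    }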
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
index f20cd1f..5e8eac6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/ldap/LDAPServiceTest.java
@@ -46,11 +46,11 @@ public class LDAPServiceTest {
// WHEN
- LdapCheckConfigurationRequest ldapCheckConfigurationRequest = gsonJsonProvider.fromJson(JSON_STRING, LdapCheckConfigurationRequest.class);
- // LdapCheckConfigurationRequest ldapCheckConfigurationRequest = objectMapper.readValue(JSON_STRING, LdapCheckConfigurationRequest.class);
+ LdapConfigurationRequest ldapConfigurationRequest = gsonJsonProvider.fromJson(JSON_STRING, LdapConfigurationRequest.class);
+ // LdapConfigurationRequest ldapConfigurationRequest = objectMapper.readValue(JSON_STRING, LdapConfigurationRequest.class);
// THEN
- Assert.assertNotNull(ldapCheckConfigurationRequest);
+ Assert.assertNotNull(ldapConfigurationRequest);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
new file mode 100644
index 0000000..2b7448e
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationServiceTest.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.ldap.service.ads;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Map;
+
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationService;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
+import org.apache.directory.api.ldap.model.constants.SchemaConstants;
+import org.apache.directory.api.ldap.model.cursor.EntryCursor;
+import org.apache.directory.api.ldap.model.entry.Entry;
+import org.apache.directory.api.ldap.model.message.SearchScope;
+import org.apache.directory.ldap.client.api.LdapConnection;
+import org.apache.directory.ldap.client.api.LdapConnectionConfig;
+import org.apache.directory.ldap.client.api.LdapNetworkConnection;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+
+public class DefaultLdapConfigurationServiceTest {
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationService.class);
+ private static final String TEST_USER = "einstein";
+
+ LdapConfigurationService ldapConfigurationService = new DefaultLdapConfigurationService();
+
+
+ @Test
+ public void testCheckAttributes() throws Exception {
+
+ // WHEN
+ LdapConnectionConfig config = new LdapConnectionConfig();
+ config.setLdapHost("localhost");
+ config.setLdapPort(389);
+ LdapConnection connection = new LdapNetworkConnection(config);
+
+ // THEN
+ connection.anonymousBind();
+
+
+ EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
+
+ for (Entry entry : cursor) {
+ assertNotNull(entry);
+ System.out.println(entry);
+ }
+
+ cursor.close();
+
+ }
+
+ @Test
+ public void testCheckUserAttributes() throws Exception {
+ // GIVEN
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(), "dc=example,dc=com");
+
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ ldapConfigurationService.checkUserAttributes(ldapConnection, "einstein", "", ambariLdapConfiguration);
+ }
+
+ @Test
+ public void testRetrieveGorupsForuser() throws Exception {
+ // GIVEN
+ Map<String, Object> ldapPropsMap = Maps.newHashMap();
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
+
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_SEARCH_BASE.propertyName(), "dc=example,dc=com");
+
+
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+
+ ldapConfigurationService.checkGroupAttributes(ldapConnection, "uid=einstein,dc=example,dc=com", ambariLdapConfiguration);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/12294a06/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
deleted file mode 100644
index 1c7f75d..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ads/DefaultLdapConfigurationValidatorServiceTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.ldap.service.ads;
-
-import static org.junit.Assert.assertNotNull;
-
-import java.util.Map;
-
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
-import org.apache.ambari.server.ldap.service.LdapConnectionService;
-import org.apache.directory.api.ldap.model.constants.SchemaConstants;
-import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.LdapConnectionConfig;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Maps;
-
-public class DefaultLdapConfigurationValidatorServiceTest {
- private static final Logger LOGGER = LoggerFactory.getLogger(DefaultLdapConfigurationValidatorService.class);
- private static final String TEST_USER = "einstein";
-
- LdapConfigurationValidatorService ldapConfigurationValidatorService = new DefaultLdapConfigurationValidatorService();
-
-
- @Test
- public void testCheckAttributes() throws Exception {
-
- // WHEN
- LdapConnectionConfig config = new LdapConnectionConfig();
- config.setLdapHost("localhost");
- config.setLdapPort(389);
- LdapConnection connection = new LdapNetworkConnection(config);
-
- // THEN
- connection.anonymousBind();
-
-
- EntryCursor cursor = connection.search("dc=dev,dc=local", "(objectclass=*)", SearchScope.ONELEVEL);
-
- for (Entry entry : cursor) {
- assertNotNull(entry);
- System.out.println(entry);
- }
-
- cursor.close();
-
- }
-
- @Test
- public void testCheckUserAttributes() throws Exception {
- // GIVEN
- Map<String, Object> ldapPropsMap = Maps.newHashMap();
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(), "dc=example,dc=com");
-
-
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
- LdapConnectionService connectionService = new DefaultLdapConnectionService();
- LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
-
- ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, "einstein", "", ambariLdapConfiguration);
- }
-
- @Test
- public void testRetrieveGorupsForuser() throws Exception {
- // GIVEN
- Map<String, Object> ldapPropsMap = Maps.newHashMap();
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
-
-
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_SEARCH_BASE.propertyName(), "dc=example,dc=com");
-
-
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
- LdapConnectionService connectionService = new DefaultLdapConnectionService();
- LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
-
- ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, "uid=einstein,dc=example,dc=com", ambariLdapConfiguration);
- }
-}
\ No newline at end of file
[49/57] [abbrv] ambari git commit: AMBARI-21307 Groups for the test
user returned to the caller
Posted by lp...@apache.org.
AMBARI-21307 Groups for the test user returned to the caller
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff9b3788
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff9b3788
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff9b3788
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: ff9b3788d2a0467c998e556d82c32d4aeb9f1bb0
Parents: d8813ff
Author: lpuskas <lp...@apache.org>
Authored: Tue Aug 8 15:50:29 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:01 2017 +0200
----------------------------------------------------------------------
.../api/services/ldap/LdapRestService.java | 16 +++++-
.../server/ldap/AmbariLdapConfiguration.java | 2 +-
.../apache/ambari/server/ldap/LdapModule.java | 3 +
.../server/ldap/service/AmbariLdapFacade.java | 3 +-
.../ambari/server/ldap/service/LdapFacade.java | 3 +-
...efaultLdapConfigurationValidatorService.java | 25 ++++++---
.../ad/DefaultLdapConnectionService.java | 2 +-
...ltLdapConfigurationValidatorServiceTest.java | 59 +++-----------------
8 files changed, 49 insertions(+), 64 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
index 33b10fa..8578204 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
@@ -28,6 +28,8 @@
package org.apache.ambari.server.api.services.ldap;
+import java.util.Set;
+
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
@@ -41,12 +43,16 @@ import org.apache.ambari.server.api.services.BaseService;
import org.apache.ambari.server.api.services.Result;
import org.apache.ambari.server.api.services.ResultImpl;
import org.apache.ambari.server.api.services.ResultStatus;
+import org.apache.ambari.server.controller.internal.ResourceImpl;
+import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.LdapConfigurationFactory;
import org.apache.ambari.server.ldap.service.LdapFacade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.collect.Sets;
+
/**
* Endpoint designated to LDAP specific operations.
*/
@@ -68,6 +74,8 @@ public class LdapRestService extends BaseService {
@Consumes(MediaType.APPLICATION_JSON)
public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+ Set<String> groups = Sets.newHashSet();
+
Result result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.OK));
try {
@@ -86,7 +94,11 @@ public class LdapRestService extends BaseService {
case "test-attributes":
LOGGER.info("Testing LDAP attributes ....");
- ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
+ groups = ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
+ // todo: factor out the resource creation and design the response structure better
+ Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
+ resource.setProperty("groups", groups);
+ result.getResultTree().addChild(resource, "payload");
break;
case "detect-attributes":
@@ -101,7 +113,7 @@ public class LdapRestService extends BaseService {
}
} catch (Exception e) {
- result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.BAD_REQUEST, e));
+ result.setResultStatus(new ResultStatus(ResultStatus.STATUS.BAD_REQUEST, e));
}
return Response.status(result.getStatus().getStatusCode()).entity(getResultSerializer().serialize(result)).build();
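The hunk above is also the whole pattern for surfacing extra data on the result tree: build a Resource, set a property, attach it as a child. A minimal sketch of that pattern in isolation (the group names are made-up sample values):

    import java.util.Set;
    import com.google.common.collect.Sets;
    import org.apache.ambari.server.controller.internal.ResourceImpl;
    import org.apache.ambari.server.controller.spi.Resource;

    public class GroupsPayloadSketch {
      public static void main(String[] args) {
        Set<String> groups = Sets.newHashSet("scientists", "mathematicians"); // sample values
        Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
        resource.setProperty("groups", groups);
        System.out.println(resource);
      }
    }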
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
index a6ff80b..8ab587b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/AmbariLdapConfiguration.java
@@ -48,7 +48,7 @@ public class AmbariLdapConfiguration {
MANAGER_PASSWORD("ambari.ldap.managerpassword"),
USER_OBJECT_CLASS("ambari.ldap.user.object.class"),
USER_NAME_ATTRIBUTE("ambari.ldap.user.name.attribute"),
- USER_SEARCH_BASE("ambari.ldap.user.search.Base"),
+ USER_SEARCH_BASE("ambari.ldap.user.search.base"),
GROUP_OBJECT_CLASS("ambari.ldap.group.object.class"),
GROUP_NAME_ATTRIBUTE("ambari.ldap.group.name.attribute"),
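Since the tests build configurations from a plain property map, the corrected lower-case key is what now has to appear in that map. A small sketch (the search base value is an example):

    import java.util.Map;
    import com.google.common.collect.Maps;
    import org.apache.ambari.server.ldap.AmbariLdapConfiguration;

    public class UserSearchBaseSketch {
      public static void main(String[] args) {
        Map<String, Object> props = Maps.newHashMap();
        // resolves to "ambari.ldap.user.search.base" after this fix
        props.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(),
            "dc=example,dc=com");
        AmbariLdapConfiguration configuration = new AmbariLdapConfiguration(props);
        System.out.println(configuration.userSearchBase());
      }
    }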
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
index 545f220..1b49159 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/LdapModule.java
@@ -16,8 +16,10 @@
package org.apache.ambari.server.ldap;
import org.apache.ambari.server.ldap.service.AmbariLdapFacade;
+import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.ambari.server.ldap.service.LdapFacade;
import org.apache.ambari.server.ldap.service.ad.DefaultLdapConfigurationValidatorService;
+import org.apache.ambari.server.ldap.service.ad.DefaultLdapConnectionService;
import com.google.inject.AbstractModule;
import com.google.inject.assistedinject.FactoryModuleBuilder;
@@ -31,6 +33,7 @@ public class LdapModule extends AbstractModule {
protected void configure() {
bind(LdapFacade.class).to(AmbariLdapFacade.class);
bind(LdapConfigurationValidatorService.class).to(DefaultLdapConfigurationValidatorService.class);
+ bind(LdapConnectionService.class).to(DefaultLdapConnectionService.class);
install(new FactoryModuleBuilder().build(LdapConfigurationFactory.class));
}
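With the extra binding in place, a Guice injector built from this module can hand out a fully wired facade. A minimal bootstrap sketch (assuming no other modules are required for these bindings):

    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import org.apache.ambari.server.ldap.LdapModule;
    import org.apache.ambari.server.ldap.service.LdapFacade;

    public class LdapModuleBootstrapSketch {
      public static void main(String[] args) {
        Injector injector = Guice.createInjector(new LdapModule());
        LdapFacade facade = injector.getInstance(LdapFacade.class); // an AmbariLdapFacade instance
        System.out.println(facade.getClass().getName());
      }
    }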
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
index abb464b..eec47ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/AmbariLdapFacade.java
@@ -79,7 +79,7 @@ public class AmbariLdapFacade implements LdapFacade {
}
@Override
- public void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ldapConfiguration) throws AmbariLdapException {
+ public Set<String> checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ldapConfiguration) throws AmbariLdapException {
String userName = getTestUserNameFromParameters(parameters);
String testUserPass = getTestUserPasswordFromParameters(parameters);
@@ -95,6 +95,7 @@ public class AmbariLdapFacade implements LdapFacade {
LOGGER.info("Testing LDAP group attributes with test user dn: {}", userDn);
Set<String> groups = ldapConfigurationValidatorService.checkGroupAttributes(ldapConnection, userDn, ldapConfiguration);
+ return groups;
}
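From the caller's side the changed signature looks like this. A usage sketch in which the facade and configuration are passed in; the two parameter keys are assumptions made for illustration (the real keys live in the facade's private helpers):

    import java.util.Map;
    import java.util.Set;
    import com.google.common.collect.Maps;
    import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
    import org.apache.ambari.server.ldap.service.AmbariLdapException;
    import org.apache.ambari.server.ldap.service.LdapFacade;

    public class CheckAttributesSketch {
      static Set<String> testLdapAttributes(LdapFacade facade, AmbariLdapConfiguration configuration)
          throws AmbariLdapException {
        Map<String, Object> parameters = Maps.newHashMap();
        parameters.put("ambari.ldap.test.user.name", "einstein");   // assumed parameter key
        parameters.put("ambari.ldap.test.user.password", "secret"); // assumed parameter key
        return facade.checkLdapAttibutes(parameters, configuration); // now returns the group names
      }
    }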
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
index 7bb1198..eadff7d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/LdapFacade.java
@@ -15,6 +15,7 @@
package org.apache.ambari.server.ldap.service;
import java.util.Map;
+import java.util.Set;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
@@ -47,5 +48,5 @@ public interface LdapFacade {
* @param ambariLdapConfiguration configuration instance with available attributes
* @throws AmbariLdapException if the attribute checking fails
*/
- void checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
+ Set<String> checkLdapAttibutes(Map<String, Object> parameters, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
index 838ef4c..a8503ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorService.java
@@ -37,7 +37,6 @@ import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.LdapConnection;
-import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.apache.directory.ldap.client.api.search.FilterBuilder;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.slf4j.Logger;
@@ -84,18 +83,18 @@ public class DefaultLdapConfigurationValidatorService implements LdapConfigurati
* @param testUserName the test username
* @param testPassword the test password
* @param ambariLdapConfiguration configuration instance holding ldap configuration details
+ * @return the DN of the test user
* @throws AmbariException if the attributes are not valid or any error occurs
*/
@Override
public String checkUserAttributes(LdapConnection ldapConnection, String testUserName, String testPassword, AmbariLdapConfiguration ambariLdapConfiguration) throws AmbariLdapException {
- LdapNetworkConnection connection = null;
SearchCursor searchCursor = null;
String userDn = null;
try {
LOGGER.info("Checking user attributes for user {} r ...", testUserName);
// bind anonymously or with manager data
- bind(ambariLdapConfiguration, connection);
+ bind(ambariLdapConfiguration, ldapConnection);
// set up a filter based on the provided attributes
String filter = FilterBuilder.and(
@@ -104,7 +103,7 @@ public class DefaultLdapConfigurationValidatorService implements LdapConfigurati
.toString();
LOGGER.info("Searching for the user: {} using the search filter: {}", testUserName, filter);
- EntryCursor entryCursor = connection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
+ EntryCursor entryCursor = ldapConnection.search(new Dn(ambariLdapConfiguration.userSearchBase()), filter, SearchScope.SUBTREE);
// collecting search result entries
List<Entry> users = Lists.newArrayList();
@@ -128,7 +127,7 @@ public class DefaultLdapConfigurationValidatorService implements LdapConfigurati
throw new AmbariLdapException(e.getMessage(), e);
} finally {
- closeResources(connection, searchCursor);
+ closeResources(ldapConnection, searchCursor);
}
return userDn;
}
@@ -172,14 +171,19 @@ public class DefaultLdapConfigurationValidatorService implements LdapConfigurati
throw new AmbariLdapException(e.getMessage(), e);
} finally {
-
closeResources(ldapConnection, searchCursor);
-
}
return processGroupResults(groupResponses, ambariLdapConfiguration);
}
+ /**
+ * Binds to the LDAP server (anonymously or with manager credentials)
+ *
+ * @param ambariLdapConfiguration configuration instance
+ * @param connection connection instance
+ * @throws LdapException if the bind operation fails
+ */
private void bind(AmbariLdapConfiguration ambariLdapConfiguration, LdapConnection connection) throws LdapException {
LOGGER.info("Connecting to LDAP ....");
if (!ambariLdapConfiguration.bindAnonimously()) {
@@ -198,6 +202,13 @@ public class DefaultLdapConfigurationValidatorService implements LdapConfigurati
}
+ /**
+ * Extracts meaningful values from the search result.
+ *
+ * @param groupResponses the result entries returned by the search
+ * @param ambariLdapConfiguration holds the keys of the meaningful attributes
+ * @return a set with the group names the test user belongs to
+ */
private Set<String> processGroupResults(Set<Response> groupResponses, AmbariLdapConfiguration ambariLdapConfiguration) {
Set<String> groupStrSet = Sets.newHashSet();
for (Response response : groupResponses) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
index b5559d9..25dc1f2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConnectionService.java
@@ -56,7 +56,7 @@ public class DefaultLdapConnectionService implements LdapConnectionService {
ldapConnectionConfig.setLdapPort(ambariAmbariLdapConfiguration.ldapServerPort());
ldapConnectionConfig.setUseSsl(ambariAmbariLdapConfiguration.useSSL());
- //todo set the other values as required
+ // todo set the other values as required
return ldapConnectionConfig;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9b3788/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
index 5c9d304..663ea12 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/ldap/service/ad/DefaultLdapConfigurationValidatorServiceTest.java
@@ -18,23 +18,15 @@ import static org.junit.Assert.assertNotNull;
import java.util.Map;
-import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
import org.apache.ambari.server.ldap.LdapConfigurationValidatorService;
import org.apache.ambari.server.ldap.service.LdapConnectionService;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
-import org.apache.directory.api.ldap.model.cursor.SearchCursor;
import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.message.Response;
-import org.apache.directory.api.ldap.model.message.SearchRequest;
-import org.apache.directory.api.ldap.model.message.SearchRequestImpl;
-import org.apache.directory.api.ldap.model.message.SearchResultEntry;
import org.apache.directory.api.ldap.model.message.SearchScope;
-import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
-import org.apache.directory.ldap.client.api.search.FilterBuilder;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.junit.Test;
import org.slf4j.Logger;
@@ -75,57 +67,24 @@ public class DefaultLdapConfigurationValidatorServiceTest {
@Test
public void testCheckUserAttributes() throws Exception {
+ // GIVEN
Map<String, Object> ldapPropsMap = Maps.newHashMap();
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), false);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BIND_ANONIMOUSLY.propertyName(), "true");
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_HOST.propertyName(), "ldap.forumsys.com");
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
+
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_MEMBER_ATTRIBUTE.propertyName(), SchemaConstants.UNIQUE_MEMBER_AT);
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
+ ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_SEARCH_BASE.propertyName(), "dc=example,dc=com");
- AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
-
-
- try {
- LOGGER.info("Authenticating user {} against the LDAP server ...", TEST_USER);
- LdapConnectionService connectionService = new DefaultLdapConnectionService();
- LdapNetworkConnection connection = connectionService.createLdapConnection(ambariLdapConfiguration);
-
- String filter = FilterBuilder.and(
- FilterBuilder.equal(SchemaConstants.OBJECT_CLASS_AT, ambariLdapConfiguration.userObjectClass()),
- FilterBuilder.equal(ambariLdapConfiguration.userNameAttribute(), TEST_USER))
- .toString();
-
- SearchRequest searchRequest = new SearchRequestImpl();
- searchRequest.setBase(new Dn(ambariLdapConfiguration.baseDn()));
- searchRequest.setFilter(filter);
- searchRequest.setScope(SearchScope.SUBTREE);
- LOGGER.info("loking up user: {} based on the filtr: {}", TEST_USER, filter);
-
- connection.bind();
- SearchCursor searchCursor = connection.search(searchRequest);
-
- while (searchCursor.next()) {
- Response response = searchCursor.get();
-
- // process the SearchResultEntry
- if (response instanceof SearchResultEntry) {
- Entry resultEntry = ((SearchResultEntry) response).getEntry();
- System.out.println(resultEntry);
- }
- }
-
- searchCursor.close();
-
- } catch (Exception e) {
- throw new AmbariException("Error during user authentication check", e);
- }
+ AmbariLdapConfiguration ambariLdapConfiguration = new AmbariLdapConfiguration(ldapPropsMap);
+ LdapConnectionService connectionService = new DefaultLdapConnectionService();
+ LdapNetworkConnection ldapConnection = connectionService.createLdapConnection(ambariLdapConfiguration);
+ ldapConfigurationValidatorService.checkUserAttributes(ldapConnection, "einstein", "", ambariLdapConfiguration);
}
@Test
@@ -138,8 +97,6 @@ public class DefaultLdapConfigurationValidatorServiceTest {
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.LDAP_SERVER_PORT.propertyName(), "389");
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.BASE_DN.propertyName(), "dc=example,dc=com");
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_OBJECT_CLASS.propertyName(), SchemaConstants.PERSON_OC);
- ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.USER_NAME_ATTRIBUTE.propertyName(), SchemaConstants.UID_AT);
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_OBJECT_CLASS.propertyName(), SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC);
ldapPropsMap.put(AmbariLdapConfiguration.LdapConfigProperty.GROUP_NAME_ATTRIBUTE.propertyName(), SchemaConstants.CN_AT);
[02/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if
unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/sqoop_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/sqoop_default.json b/ambari-server/src/test/python/common-services/configs/sqoop_default.json
new file mode 100644
index 0000000..73a810d
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/sqoop_default.json
@@ -0,0 +1,879 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "agent_stack_retry_count": "5",
+ "agent_stack_retry_on_unavailability": "false",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "repo_info": "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.0",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "java_version": "8",
+ "db_name": "ambari"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "HIVE",
+ "role": "HIVE_CLIENT",
+ "commandParams": {
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2",
+ "mark_draining_only" : "false",
+ "xml_configs_list":[{"hdfs-site.xml":"hdfs-site"}],
+ "env_configs_list":[{"hadoop-env.sh":"hadoop-env"}],
+ "output_file":"HDFS_CLIENT-configs.tar.gz"
+
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "mapred-site": {
+ "mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020",
+ "mapreduce.cluster.administrators": " hadoop",
+ "mapreduce.reduce.input.buffer.percent": "0.0",
+ "mapreduce.output.fileoutputformat.compress": "false",
+ "mapreduce.framework.name": "yarn",
+ "mapreduce.map.speculative": "false",
+ "mapreduce.reduce.shuffle.merge.percent": "0.66",
+ "yarn.app.mapreduce.am.resource.mb": "683",
+ "mapreduce.map.java.opts": "-Xmx273m",
+ "mapreduce.application.classpath": "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*",
+ "mapreduce.job.reduce.slowstart.completedmaps": "0.05",
+ "mapreduce.output.fileoutputformat.compress.type": "BLOCK",
+ "mapreduce.reduce.speculative": "false",
+ "mapreduce.reduce.java.opts": "-Xmx546m",
+ "mapreduce.am.max-attempts": "2",
+ "yarn.app.mapreduce.am.admin-command-opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+ "mapreduce.reduce.log.level": "INFO",
+ "mapreduce.map.sort.spill.percent": "0.7",
+ "mapreduce.task.timeout": "300000",
+ "mapreduce.map.memory.mb": "341",
+ "mapreduce.task.io.sort.factor": "100",
+ "mapreduce.jobhistory.intermediate-done-dir": "/mr-history/tmp",
+ "mapreduce.reduce.memory.mb": "683",
+ "yarn.app.mapreduce.am.log.level": "INFO",
+ "mapreduce.map.log.level": "INFO",
+ "mapreduce.shuffle.port": "13562",
+ "mapreduce.admin.user.env": "LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &> /dev/null;if [ $? -eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`",
+ "mapreduce.map.output.compress": "false",
+ "yarn.app.mapreduce.am.staging-dir": "/user",
+ "mapreduce.reduce.shuffle.parallelcopies": "30",
+ "mapreduce.reduce.shuffle.input.buffer.percent": "0.7",
+ "mapreduce.jobhistory.webapp.address": "c6402.ambari.apache.org:19888",
+ "mapreduce.jobhistory.done-dir": "/mr-history/done",
+ "mapreduce.admin.reduce.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN",
+ "mapreduce.task.io.sort.mb": "136",
+ "yarn.app.mapreduce.am.command-opts": "-Xmx546m",
+ "mapreduce.admin.map.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN"
+ },
+ "oozie-site": {
+ "oozie.service.PurgeService.purge.interval": "3600",
+ "oozie.service.CallableQueueService.queue.size": "1000",
+ "oozie.service.SchemaService.wf.ext.schemas": "shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd,shell-action-0.2.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd,hive-action-0.3.xsd",
+ "oozie.service.JPAService.jdbc.url": "jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true",
+ "oozie.service.HadoopAccessorService.nameNode.whitelist": " ",
+ "use.system.libpath.for.mapreduce.and.pig.jobs": "false",
+ "oozie.db.schema.name": "oozie",
+ "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials",
+ "oozie.service.JPAService.create.db.schema": "false",
+ "oozie.authentication.kerberos.name.rules": "\n RULE:[2:$1@$0]([jt]t@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-MAPREDUSER/\n RULE:[2:$1@$0]([nd]n@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HDFSUSER/\n RULE:[2:$1@$0](hm@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HBASE-USER/\n RULE:[2:$1@$0](rs@.*TODO-KERBEROS-DOMAIN)s/.*/TODO-HBASE-USER/\n DEFAULT",
+ "oozie.service.ActionService.executor.ext.classes": "\n org.apache.oozie.action.email.EmailActionExecutor,\n org.apache.oozie.action.hadoop.HiveActionExecutor,\n org.apache.oozie.action.hadoop.ShellActionExecutor,\n org.apache.oozie.action.hadoop.SqoopActionExecutor,\n org.apache.oozie.action.hadoop.DistcpActionExecutor",
+ "oozie.base.url": "http://c6402.ambari.apache.org:11000/oozie",
+ "oozie.service.JPAService.jdbc.password": "asd",
+ "oozie.service.coord.normal.default.timeout": "120",
+ "oozie.service.AuthorizationService.security.enabled": "true",
+ "oozie.service.JPAService.pool.max.active.conn": "10",
+ "oozie.service.PurgeService.older.than": "30",
+ "oozie.service.coord.push.check.requeue.interval": "30000",
+ "oozie.service.HadoopAccessorService.hadoop.configurations": "*=/etc/hadoop/conf",
+ "oozie.service.HadoopAccessorService.jobTracker.whitelist": " ",
+ "oozie.service.CallableQueueService.callable.concurrency": "3",
+ "oozie.service.JPAService.jdbc.username": "oozie",
+ "oozie.service.CallableQueueService.threads": "10",
+ "oozie.services.ext": "org.apache.oozie.service.PartitionDependencyManagerService,org.apache.oozie.service.HCatAccessorService",
+ "oozie.systemmode": "NORMAL",
+ "oozie.service.WorkflowAppService.system.libpath": "/user/${user.name}/share/lib",
+ "oozie.services": "\n org.apache.oozie.service.SchedulerService,\n org.apache.oozie.service.InstrumentationService,\n org.apache.oozie.service.CallableQueueService,\n org.apache.oozie.service.UUIDService,\n org.apache.oozie.service.ELService,\n org.apache.oozie.service.AuthorizationService,\n org.apache.oozie.service.UserGroupInformationService,\n org.apache.oozie.service.HadoopAccessorService,\n org.apache.oozie.service.URIHandlerService,\n org.apache.oozie.service.MemoryLocksService,\n org.apache.oozie.service.DagXLogInfoService,\n org.apache.oozie.service.SchemaService,\n org.apache.oozie.service.LiteWorkflowAppService,\n org.apache.oozie.service.JPAService,\n org.apache.oozie.service.StoreService,\n org.apache.oozie.service.CoordinatorStoreService,\n org.apache.oozie.service.SLAStoreService,\n org.apache.oozie.service.DBLiteWorkflowStoreService,\n
org.apache.oozie.service.CallbackService,\n org.apache.oozie.service.ActionService,\n org.apache.oozie.service.ActionCheckerService,\n org.apache.oozie.service.RecoveryService,\n org.apache.oozie.service.PurgeService,\n org.apache.oozie.service.CoordinatorEngineService,\n org.apache.oozie.service.BundleEngineService,\n org.apache.oozie.service.DagEngineService,\n org.apache.oozie.service.CoordMaterializeTriggerService,\n org.apache.oozie.service.StatusTransitService,\n org.apache.oozie.service.PauseTransitService,\n org.apache.oozie.service.GroupsService,\n org.apache.oozie.service.ProxyUserService",
+ "oozie.service.URIHandlerService.uri.handlers": "org.apache.oozie.dependency.FSURIHandler,org.apache.oozie.dependency.HCatURIHandler",
+ "oozie.authentication.type": "simple",
+ "oozie.service.JPAService.jdbc.driver": "org.apache.derby.jdbc.EmbeddedDriver",
+ "oozie.system.id": "oozie-${user.name}"
+ },
+ "storm-site": {
+ "topology.tuple.serializer": "backtype.storm.serialization.types.ListDelegateSerializer",
+ "topology.workers": "1",
+ "drpc.worker.threads": "64",
+ "storm.zookeeper.servers": "['c6401.ambari.apache.org','c6402.ambari.apache.org']",
+ "supervisor.heartbeat.frequency.secs": "5",
+ "topology.executor.send.buffer.size": "1024",
+ "drpc.childopts": "-Xmx768m",
+ "nimbus.thrift.port": "6627",
+ "storm.zookeeper.retry.intervalceiling.millis": "30000",
+ "storm.local.dir": "/hadoop/storm",
+ "topology.receiver.buffer.size": "8",
+ "storm.messaging.netty.client_worker_threads": "1",
+ "transactional.zookeeper.root": "/transactional",
+ "drpc.request.timeout.secs": "600",
+ "topology.skip.missing.kryo.registrations": "false",
+ "worker.heartbeat.frequency.secs": "1",
+ "zmq.hwm": "0",
+ "storm.zookeeper.connection.timeout": "15000",
+ "topology.max.error.report.per.interval": "5",
+ "storm.messaging.netty.server_worker_threads": "1",
+ "supervisor.worker.start.timeout.secs": "120",
+ "zmq.threads": "1",
+ "topology.acker.executors": "null",
+ "storm.local.mode.zmq": "false",
+ "topology.max.task.parallelism": "null",
+ "storm.zookeeper.port": "2181",
+ "nimbus.childopts": "-Xmx1024m",
+ "worker.childopts": "-Xmx768m",
+ "drpc.queue.size": "128",
+ "storm.zookeeper.retry.times": "5",
+ "nimbus.monitor.freq.secs": "10",
+ "storm.cluster.mode": "distributed",
+ "dev.zookeeper.path": "/tmp/dev-storm-zookeeper",
+ "drpc.invocations.port": "3773",
+ "storm.zookeeper.root": "/storm",
+ "logviewer.childopts": "-Xmx128m",
+ "transactional.zookeeper.port": "null",
+ "topology.worker.childopts": "null",
+ "topology.max.spout.pending": "null",
+ "nimbus.cleanup.inbox.freq.secs": "600",
+ "storm.messaging.netty.min_wait_ms": "100",
+ "nimbus.task.timeout.secs": "30",
+ "nimbus.thrift.max_buffer_size": "1048576",
+ "topology.sleep.spout.wait.strategy.time.ms": "1",
+ "topology.optimize": "true",
+ "nimbus.reassign": "true",
+ "storm.messaging.transport": "backtype.storm.messaging.netty.Context",
+ "logviewer.appender.name": "A1",
+ "nimbus.host": "c6401.ambari.apache.org",
+ "ui.port": "8744",
+ "supervisor.slots.ports": "[6700, 6701]",
+ "nimbus.file.copy.expiration.secs": "600",
+ "supervisor.monitor.frequency.secs": "3",
+ "ui.childopts": "-Xmx768m",
+ "transactional.zookeeper.servers": "null",
+ "zmq.linger.millis": "5000",
+ "topology.error.throttle.interval.secs": "10",
+ "topology.worker.shared.thread.pool.size": "4",
+ "java.library.path": "/usr/local/lib:/opt/local/lib:/usr/lib",
+ "topology.spout.wait.strategy": "backtype.storm.spout.SleepSpoutWaitStrategy",
+ "task.heartbeat.frequency.secs": "3",
+ "topology.transfer.buffer.size": "1024",
+ "storm.zookeeper.session.timeout": "20000",
+ "topology.executor.receive.buffer.size": "1024",
+ "topology.stats.sample.rate": "0.05",
+ "topology.fall.back.on.java.serialization": "true",
+ "supervisor.childopts": "-Xmx256m",
+ "topology.enable.message.timeouts": "true",
+ "storm.messaging.netty.max_wait_ms": "1000",
+ "nimbus.topology.validator": "backtype.storm.nimbus.DefaultTopologyValidator",
+ "nimbus.supervisor.timeout.secs": "60",
+ "topology.disruptor.wait.strategy": "com.lmax.disruptor.BlockingWaitStrategy",
+ "nimbus.inbox.jar.expiration.secs": "3600",
+ "drpc.port": "3772",
+ "topology.kryo.factory": "backtype.storm.serialization.DefaultKryoFactory",
+ "storm.zookeeper.retry.interval": "1000",
+ "storm.messaging.netty.max_retries": "30",
+ "topology.tick.tuple.freq.secs": "null",
+ "supervisor.enable": "true",
+ "nimbus.task.launch.secs": "120",
+ "task.refresh.poll.secs": "10",
+ "topology.message.timeout.secs": "30",
+ "storm.messaging.netty.buffer_size": "5242880",
+ "topology.state.synchronization.timeout.secs": "60",
+ "supervisor.worker.timeout.secs": "30",
+ "topology.trident.batch.emit.interval.millis": "500",
+ "topology.builtin.metrics.bucket.size.secs": "60",
+ "storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
+ "logviewer.port": "8000",
+ "topology.debug": "false"
+ },
+ "ranger-hive-plugin-properties": {
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "policy_user": "ambari-qa",
+ "UPDATE_XAPOLICIES_ON_GRANT_REVOKE": "true",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "ranger-hive-plugin-enabled": "No",
+ "jdbc.driverClassName": "org.apache.hive.jdbc.HiveDriver",
+ "REPOSITORY_CONFIG_USERNAME": "hive",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "hive",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "webhcat-site": {
+ "templeton.pig.path": "pig.tar.gz/pig/bin/pig",
+ "templeton.exec.timeout": "60000",
+ "templeton.override.enabled": "false",
+ "templeton.jar": "/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar",
+ "templeton.zookeeper.hosts": "c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181",
+ "templeton.hive.properties": "hive.metastore.local=false,hive.metastore.uris=thrift://c6402.ambari.apache.org:9083,hive.metastore.sasl.enabled=yes,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse",
+ "templeton.storage.class": "org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage",
+ "templeton.hive.archive": "hdfs:///apps/webhcat/hive.tar.gz",
+ "templeton.streaming.jar": "hdfs:///apps/webhcat/hadoop-streaming.jar",
+ "templeton.port": "50111",
+ "templeton.libjars": "/usr/lib/zookeeper/zookeeper.jar",
+ "templeton.hadoop": "/usr/bin/hadoop",
+ "templeton.hive.path": "hive.tar.gz/hive/bin/hive",
+ "templeton.hadoop.conf.dir": "/etc/hadoop/conf",
+ "templeton.hcat": "/usr/bin/hcat",
+ "templeton.pig.archive": "hdfs:///apps/webhcat/pig.tar.gz"
+ },
+ "capacity-scheduler": {
+ "yarn.scheduler.capacity.node-locality-delay": "40",
+ "yarn.scheduler.capacity.root.capacity": "100",
+ "yarn.scheduler.capacity.root.acl_administer_queue": "*",
+ "yarn.scheduler.capacity.root.queues": "default",
+ "yarn.scheduler.capacity.maximum-applications": "10000",
+ "yarn.scheduler.capacity.root.default.user-limit-factor": "1",
+ "yarn.scheduler.capacity.root.default.maximum-capacity": "100",
+ "yarn.scheduler.capacity.root.default.state": "RUNNING",
+ "yarn.scheduler.capacity.maximum-am-resource-percent": "0.2",
+ "yarn.scheduler.capacity.root.default.acl_administer_jobs": "*",
+ "yarn.scheduler.capacity.root.default.capacity": "100",
+ "yarn.scheduler.capacity.root.default.acl_submit_applications": "*"
+ },
+ "hdfs-site": {
+ "dfs.namenode.checkpoint.period": "21600",
+ "dfs.namenode.avoid.write.stale.datanode": "true",
+ "dfs.block.access.token.enable": "true",
+ "dfs.support.append": "true",
+ "dfs.datanode.address": "0.0.0.0:${ambari.dfs.datanode.port}",
+ "dfs.cluster.administrators": " hdfs",
+ "dfs.replication": "3",
+ "ambari.dfs.datanode.http.port": "50075",
+ "dfs.datanode.balance.bandwidthPerSec": "6250000",
+ "dfs.namenode.safemode.threshold-pct": "1.0f",
+ "dfs.namenode.checkpoint.edits.dir": "${dfs.namenode.checkpoint.dir}",
+ "dfs.permissions.enabled": "true",
+ "fs.checkpoint.size": "67108864",
+ "dfs.client.read.shortcircuit": "true",
+ "dfs.namenode.https-address": "c6401.ambari.apache.org:50470",
+ "dfs.journalnode.edits.dir": "/grid/0/hdfs/journal",
+ "dfs.blocksize": "134217728",
+ "dfs.datanode.max.transfer.threads": "1024",
+ "dfs.datanode.du.reserved": "1073741824",
+ "dfs.webhdfs.enabled": "true",
+ "dfs.namenode.handler.count": "100",
+ "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary",
+ "fs.permissions.umask-mode": "022",
+ "dfs.datanode.http.address": "0.0.0.0:50075",
+ "dfs.datanode.ipc.address": "0.0.0.0:8010",
+ "dfs.datanode.data.dir": "/hadoop/hdfs/data",
+ "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",
+ "dfs.blockreport.initialDelay": "120",
+ "dfs.datanode.failed.volumes.tolerated": "0",
+ "dfs.namenode.accesstime.precision": "0",
+ "ambari.dfs.datanode.port": "50010",
+ "dfs.namenode.avoid.read.stale.datanode": "true",
+ "dfs.namenode.secondary.http-address": "c6402.ambari.apache.org:50090",
+ "dfs.namenode.stale.datanode.interval": "30000",
+ "dfs.heartbeat.interval": "3",
+ "dfs.client.read.shortcircuit.streams.cache.size": "4096",
+ "dfs.permissions.superusergroup": "hdfs",
+ "dfs.https.port": "50470",
+ "dfs.journalnode.http-address": "0.0.0.0:8480",
+ "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
+ "dfs.namenode.write.stale.datanode.ratio": "1.0f",
+ "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
+ "dfs.datanode.data.dir.perm": "750",
+ "dfs.namenode.name.dir.restore": "true",
+ "dfs.replication.max": "50",
+ "dfs.namenode.name.dir": "/hadoop/hdfs/namenode"
+ },
+ "hbase-site": {
+ "hbase.hstore.flush.retries.number": "120",
+ "hbase.client.keyvalue.maxsize": "10485760",
+ "hbase.hstore.compactionThreshold": "3",
+ "hbase.rootdir": "hdfs://c6401.ambari.apache.org:8020/apps/hbase/data",
+ "hbase.regionserver.handler.count": "60",
+ "hbase.regionserver.global.memstore.lowerLimit": "0.38",
+ "hbase.hregion.memstore.block.multiplier": "2",
+ "hbase.hregion.memstore.flush.size": "134217728",
+ "hbase.superuser": "hbase",
+ "hbase.zookeeper.property.clientPort": "2181",
+ "hbase.regionserver.global.memstore.upperLimit": "0.4",
+ "zookeeper.session.timeout": "30000",
+ "hbase.tmp.dir": "/hadoop/hbase",
+ "hbase.local.dir": "${hbase.tmp.dir}/local",
+ "hbase.hregion.max.filesize": "10737418240",
+ "hfile.block.cache.size": "0.40",
+ "hbase.security.authentication": "simple",
+ "hbase.defaults.for.version.skip": "true",
+ "hbase.zookeeper.quorum": "c6401.ambari.apache.org,c6402.ambari.apache.org",
+ "zookeeper.znode.parent": "/hbase-unsecure",
+ "hbase.hstore.blockingStoreFiles": "10",
+ "hbase.master.port": "60000",
+ "hbase.hregion.majorcompaction": "86400000",
+ "hbase.security.authorization": "false",
+ "hbase.cluster.distributed": "true",
+ "hbase.hregion.memstore.mslab.enabled": "true",
+ "hbase.client.scanner.caching": "100",
+ "hbase.zookeeper.useMulti": "true"
+ },
+ "core-site": {
+ "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",
+ "gluster.daemon.user": "null",
+ "hadoop.proxyuser.oozie.groups": "users",
+ "hadoop.proxyuser.hcat.hosts": "c6402.ambari.apache.org",
+ "hadoop.proxyuser.hive.groups": "users",
+ "hadoop.security.authentication": "simple",
+ "hadoop.proxyuser.oozie.hosts": "c6402.ambari.apache.org",
+ "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec",
+ "mapreduce.jobtracker.webinterface.trusted": "false",
+ "fs.AbstractFileSystem.glusterfs.impl": "null",
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020",
+ "fs.trash.interval": "360",
+ "ipc.client.idlethreshold": "8000",
+ "io.file.buffer.size": "131072",
+ "hadoop.security.authorization": "false",
+ "hadoop.proxyuser.hive.hosts": "c6402.ambari.apache.org",
+ "hadoop.security.auth_to_local": "\n RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n DEFAULT",
+ "hadoop.proxyuser.hcat.groups": "users",
+ "ipc.client.connection.maxidletime": "30000",
+ "ipc.client.connect.max.retries": "50"
+ },
+ "hive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true"
+ },
+ "hive-interactive-site": {
+ "hive.enforce.sorting": "true",
+ "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+ "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true",
+ "hive.security.metastore.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "fs.file.impl.disable.cache": "true",
+ "hive.auto.convert.join.noconditionaltask": "true",
+ "hive.map.aggr": "true",
+ "hive.optimize.index.filter": "true",
+ "hive.security.authorization.enabled": "false",
+ "hive.optimize.reducededuplication.min.reducer": "1",
+ "hive.optimize.bucketmapjoin": "true",
+ "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9084",
+ "hive.mapjoin.bucket.cache.size": "10000",
+ "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+ "hive.vectorized.execution.enabled": "false",
+ "javax.jdo.option.ConnectionUserName": "hive",
+ "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+ "hive.optimize.reducededuplication": "true",
+ "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+ "hive.metastore.client.socket.timeout": "60",
+ "hive.auto.convert.join": "true",
+ "hive.enforce.bucketing": "true",
+ "hive.mapred.reduce.tasks.speculative.execution": "false",
+ "hive.security.authenticator.manager": "org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator",
+ "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true",
+ "hive.auto.convert.sortmerge.join": "true",
+ "fs.hdfs.impl.disable.cache": "true",
+ "hive.security.authorization.manager": "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+ "ambari.hive.db.schema.name": "hive",
+ "hive.metastore.execute.setugi": "true",
+ "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+ "hive.server2.enable.doAs": "true",
+ "hive.server2.transport.mode": "binary",
+ "hive.optimize.mapjoin.mapreduce": "true"
+ },
+ "yarn-site": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "0.25",
+ "yarn.nodemanager.container-executor.class": "org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor",
+ "yarn.nodemanager.local-dirs": "/hadoop/yarn/local,/hadoop/yarn/local1",
+ "yarn.resourcemanager.resource-tracker.address": "c6402.ambari.apache.org:8025",
+ "yarn.nodemanager.remote-app-log-dir-suffix": "logs",
+ "yarn.resourcemanager.hostname": "c6402.ambari.apache.org",
+ "yarn.nodemanager.health-checker.script.timeout-ms": "60000",
+ "yarn.resourcemanager.scheduler.class": "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler",
+ "yarn.nodemanager.resource.memory-mb": "2048",
+ "yarn.scheduler.minimum-allocation-mb": "683",
+ "yarn.resourcemanager.address": "c6402.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6402.ambari.apache.org:8030",
+ "yarn.log-aggregation.retain-seconds": "2592000",
+ "yarn.scheduler.maximum-allocation-mb": "2048",
+ "yarn.log-aggregation-enable": "true",
+ "yarn.nodemanager.address": "0.0.0.0:45454",
+ "yarn.nodemanager.container-monitor.interval-ms": "3000",
+ "yarn.nodemanager.log-aggregation.compression-type": "gz",
+ "yarn.nodemanager.log.retain-seconds": "604800",
+ "yarn.nodemanager.delete.debug-delay-sec": "0",
+ "yarn.nodemanager.log-dirs": "/hadoop/yarn/log,/hadoop/yarn/log1",
+ "yarn.nodemanager.health-checker.interval-ms": "135000",
+ "yarn.resourcemanager.am.max-attempts": "2",
+ "yarn.nodemanager.remote-app-log-dir": "/app-logs",
+ "yarn.nodemanager.admin-env": "MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX",
+ "yarn.nodemanager.aux-services": "mapreduce_shuffle",
+ "yarn.nodemanager.vmem-check-enabled": "false",
+ "yarn.nodemanager.vmem-pmem-ratio": "2.1",
+ "yarn.admin.acl": "*",
+ "yarn.resourcemanager.webapp.address": "c6402.ambari.apache.org:8088",
+ "yarn.resourcemanager.nodes.exclude-path": "/etc/hadoop/conf/yarn.exclude",
+ "yarn.nodemanager.linux-container-executor.group": "hadoop",
+ "yarn.acl.enable": "true",
+ "yarn.log.server.url": "http://c6402.ambari.apache.org:19888/jobhistory/logs",
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.admin.address": "c6402.ambari.apache.org:8141",
+ "yarn.nodemanager.aux-services.mapreduce_shuffle.class": "org.apache.hadoop.mapred.ShuffleHandler",
+ "yarn.timeline-service.leveldb-timeline-store.path": "/var/log/hadoop-yarn/timeline"
+ },
+ "tez-site": {
+ "tez.am.log.level": "WARN",
+ "tez.lib.uris": "hdfs:///apps/tez/,hdfs:///apps/tez/lib/",
+ "tez.staging-dir": "/tmp/${user.name}/staging",
+ "tez.am.am-rm.heartbeat.interval-ms.max": "250"
+ },
+ "yarn-env": {
+ "yarn_pid_dir_prefix": "/var/run/hadoop-yarn",
+ "apptimelineserver_heapsize": "1024",
+ "nodemanager_heapsize": "1024",
+ "content": "\nexport HADOOP_YARN_HOME={{hadoop_yarn_home}}\nexport YARN_LOG_DIR={{yarn_log_dir_prefix}}/$USER\nexport YARN_PID_DIR={{yarn_pid_dir_prefix}}/$USER\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\nexport JAVA_HOME={{java64_home}}\n\n# User for YARN daemons\nexport HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}\n\n# resolve links - $0 may be a softlink\nexport YARN_CONF_DIR=\"${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}\"\n\n# some Java parameters\n# export JAVA_HOME=/home/y/libexec/jdk1.6.0/\nif [ \"$JAVA_HOME\" != \"\" ]; then\n #echo \"run java in $JAVA_HOME\"\n JAVA_HOME=$JAVA_HOME\nfi\n\nif [ \"$JAVA_HOME\" = \"\" ]; then\n echo \"Error: JAVA_HOME is not set.\"\n exit 1\nfi\n\nJAVA=$JAVA_HOME/bin/java\nJAVA_HEAP_MAX=-Xmx1000m\n\n# For setting YARN specific HEAP sizes please use this\n# Parameter and set appropriately\nYARN_HEAPSIZE={{yarn_heapsize}}\n\n# check envvars which might override default args\nif [ \"$YARN_HEAPSIZE\" != \"\" ]; then\n JAVA_HEAP_MAX=\"-
Xmx\"\"$YARN_HEAPSIZE\"\"m\"\nfi\n\n# Resource Manager specific parameters\n\n# Specify the max Heapsize for the ResourceManager using a numerical value\n# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n# the value to 1000.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_RESOURCEMANAGER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_RESOURCEMANAGER_HEAPSIZE={{resourcemanager_heapsize}}\n\n# Specify the JVM options to be used when starting the ResourceManager.\n# These options will be appended to the options specified as YARN_OPTS\n# and therefore may override any similar flags set in YARN_OPTS\n#export YARN_RESOURCEMANAGER_OPTS=\n\n# Node Manager specific parameters\n\n# Specify the max Heapsize for the NodeManager using a numerical value\n# in the scale of MB. For example, to specif
y an jvm option of -Xmx1000m, set\n# the value to 1000.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_NODEMANAGER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}\n\n# Specify the max Heapsize for the HistoryManager using a numerical value\n# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set\n# the value to 1024.\n# This value will be overridden by an Xmx setting specified in either YARN_OPTS\n# and/or YARN_HISTORYSERVER_OPTS.\n# If not specified, the default value will be picked from either YARN_HEAPMAX\n# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.\nexport YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}\n\n# Specify the JVM options to be used when starting the NodeManager.\n# These options will be appen
ded to the options specified as YARN_OPTS\n# and therefore may override any similar flags set in YARN_OPTS\n#export YARN_NODEMANAGER_OPTS=\n\n# so that filenames w/ spaces are handled correctly in loops below\nIFS=\n\n\n# default log directory and file\nif [ \"$YARN_LOG_DIR\" = \"\" ]; then\n YARN_LOG_DIR=\"$HADOOP_YARN_HOME/logs\"\nfi\nif [ \"$YARN_LOGFILE\" = \"\" ]; then\n YARN_LOGFILE='yarn.log'\nfi\n\n# default policy file for service-level authorization\nif [ \"$YARN_POLICYFILE\" = \"\" ]; then\n YARN_POLICYFILE=\"hadoop-policy.xml\"\nfi\n\n# restore ordinary behaviour\nunset IFS\n\n\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.log.dir=$YARN_LOG_DIR\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.log.dir=$YARN_LOG_DIR\"\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.home.dir=$YARN_COMMON_HOME\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.id.str=$YARN_IDENT_STRING\"\nYARN_OPTS=\"$YARN_OPTS -Dhadoop.root.logger=${YARN_
ROOT_LOGGER:-INFO,console}\"\nYARN_OPTS=\"$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}\"\nif [ \"x$JAVA_LIBRARY_PATH\" != \"x\" ]; then\n YARN_OPTS=\"$YARN_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH\"\nfi\nYARN_OPTS=\"$YARN_OPTS -Dyarn.policy.file=$YARN_POLICYFILE\"",
+ "yarn_heapsize": "1024",
+ "yarn_user": "yarn",
+ "resourcemanager_heapsize": "1024",
+ "yarn_log_dir_prefix": "/var/log/hadoop-yarn",
+ "min_user_id": "1000"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "hadoop-env": {
+ "namenode_opt_maxnewsize": "200m",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "namenode_heapsize": "1024m",
+ "namenode_opt_newsize": "200m",
+ "namenode_opt_permsize" : "128m",
+ "namenode_opt_maxpermsize" : "256m",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# Hadoop Configuration Directory\n#TODO: if env var set that can cause problems\nexport HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nex
port HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to HADOOP_OPTS when specified\nexport HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USE
R/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/
gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SE
CURE_DN_USER\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The s
cheduling priority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql connector\nfor jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\nif [ -d \"/usr/lib/tez\" ]; then\n export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf\nfi\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64",
+ "hdfs_user": "hdfs",
+ "dtnode_heapsize": "1024m",
+ "proxyuser_group": "users",
+ "hadoop_heapsize": "1024",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop"
+ },
+ "hive-env": {
+ "hcat_pid_dir": "/var/run/webhcat",
+ "hcat_user": "hcat",
+ "hive_ambari_database": "MySQL",
+ "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+ "hive_metastore_port": "9083",
+ "webhcat_user": "hcat",
+ "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n if [ -z \"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n else\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n fi\n fi\n\n# The heap size of the jvm stared by hive shell script can be controlled via:\n\nexport HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger heap size may be required when running queries over large number of files or partitions.\n# By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration Directory can
be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing extra ibraries required for hive compilation/execution can be controlled by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" ]; then\n export HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n export HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport METASTORE_PORT={{hive_metastore_port}}",
+ "hive_database_name": "hive",
+ "hive_database_type": "mysql",
+ "hive_pid_dir": "/var/run/hive",
+ "hive_log_dir": "/var/log/hive",
+ "hive_user": "hive",
+ "hcat_log_dir": "/var/log/webhcat",
+ "hive_database": "New MySQL Database",
+ "hive_security_authorization": "None"
+ },
+ "hbase-env": {
+ "hbase_pid_dir": "/var/run/hbase",
+ "hbase_user": "hbase",
+ "hbase_master_heapsize": "1024m",
+ "content": "\n# Set environment variables here.\n\n# The java implementation to use. Java 1.6 required.\nexport JAVA_HOME={{java64_home}}\n\n# HBase Configuration directory\nexport HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}\n\n# Extra Java CLASSPATH elements. Optional.\nexport HBASE_CLASSPATH=${HBASE_CLASSPATH}\n\n# The maximum amount of heap to use, in MB. Default is 1000.\n# export HBASE_HEAPSIZE=1000\n\n# Extra Java runtime options.\n# Below are what we set by default. May only work with SUN JVM.\n# For more on why as well as other possible settings,\n# see http://wiki.apache.org/hadoop/PerformanceTuning\nexport HBASE_OPTS=\"-XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log\"\nexport SERVER_GC_OPTS=\"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`\"\n# Uncomment below to enable java garbage collection logging.\n# export HBASE_OPTS=\"$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateS
tamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log\"\n\n# Uncomment and adjust to enable JMX exporting\n# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.\n# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html\n#\n# export HBASE_JMX_BASE=\"-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false\"\nexport HBASE_MASTER_OPTS=\"-Xmx{{master_heapsize}}\"\nexport HBASE_REGIONSERVER_OPTS=\"-Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n# export HBASE_THRIFT_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103\"\n# export HBASE_ZOOKEEPER_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104\"\n\n# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.\nexport HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers\n\n# Extra
ssh options. Empty by default.\n# export HBASE_SSH_OPTS=\"-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR\"\n\n# Where log files are stored. $HBASE_HOME/logs by default.\nexport HBASE_LOG_DIR={{log_dir}}\n\n# A string representing this instance of hbase. $USER by default.\n# export HBASE_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes. See 'man nice'.\n# export HBASE_NICENESS=10\n\n# The directory where pid files are stored. /tmp by default.\nexport HBASE_PID_DIR={{pid_dir}}\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HBASE_SLAVE_SLEEP=0.1\n\n# Tell HBase whether it should manage it's own instance of Zookeeper or not.\nexport HBASE_MANAGES_ZK=false\n\n{% if security_enabled %}\nexport HBASE_OPTS=\"$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}\"\nexport HBASE_MASTER_OPTS=\"$HBAS
E_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}\"\nexport HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n{% endif %}",
+ "hbase_regionserver_heapsize": "1024m",
+ "hbase_regionserver_xmn_max": "512",
+ "hbase_regionserver_xmn_ratio": "0.2",
+ "hbase_log_dir": "/var/log/hbase",
+ "hbase_java_io_tmpdir" : "/tmp"
+ },
+ "ganglia-env": {
+ "gmond_user": "nobody",
+ "ganglia_runtime_dir": "/var/run/ganglia/hdp",
+ "rrdcached_base_dir": "/var/lib/ganglia/rrds",
+ "rrdcached_flush_timeout": "7200",
+ "gmetad_user": "nobody",
+ "rrdcached_write_threads": "4",
+ "rrdcached_delay": "1800",
+ "rrdcached_timeout": "3600"
+ },
+ "zookeeper-env": {
+ "clientPort": "2181",
+ "zk_user": "zookeeper",
+ "zk_log_dir": "/var/log/zookeeper",
+ "syncLimit": "5",
+ "content": "\nexport JAVA_HOME={{java64_home}}\nexport ZOO_LOG_DIR={{zk_log_dir}}\nexport ZOOPIDFILE={{zk_pid_file}}\nexport SERVER_JVMFLAGS={{zk_server_heapsize}}\nexport JAVA=$JAVA_HOME/bin/java\nexport CLASSPATH=$CLASSPATH:/usr/share/zookeeper/*\n\n{% if security_enabled %}\nexport SERVER_JVMFLAGS=\"$SERVER_JVMFLAGS -Djava.security.auth.login.config={{zk_server_jaas_file}}\"\nexport CLIENT_JVMFLAGS=\"$CLIENT_JVMFLAGS -Djava.security.auth.login.config={{zk_client_jaas_file}}\"\n{% endif %}",
+ "initLimit": "10",
+ "zk_pid_dir": "/var/run/zookeeper",
+ "zk_data_dir": "/hadoop/zookeeper",
+ "tickTime": "2000"
+ },
+ "mapred-env": {
+ "content": "\n# export JAVA_HOME=/home/y/libexec/jdk1.6.0/\n\nexport HADOOP_JOB_HISTORYSERVER_HEAPSIZE={{jobhistory_heapsize}}\n\nexport HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA\n\n#export HADOOP_JOB_HISTORYSERVER_OPTS=\n#export HADOOP_MAPRED_LOG_DIR=\"\" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.\n#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.\n#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.\n#export HADOOP_MAPRED_IDENT_STRING= #A string representing this instance of hadoop. $USER by default\n#export HADOOP_MAPRED_NICENESS= #The scheduling priority for daemons. Defaults to 0.",
+ "mapred_pid_dir_prefix": "/var/run/hadoop-mapreduce",
+ "mapred_user": "mapred",
+ "jobhistory_heapsize": "900",
+ "mapred_log_dir_prefix": "/var/log/hadoop-mapreduce"
+ },
+ "tez-env": {
+ "content": "\n# Tez specific configuration\nexport TEZ_CONF_DIR={{config_dir}}\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}",
+ "tez_user": "tez"
+ },
+ "storm-env": {
+ "content": "\n#!/bin/bash\n\n# Set Storm specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\n# export STORM_CONF_DIR=\"\"",
+ "storm_log_dir": "/var/log/storm",
+ "storm_pid_dir": "/var/run/storm",
+ "storm_user": "storm"
+ },
+ "falcon-env": {
+ "falcon_port": "15000",
+ "falcon_pid_dir": "/var/run/falcon",
+ "falcon_log_dir": "/var/log/falcon",
+ "falcon.emeddedmq.port": "61616",
+ "falcon_user": "falcon",
+ "falcon_local_dir": "/hadoop/falcon",
+ "content": "\n# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java_home}}\n\n# any additional java opts you want to set. This will apply to both client and server operations\n#export FALCON_OPTS=\n\n# any additional java opts that you want to set for client only\n#export FALCON_CLIENT_OPTS=\n\n# java heap size we want to set for the client. Default is 1024MB\n#export FALCON_CLIENT_HEAP=\n\n# any additional opts you want to set for prisim service.\n#export FALCON_PRISM_OPTS=\n\n# java heap size we want to set for the prisim service. Default is 1024MB\n#export FALCON_PRISM_HEAP=\n\n# any additional opts you want to set for falcon service.\nexport FALCON_SERVER_OPTS=\"-Dfalcon.embeddedmq={{falcon_embeddedmq_enabled}} -Dfalcon.emeddedmq.port={{falcon_emeddedmq_port}}\"\n\n# java heap size we want to set for the falcon server. Default is 1024MB\n#export FALCON_SERVER_HEAP=\n\n# What is is considered as falcon home
dir. Default is the base locaion of the installed software\n#export FALCON_HOME_DIR=\n\n# Where log files are stored. Defatult is logs directory under the base install location\nexport FALCON_LOG_DIR={{falcon_log_dir}}\n\n# Where pid files are stored. Defatult is logs directory under the base install location\nexport FALCON_PID_DIR={{falcon_pid_dir}}\n\n# where the falcon active mq data is stored. Defatult is logs/data directory under the base install location\nexport FALCON_DATA_DIR={{falcon_embeddedmq_data}}\n\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\n#export FALCON_EXPANDED_WEBAPP_DIR=",
+ "falcon.embeddedmq.data": "/hadoop/falcon/embeddedmq/data",
+ "falcon.embeddedmq": "true",
+ "falcon_store_uri": "file:///hadoop/falcon/store"
+ },
+ "oozie-env": {
+ "oozie_derby_database": "Derby",
+ "oozie_admin_port": "11001",
+ "oozie_hostname": "abtest-3.c.pramod-thangali.internal",
+ "oozie_pid_dir": "/var/run/oozie",
+ "content": "\n#!/bin/bash\n\nif [ -d \"/usr/lib/bigtop-tomcat\" ]; then\n export OOZIE_CONFIG=${OOZIE_CONFIG:-/etc/oozie/conf}\n export CATALINA_BASE=${CATALINA_BASE:-/var/lib/oozie/oozie-server}\n export CATALINA_TMPDIR=${CATALINA_TMPDIR:-/var/tmp/oozie}\n export OOZIE_CATALINA_HOME=/usr/lib/bigtop-tomcat\nfi\n\n#Set JAVA HOME\nexport JAVA_HOME={{java_home}}\n\nexport JRE_HOME=${JAVA_HOME}\n\n# Set Oozie specific environment variables here.\n\n# Settings for the Embedded Tomcat that runs Oozie\n# Java System properties for Oozie should be specified in this variable\n#\n# export CATALINA_OPTS=\n\n# Oozie configuration file to load from Oozie configuration directory\n#\n# export OOZIE_CONFIG_FILE=oozie-site.xml\n\n# Oozie logs directory\n#\nexport OOZIE_LOG={{oozie_log_dir}}\n\n# Oozie pid directory\n#\nexport CATALINA_PID={{pid_file}}\n\n#Location of the data for oozie\nexport OOZIE_DATA={{oozie_data_dir}}\n\n# Oozie Log4J configuration file to load from Oozie configuratio
n directory\n#\n# export OOZIE_LOG4J_FILE=oozie-log4j.properties\n\n# Reload interval of the Log4J configuration file, in seconds\n#\n# export OOZIE_LOG4J_RELOAD=10\n\n# The port Oozie server runs\n#\nexport OOZIE_HTTP_PORT={{oozie_server_port}}\n\n# The admin port Oozie server runs\n#\nexport OOZIE_ADMIN_PORT={{oozie_server_admin_port}}\n\n# The host name Oozie server runs on\n#\n# export OOZIE_HTTP_HOSTNAME=`hostname -f`\n\n# The base URL for callback URLs to Oozie\n#\n# export OOZIE_BASE_URL=\"http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie\"\nexport JAVA_LIBRARY_PATH=/usr/lib/hadoop/lib/native/Linux-amd64-64",
+ "oozie_user": "oozie",
+ "oozie_database": "New Derby Database",
+ "oozie_data_dir": "/hadoop/oozie/data",
+ "oozie_log_dir": "/var/log/oozie"
+ },
+ "webhcat-env": {
+ "content": "\n# The file containing the running pid\nPID_FILE={{pid_file}}\n\nTEMPLETON_LOG_DIR={{templeton_log_dir}}/\n\n\nWEBHCAT_LOG_DIR={{templeton_log_dir}}/\n\n# The console error log\nERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log\n\n# The console log\nCONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log\n\n#TEMPLETON_JAR=templeton_jar_name\n\n#HADOOP_PREFIX=hadoop_prefix\n\n#HCAT_PREFIX=hive_prefix\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=/usr/lib/hadoop"
+ },
+ "pig-env": {
+ "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
+ },
+ "sqoop-env": {
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+ "sqoop_user": "sqoop",
+ "jdbc_drivers" : "com.microsoft.sqlserver.jdbc.SQLServerDriver,com.mysql.jdbc.Driver"
+ },
+ "hdfs-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "yarn-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hbase-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hive-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "hive-exec-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "webhcat-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "zookeeper-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "pig-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "pig-properties": {
+ "content": "pigproperties\nline2"
+ },
+ "oozie-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "flume-conf": {
+ "content": "a1.sources = r1\n a1.sinks = k1\n a1.channels = c1\n # Describe/configure the source\n a1.sources.r1.type = netcat\n a1.sources.r1.bind = localhost\n a1.sources.r1.port = 44444\n \n # Describe the sink\n a1.sinks.k1.type = logger\n \n # Use a channel which buffers events in memory\n a1.channels.c1.type = memory\n a1.channels.c1.capacity = 1000\n a1.channels.c1.transactionCapacity = 100\n \n # Bind the source and sink to the channel\n a1.sources.r1.channels = c1\n a1.sinks.k1.channel = c1\n"
+ },
+ "flume-log4j": {
+ "content": "log4jproperties\nline2"
+ }
+ },
+ "configuration_attributes": {
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "tez-site": {
+ "final": {
+ "tez.am.log.level": "true"
+ }
+ },
+ "capacity-scheduler": {
+ "final": {
+ "yarn.scheduler.capacity.node-locality-delay": "true"
+ }
+ },
+ "mapred-site": {
+ "final": {
+ "mapred.healthChecker.script.path": "true",
+ "mapreduce.jobtracker.staging.root.dir": "true"
+ }
+ },
+ "oozie-site": {
+ "final": {
+ "oozie.service.PurgeService.purge.interval": "true",
+ "oozie.service.CallableQueueService.queue.size": "true"
+ }
+ },
+ "webhcat-site": {
+ "final": {
+ "templeton.pig.path": "true",
+ "templeton.exec.timeout": "true",
+ "templeton.override.enabled": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "hbase-site": {
+ "final": {
+ "hbase.client.keyvalue.maxsize": "true",
+ "hbase.hstore.compactionThreshold": "true",
+ "hbase.rootdir": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ },
+ "hive-site": {
+ "final": {
+ "javax.jdo.option.ConnectionPassword": "true",
+ "javax.jdo.option.ConnectionDriverName": "true",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true"
+ }
+ },
+ "hive-interactive-site": {
+ "final": {
+ "javax.jdo.option.ConnectionPassword": "true",
+ "javax.jdo.option.ConnectionDriverName": "true",
+ "hive.optimize.bucketmapjoin.sortedmerge": "true"
+ }
+ }
+ },
+ "configurationTags": {
+ "capacity-scheduler": {
+ "tag": "version1"
+ },
+ "oozie-site": {
+ "tag": "version1"
+ },
+ "storm-site": {
+ "tag": "version1"
+ },
+ "webhcat-site": {
+ "tag": "version1"
+ },
+ "global": {
+ "tag": "version1"
+ },
+ "mapred-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "hbase-site": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "hive-site": {
+ "tag": "version1"
+ },
+ "hive-interactive-site": {
+ "tag": "version1"
+ },
+ "hdfs-log4j": {
+ "tag": "version1"
+ },
+ "yarn-log4j": {
+ "tag": "version1"
+ },
+ "hbase-log4j": {
+ "tag": "version1"
+ },
+ "hive-log4j": {
+ "tag": "version1"
+ },
+ "hive-exec-log4j": {
+ "tag": "version1"
+ },
+ "zookeeper-log4j": {
+ "tag": "version1"
+ },
+ "oozie-log4j": {
+ "tag": "version1"
+ },
+ "pig-log4j": {
+ "tag": "version1"
+ },
+ "pig-properties": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "snamenode_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "nm_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "drpc_server_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "slave_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "ganglia_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "hive_server_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "logviewer_server_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "hive_metastore_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "hbase_rs_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "webhcat_server_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "supervisor_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "ganglia_monitor_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "storm_ui_server_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "oozie_server": [
+ "c6402.ambari.apache.org"
+ ],
+ "hs_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "nimbus_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "namenode_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "hbase_master_hosts": [
+ "c6402.ambari.apache.org"
+ ],
+ "hive_mysql_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "falcon_server_hosts": [
+ "c6402.ambari.apache.org"
+ ]
+ }
+}
[44/57] [abbrv] ambari git commit: BUG-87284 : NullPointerException
during blueprint generation (mradhakrishnan)
Posted by lp...@apache.org.
BUG-87284 : NullPointerException during blueprint generation (mradhakrishnan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c241b9a0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c241b9a0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c241b9a0
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: c241b9a02d253269067a90c71a8fbc2260cb0645
Parents: ca87e8d
Author: Madhuvanthi Radhakrishnan <mr...@hortonworks.com>
Authored: Mon Sep 11 19:33:01 2017 -0700
Committer: Madhuvanthi Radhakrishnan <mr...@hortonworks.com>
Committed: Mon Sep 11 19:34:27 2017 -0700
----------------------------------------------------------------------
.../internal/BlueprintConfigurationProcessor.java | 14 +++++++++-----
1 file changed, 9 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c241b9a0/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index b4e1027..3538945 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -1298,12 +1298,16 @@ public class BlueprintConfigurationProcessor {
* elements in this property
*/
private static String[] splitAndTrimStrings(String propertyName) {
- List<String> namesWithoutWhitespace = new LinkedList<>();
- for (String service : propertyName.split(",")) {
- namesWithoutWhitespace.add(service.trim());
- }
+ if(propertyName != null) {
+ List<String> namesWithoutWhitespace = new LinkedList<>();
+ for (String service : propertyName.split(",")) {
+ namesWithoutWhitespace.add(service.trim());
+ }
- return namesWithoutWhitespace.toArray(new String[namesWithoutWhitespace.size()]);
+ return namesWithoutWhitespace.toArray(new String[namesWithoutWhitespace.size()]);
+ } else {
+ return new String[0];
+ }
}
/**
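
A minimal sketch of the guard's effect (illustrative only; the method is
private, so these calls would not compile outside the class):

    // Before c241b9a0, a null property value made propertyName.split(",")
    // throw the NullPointerException reported in BUG-87284.
    String[] none  = splitAndTrimStrings(null);     // now returns new String[0]
    String[] names = splitAndTrimStrings(" a, b");  // returns {"a", "b"}
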
[16/57] [abbrv] ambari git commit: AMBARI-21894 - PATCH and MAINT
Repositories Should Indicate that they can be Reverted (jonathanhurley)
Posted by lp...@apache.org.
AMBARI-21894 - PATCH and MAINT Repositories Should Indicate that they can be Reverted (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8b5d697c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8b5d697c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8b5d697c
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 8b5d697c0adb5ae92b16f1df2e2d6d75b066fce5
Parents: 8cb9423
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Sep 6 14:32:29 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Sep 7 00:06:08 2017 -0400
----------------------------------------------------------------------
.../ClusterStackVersionResourceProvider.java | 34 +++++-
.../internal/UpgradeResourceProvider.java | 16 ++-
.../ambari/server/orm/dao/UpgradeDAO.java | 46 +++++++-
.../server/orm/entities/UpgradeEntity.java | 109 +++++++++++++++----
.../upgrades/AbstractUpgradeServerAction.java | 7 ++
.../upgrades/FinalizeUpgradeAction.java | 11 ++
.../ambari/server/state/RepositoryType.java | 7 ++
.../ambari/server/state/UpgradeContext.java | 33 +++++-
.../server/upgrade/UpgradeCatalog260.java | 8 +-
.../main/resources/Ambari-DDL-Derby-CREATE.sql | 1 +
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 1 +
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 1 +
.../resources/Ambari-DDL-Postgres-CREATE.sql | 1 +
.../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 1 +
.../resources/Ambari-DDL-SQLServer-CREATE.sql | 1 +
.../internal/UpgradeResourceProviderTest.java | 62 +++++++++++
.../ambari/server/orm/dao/UpgradeDAOTest.java | 105 +++++++++++++++++-
.../ambari/server/state/UpgradeContextTest.java | 50 +++++++++
.../server/upgrade/UpgradeCatalog260Test.java | 30 ++++-
19 files changed, 489 insertions(+), 35 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index 1766da3..85a7596 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -60,6 +60,7 @@ import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.dao.HostVersionDAO;
import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
import org.apache.ambari.server.orm.entities.HostVersionEntity;
import org.apache.ambari.server.orm.entities.OperatingSystemEntity;
import org.apache.ambari.server.orm.entities.RepositoryEntity;
@@ -112,6 +113,8 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
protected static final String CLUSTER_STACK_VERSION_STATE_PROPERTY_ID = PropertyHelper.getPropertyId("ClusterStackVersions", "state");
protected static final String CLUSTER_STACK_VERSION_HOST_STATES_PROPERTY_ID = PropertyHelper.getPropertyId("ClusterStackVersions", "host_states");
protected static final String CLUSTER_STACK_VERSION_REPO_SUMMARY_PROPERTY_ID = PropertyHelper.getPropertyId("ClusterStackVersions", "repository_summary");
+ protected static final String CLUSTER_STACK_VERSION_REPO_SUPPORTS_REVERT = PropertyHelper.getPropertyId("ClusterStackVersions", "supports_revert");
+ protected static final String CLUSTER_STACK_VERSION_REPO_REVERT_UPGRADE_ID = PropertyHelper.getPropertyId("ClusterStackVersions", "revert_upgrade_id");
protected static final String CLUSTER_STACK_VERSION_REPOSITORY_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("ClusterStackVersions", "repository_version");
protected static final String CLUSTER_STACK_VERSION_STAGE_SUCCESS_FACTOR = PropertyHelper.getPropertyId("ClusterStackVersions", "success_factor");
@@ -153,7 +156,8 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
CLUSTER_STACK_VERSION_VERSION_PROPERTY_ID, CLUSTER_STACK_VERSION_HOST_STATES_PROPERTY_ID,
CLUSTER_STACK_VERSION_STATE_PROPERTY_ID, CLUSTER_STACK_VERSION_REPOSITORY_VERSION_PROPERTY_ID,
CLUSTER_STACK_VERSION_STAGE_SUCCESS_FACTOR,
- CLUSTER_STACK_VERSION_FORCE, CLUSTER_STACK_VERSION_REPO_SUMMARY_PROPERTY_ID);
+ CLUSTER_STACK_VERSION_FORCE, CLUSTER_STACK_VERSION_REPO_SUMMARY_PROPERTY_ID,
+ CLUSTER_STACK_VERSION_REPO_SUPPORTS_REVERT, CLUSTER_STACK_VERSION_REPO_REVERT_UPGRADE_ID);
private static Map<Type, String> keyPropertyIds = ImmutableMap.<Type, String> builder()
.put(Type.Cluster, CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID)
@@ -166,6 +170,12 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
@Inject
private static HostVersionDAO hostVersionDAO;
+ /**
+ * Used for looking up revertable upgrades.
+ */
+ @Inject
+ private static UpgradeDAO upgradeDAO;
+
@Inject
private static RepositoryVersionDAO repositoryVersionDAO;
@@ -260,6 +270,12 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
throw new SystemException("Could not find any repositories to show");
}
+ // find the single repository version which is revertable, if any
+ UpgradeEntity revertableUpgrade = null;
+ if (null == cluster.getUpgradeInProgress()) {
+ revertableUpgrade = upgradeDAO.findRevertable(cluster.getClusterId());
+ }
+
for (Long repositoryVersionId : requestedEntities) {
final Resource resource = new ResourceImpl(Resource.Type.ClusterStackVersion);
@@ -298,7 +314,6 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
setResourceProperty(resource, CLUSTER_STACK_VERSION_HOST_STATES_PROPERTY_ID, hostStates, requestedIds);
setResourceProperty(resource, CLUSTER_STACK_VERSION_REPO_SUMMARY_PROPERTY_ID, versionSummary, requestedIds);
-
setResourceProperty(resource, CLUSTER_STACK_VERSION_ID_PROPERTY_ID, repositoryVersion.getId(), requestedIds);
setResourceProperty(resource, CLUSTER_STACK_VERSION_STACK_PROPERTY_ID, repoVersionStackId.getStackName(), requestedIds);
setResourceProperty(resource, CLUSTER_STACK_VERSION_VERSION_PROPERTY_ID, repoVersionStackId.getStackVersion(), requestedIds);
@@ -309,6 +324,21 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
RepositoryVersionState aggregateState = RepositoryVersionState.getAggregateState(allStates);
setResourceProperty(resource, CLUSTER_STACK_VERSION_STATE_PROPERTY_ID, aggregateState, requestedIds);
+ // mark whether this repo is revertable for this cluster
+ boolean revertable = false;
+ if (null != revertableUpgrade) {
+ RepositoryVersionEntity revertableRepositoryVersion = revertableUpgrade.getRepositoryVersion();
+ revertable = repositoryVersionId.equals(revertableRepositoryVersion.getId());
+ }
+
+ setResourceProperty(resource, CLUSTER_STACK_VERSION_REPO_SUPPORTS_REVERT, revertable, requestedIds);
+
+ // if the repo is revertable, indicate which upgrade to revert if necessary
+ if (revertable) {
+ setResourceProperty(resource, CLUSTER_STACK_VERSION_REPO_REVERT_UPGRADE_ID,
+ revertableUpgrade.getId(), requestedIds);
+ }
+
if (predicate == null || predicate.evaluate(resource)) {
resources.add(resource);
}
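
Taken together, this hunk publishes two new read-only fields on each
ClusterStackVersions resource. A hedged sketch of consumer-side code (the
property ids are taken from the diff; the surrounding handling is
illustrative, not part of the commit):

    // Illustrative only: reading the new fields from a returned Resource.
    Object supportsRevert =
        resource.getPropertyValue("ClusterStackVersions/supports_revert");
    if (Boolean.TRUE.equals(supportsRevert)) {
      // identifies the finalized PATCH/MAINT upgrade a revert would target
      Object revertUpgradeId =
          resource.getPropertyValue("ClusterStackVersions/revert_upgrade_id");
    }
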
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index a35f380..0ff21a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -765,7 +765,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
upgrade.setUpgradeGroups(groupEntities);
req.getRequestStatusResponse();
- return createUpgradeInsideTransaction(cluster, req, upgrade);
+ return createUpgradeInsideTransaction(cluster, req, upgrade, upgradeContext);
}
/**
@@ -782,6 +782,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
* @param upgradeEntity
* the upgrade to create and associate with the newly created request
* (not {@code null}).
+ * @param upgradeContext
+ * the upgrade context associated with the upgrade being created.
* @return the persisted {@link UpgradeEntity} encapsulating all
* {@link UpgradeGroupEntity} and {@link UpgradeItemEntity}.
* @throws AmbariException
@@ -789,7 +791,17 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
@Transactional
UpgradeEntity createUpgradeInsideTransaction(Cluster cluster,
RequestStageContainer request,
- UpgradeEntity upgradeEntity) throws AmbariException {
+ UpgradeEntity upgradeEntity, UpgradeContext upgradeContext) throws AmbariException {
+
+ // if this is a patch reversion, then we must unset the revertable flag of
+ // the upgrade being reverted
+ if (upgradeContext.isPatchRevert()) {
+ UpgradeEntity upgradeBeingReverted = s_upgradeDAO.findUpgrade(
+ upgradeContext.getPatchRevertUpgradeId());
+
+ upgradeBeingReverted.setRevertAllowed(false);
+ upgradeBeingReverted = s_upgradeDAO.merge(upgradeBeingReverted);
+ }
request.persist();
RequestEntity requestEntity = s_requestDAO.findByPK(request.getId());
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
index 4e091fa..22a7505 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
@@ -26,6 +26,7 @@ import org.apache.ambari.server.orm.RequiresSession;
import org.apache.ambari.server.orm.entities.UpgradeEntity;
import org.apache.ambari.server.orm.entities.UpgradeGroupEntity;
import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
+import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.stack.upgrade.Direction;
import com.google.inject.Inject;
@@ -168,7 +169,8 @@ public class UpgradeDAO {
}
/**
- * @param clusterId the cluster id
+ * @param clusterId
+ * the cluster id
* @return the upgrade entity, or {@code null} if not found
*/
@RequiresSession
@@ -181,6 +183,48 @@ public class UpgradeDAO {
return daoUtils.selectSingle(query);
}
+ /**
+ * Gets the only revertable upgrade if one exists. By definition, only the
+ * most recent {@code RepositoryType#PATCH} or {@code RepositoryType#MAINT}
+ * upgrade that has not already been downgraded is revertable.
+ *
+ * @param clusterId
+ * the cluster id
+ * @return the upgrade which can be reverted, or {@code null} if not found
+ */
+ @RequiresSession
+ public UpgradeEntity findRevertable(long clusterId) {
+ TypedQuery<UpgradeEntity> query = entityManagerProvider.get().createNamedQuery(
+ "UpgradeEntity.findRevertable", UpgradeEntity.class);
+ query.setMaxResults(1);
+ query.setParameter("clusterId", clusterId);
+
+ return daoUtils.selectSingle(query);
+ }
+
+ /**
+ * Gets the only revertable upgrade if one exists. By definition, only the
+ * most recent {@code RepositoryType#PATCH} or {@code RepositoryType#MAINT}
+ * upgrade that has not already been downgraded is revertable.
+ * <p>
+ * This method uses a JPQL query to compute the revertable upgrade instead
+ * of relying on the revert_allowed column being set correctly.
+ *
+ * @param clusterId
+ * the cluster id
+ * @return the upgrade which can be reverted, or {@code null} if not found
+ */
+ @RequiresSession
+ public UpgradeEntity findRevertableUsingJPQL(long clusterId) {
+ TypedQuery<UpgradeEntity> query = entityManagerProvider.get().createNamedQuery(
+ "UpgradeEntity.findRevertableUsingJPQL", UpgradeEntity.class);
+ query.setMaxResults(1);
+ query.setParameter("clusterId", clusterId);
+ query.setParameter("revertableTypes", RepositoryType.REVERTABLE);
+
+ return daoUtils.selectSingle(query);
+ }
+
@Transactional
public UpgradeEntity merge(UpgradeEntity upgradeEntity) {
return entityManagerProvider.get().merge(upgradeEntity);
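
A short usage sketch for the new finder (assumed caller code; the named
queries themselves are defined on UpgradeEntity below):

    // At most one upgrade per cluster is ever offered for reversion.
    UpgradeEntity revertable = upgradeDAO.findRevertable(clusterId);
    if (revertable != null) {
      // a finalized PATCH/MAINT upgrade that has not been downgraded;
      // creating the revert clears its revert_allowed flag, so a
      // subsequent call no longer returns it.
    }
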
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
index 7f4824f..1361c94 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
@@ -34,6 +34,7 @@ import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
+import javax.persistence.QueryHint;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
@@ -58,21 +59,42 @@ import com.google.common.base.Objects;
pkColumnValue = "upgrade_id_seq",
initialValue = 0)
@NamedQueries({
- @NamedQuery(name = "UpgradeEntity.findAll",
- query = "SELECT u FROM UpgradeEntity u"),
- @NamedQuery(name = "UpgradeEntity.findAllForCluster",
- query = "SELECT u FROM UpgradeEntity u WHERE u.clusterId = :clusterId"),
- @NamedQuery(name = "UpgradeEntity.findUpgrade",
- query = "SELECT u FROM UpgradeEntity u WHERE u.upgradeId = :upgradeId"),
- @NamedQuery(name = "UpgradeEntity.findUpgradeByRequestId",
- query = "SELECT u FROM UpgradeEntity u WHERE u.requestId = :requestId"),
- @NamedQuery(name = "UpgradeEntity.findLatestForClusterInDirection",
- query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId AND u.direction = :direction ORDER BY r.startTime DESC, u.upgradeId DESC"),
- @NamedQuery(name = "UpgradeEntity.findLatestForCluster",
- query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId ORDER BY r.startTime DESC"),
- @NamedQuery(name = "UpgradeEntity.findAllRequestIds",
- query = "SELECT upgrade.requestId FROM UpgradeEntity upgrade")
-})
+ @NamedQuery(name = "UpgradeEntity.findAll", query = "SELECT u FROM UpgradeEntity u"),
+ @NamedQuery(
+ name = "UpgradeEntity.findAllForCluster",
+ query = "SELECT u FROM UpgradeEntity u WHERE u.clusterId = :clusterId"),
+ @NamedQuery(
+ name = "UpgradeEntity.findUpgrade",
+ query = "SELECT u FROM UpgradeEntity u WHERE u.upgradeId = :upgradeId"),
+ @NamedQuery(
+ name = "UpgradeEntity.findUpgradeByRequestId",
+ query = "SELECT u FROM UpgradeEntity u WHERE u.requestId = :requestId"),
+ @NamedQuery(
+ name = "UpgradeEntity.findLatestForClusterInDirection",
+ query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId AND u.direction = :direction ORDER BY r.startTime DESC, u.upgradeId DESC"),
+ @NamedQuery(
+ name = "UpgradeEntity.findLatestForCluster",
+ query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId ORDER BY r.startTime DESC"),
+ @NamedQuery(
+ name = "UpgradeEntity.findAllRequestIds",
+ query = "SELECT upgrade.requestId FROM UpgradeEntity upgrade"),
+ @NamedQuery(
+ name = "UpgradeEntity.findRevertable",
+ query = "SELECT upgrade FROM UpgradeEntity upgrade WHERE upgrade.revertAllowed = 1 AND upgrade.clusterId = :clusterId ORDER BY upgrade.upgradeId DESC",
+ hints = {
+ @QueryHint(name = "eclipselink.query-results-cache", value = "true"),
+ @QueryHint(name = "eclipselink.query-results-cache.ignore-null", value = "false"),
+ @QueryHint(name = "eclipselink.query-results-cache.size", value = "1")
+ }),
+ @NamedQuery(
+ name = "UpgradeEntity.findRevertableUsingJPQL",
+ query = "SELECT upgrade FROM UpgradeEntity upgrade WHERE upgrade.repoVersionId IN (SELECT upgrade.repoVersionId FROM UpgradeEntity upgrade WHERE upgrade.clusterId = :clusterId AND upgrade.orchestration IN :revertableTypes GROUP BY upgrade.repoVersionId HAVING MOD(COUNT(upgrade.repoVersionId), 2) != 0) ORDER BY upgrade.upgradeId DESC",
+ hints = {
+ @QueryHint(name = "eclipselink.query-results-cache", value = "true"),
+ @QueryHint(name = "eclipselink.query-results-cache.ignore-null", value = "false"),
+ @QueryHint(name = "eclipselink.query-results-cache.size", value = "1")
+ })
+ })
public class UpgradeEntity {
@Id
@@ -107,6 +129,9 @@ public class UpgradeEntity {
@Enumerated(value = EnumType.STRING)
private UpgradeType upgradeType;
+ @Column(name = "repo_version_id", insertable = false, updatable = false)
+ private Long repoVersionId;
+
@JoinColumn(name = "repo_version_id", referencedColumnName = "repo_version_id", nullable = false)
private RepositoryVersionEntity repositoryVersion;
@@ -117,7 +142,26 @@ public class UpgradeEntity {
private Integer skipServiceCheckFailures = 0;
@Column(name="downgrade_allowed", nullable = false)
- private Short downgrade_allowed = 1;
+ private Short downgradeAllowed = 1;
+
+ /**
+ * Whether this upgrade is a candidate to be reverted. The current restriction
+ * on this behavior is that only the most recent
+ * {@link RepositoryType#PATCH}/{@link RepositoryType#MAINT} for a given
+ * cluster can be reverted at a time.
+ * <p/>
+ * All upgrades are created with this value defaulted to {@code false}. Upon
+ * successful finalization of the upgrade, if the upgrade was the correct type
+ * and direction, then it becomes a candidate for reversion and this value is
+ * set to {@code true}. If an upgrade is reverted after being finalized, then
+ * this value should be set to {@code false} explicitly.
+ * <p/>
+ * There can exist <i>n</i> upgrades with this value set to {@code true};
+ * the idea is that only the most recent upgrade with this value set to
+ * {@code true} will be able to be reverted.
+ */
+ @Column(name = "revert_allowed", nullable = false)
+ private Short revertAllowed = 0;
@Column(name="orchestration", nullable = false)
@Enumerated(value = EnumType.STRING)
@@ -222,18 +266,45 @@ public class UpgradeEntity {
* @return whether a downgrade is allowed; {@code null} if not set
*/
public Boolean isDowngradeAllowed() {
- return downgrade_allowed != null ? (downgrade_allowed != 0) : null;
+ return downgradeAllowed != null ? (downgradeAllowed != 0) : null;
}
/**
* @param canDowngrade {@code true} to allow downgrade, {@code false} to disallow downgrade
*/
public void setDowngradeAllowed(boolean canDowngrade) {
- downgrade_allowed = (!canDowngrade ? (short)0 : (short)1);
+ downgradeAllowed = (!canDowngrade ? (short) 0 : (short) 1);
+ }
+
+ /**
+ * Gets whether this upgrade supports being reverted. Upgrades can be reverted
+ * (downgraded after finalization) if they are either
+ * {@link RepositoryType#MAINT} or {@link RepositoryType#PATCH} and have never
+ * been previously downgraded.
+ *
+ * @return {@code true} if this upgrade can potentially be reverted.
+ */
+ public Boolean isRevertAllowed() {
+ return revertAllowed != null ? (revertAllowed != 0) : null;
+ }
+
+ /**
+ * Sets whether this upgrade supports being reverted. This should only ever be
+ * called from the finalization of an upgrade. {@link RepositoryType#MAINT} or
+ * {@link RepositoryType#PATCH} upgrades can be reverted only if they have
+ * not previously been downgraded.
+ *
+ * @param revertable
+ * {@code true} to mark this as being revertable, {@code false}
+ * otherwise.
+ */
+ public void setRevertAllowed(boolean revertable) {
+ revertAllowed = (!revertable ? (short) 0 : (short) 1);
}
/**
- * @param upgradeType the upgrade type to set
+ * @param upgradeType
+ * the upgrade type to set
*/
public void setUpgradeType(UpgradeType upgradeType) {
this.upgradeType = upgradeType;
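A note on the two revertable lookups added above: UpgradeEntity.findRevertable relies on the explicit revert_allowed flag, while UpgradeEntity.findRevertableUsingJPQL derives the same answer from history alone: every revert records a DOWNGRADE row against the same repo_version_id, so a repository version with an odd number of upgrade rows still has an outstanding, revertable upgrade. The DAO methods that execute these queries are not part of this diff; the following is a minimal sketch, assuming an injected EntityManager, of how the flag-based query would typically be run:

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

// Hypothetical helper (not from this commit): runs the cached named query.
public UpgradeEntity findRevertable(EntityManager em, long clusterId) {
  TypedQuery<UpgradeEntity> query =
      em.createNamedQuery("UpgradeEntity.findRevertable", UpgradeEntity.class);
  query.setParameter("clusterId", clusterId);

  // The JPQL orders by upgradeId DESC, so the first row is the single
  // upgrade currently eligible for reversion, if any.
  List<UpgradeEntity> results = query.setMaxResults(1).getResultList();
  return results.isEmpty() ? null : results.get(0);
}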
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
index e012dac..8ebb186 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
@@ -17,6 +17,7 @@
*/
package org.apache.ambari.server.serveraction.upgrades;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
import org.apache.ambari.server.orm.entities.UpgradeEntity;
import org.apache.ambari.server.serveraction.AbstractServerAction;
import org.apache.ambari.server.state.Cluster;
@@ -42,6 +43,12 @@ public abstract class AbstractUpgradeServerAction extends AbstractServerAction {
protected UpgradeHelper m_upgradeHelper;
/**
+ * Used to look up or update {@link UpgradeEntity} instances.
+ */
+ @Inject
+ protected UpgradeDAO m_upgradeDAO;
+
+ /**
* Used to create instances of {@link UpgradeContext} with injected
* dependencies.
*/
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 26dcf27..5ec0692 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -41,6 +41,7 @@ import org.apache.ambari.server.orm.dao.HostVersionDAO;
import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
import org.apache.ambari.server.orm.entities.HostVersionEntity;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.RepositoryType;
@@ -104,6 +105,9 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
private CommandReport finalizeUpgrade(UpgradeContext upgradeContext)
throws AmbariException, InterruptedException {
+ Direction direction = upgradeContext.getDirection();
+ RepositoryType repositoryType = upgradeContext.getOrchestrationType();
+
StringBuilder outSB = new StringBuilder();
StringBuilder errSB = new StringBuilder();
@@ -198,6 +202,13 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
// longer used
finalizeHostRepositoryVersions(cluster);
+ // mark revertable
+ if (repositoryType.isRevertable() && direction == Direction.UPGRADE) {
+ UpgradeEntity upgrade = cluster.getUpgradeInProgress();
+ upgrade.setRevertAllowed(true);
+ upgrade = m_upgradeDAO.merge(upgrade);
+ }
+
// Reset upgrade state
cluster.setUpgradeEntity(null);
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryType.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryType.java b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryType.java
index 3f7d447..bf7eab0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryType.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryType.java
@@ -17,6 +17,8 @@
*/
package org.apache.ambari.server.state;
+import java.util.EnumSet;
+
/**
* Identifies the type of repository
*/
@@ -45,6 +47,11 @@ public enum RepositoryType {
*/
SERVICE;
+ /**
+ * The types of repositories which are revertable.
+ */
+ public static final EnumSet<RepositoryType> REVERTABLE = EnumSet.of(RepositoryType.MAINT,
+ RepositoryType.PATCH);
/**
* Gets whether applications of this repository are revertable after they have
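The isRevertable() method whose Javadoc is truncated above lies outside this hunk; under the assumption that it keys off the new REVERTABLE set, a minimal sketch would be:

// Assumed implementation (the actual method body is not shown in this diff):
// a repository type is revertable iff it is MAINT or PATCH.
public boolean isRevertable() {
  return REVERTABLE.contains(this);
}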
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 64da7c3..67a8950 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -252,6 +252,11 @@ public class UpgradeContext {
private final boolean m_isRevert;
/**
+ * The ID of the upgrade being reverted if this is a reversion.
+ */
+ private long m_revertUpgradeId;
+
+ /**
* Defines orchestration type. This is not the repository type when reverting a patch.
*/
private RepositoryType m_orchestration = RepositoryType.STANDARD;
@@ -286,11 +291,19 @@ public class UpgradeContext {
m_isRevert = upgradeRequestMap.containsKey(UPGRADE_REVERT_UPGRADE_ID);
if (m_isRevert) {
- Long revertUpgradeId = Long.valueOf(upgradeRequestMap.get(UPGRADE_REVERT_UPGRADE_ID).toString());
- UpgradeEntity revertUpgrade = m_upgradeDAO.findUpgrade(revertUpgradeId);
+ m_revertUpgradeId = Long.valueOf(upgradeRequestMap.get(UPGRADE_REVERT_UPGRADE_ID).toString());
+ UpgradeEntity revertUpgrade = m_upgradeDAO.findUpgrade(m_revertUpgradeId);
+ UpgradeEntity revertableUpgrade = m_upgradeDAO.findRevertable(cluster.getClusterId());
if (null == revertUpgrade) {
- throw new AmbariException(String.format("Could not find Upgrade with id %s to revert.", revertUpgradeId));
+ throw new AmbariException(
+ String.format("Could not find Upgrade with id %s to revert.", m_revertUpgradeId));
+ }
+
+ if (null == revertableUpgrade) {
+ throw new AmbariException(
+ String.format("There are no upgrades for cluster %s which are marked as revertable",
+ cluster.getClusterName()));
}
if (!revertUpgrade.getOrchestration().isRevertable()) {
@@ -299,7 +312,15 @@ public class UpgradeContext {
}
if (revertUpgrade.getDirection() != Direction.UPGRADE) {
- throw new AmbariException("Can only revert successful upgrades, not downgrades.");
+ throw new AmbariException(
+ "Only successfully completed upgrades can be reverted. Downgrades cannot be reverted.");
+ }
+
+ if (!revertableUpgrade.getId().equals(revertUpgrade.getId())) {
+ throw new AmbariException(String.format(
+ "The only upgrade which is currently allowed to be reverted for cluster %s is upgrade ID %s which was an upgrade to %s",
+ cluster.getClusterName(), revertableUpgrade.getId(),
+ revertableUpgrade.getRepositoryVersion().getVersion()));
}
Set<RepositoryVersionEntity> priors = new HashSet<>();
@@ -858,6 +879,10 @@ public class UpgradeContext {
return m_isRevert;
}
+ public long getPatchRevertUpgradeId() {
+ return m_revertUpgradeId;
+ }
+
/**
* Gets a POJO of the upgrade suitable to serialize.
*
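Taken together, the hunks above give the revert validation in UpgradeContext a well-defined order. A condensed sketch of that flow (a restatement for readability, not verbatim source):

// 1. the requested upgrade must exist
// 2. some upgrade must be flagged revertable for the cluster
// 3. the requested upgrade's orchestration must be a revertable type
// 4. only actual upgrades (not downgrades) can be reverted
// 5. the requested upgrade must be the one-and-only revertable candidate
void validateRevert(UpgradeEntity requested, UpgradeEntity revertable,
    String clusterName) throws AmbariException {
  if (requested == null || revertable == null) {
    throw new AmbariException("Nothing to revert for cluster " + clusterName);
  }
  if (!requested.getOrchestration().isRevertable()
      || requested.getDirection() != Direction.UPGRADE
      || !revertable.getId().equals(requested.getId())) {
    throw new AmbariException("Upgrade " + requested.getId()
        + " cannot be reverted for cluster " + clusterName);
  }
}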
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 665b350..d1de998 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -93,6 +93,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
public static final String FROM_REPO_VERSION_ID_COLUMN = "from_repo_version_id";
public static final String TO_REPO_VERSION_ID_COLUMN = "to_repo_version_id";
public static final String ORCHESTRATION_COLUMN = "orchestration";
+ public static final String ALLOW_REVERT_COLUMN = "revert_allowed";
public static final String FK_UPGRADE_FROM_REPO_ID = "FK_upgrade_from_repo_id";
public static final String FK_UPGRADE_TO_REPO_ID = "FK_upgrade_to_repo_id";
public static final String FK_UPGRADE_REPO_VERSION_ID = "FK_upgrade_repo_version_id";
@@ -146,6 +147,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
/**
* {@inheritDoc}
*/
+ @Override
public String getTargetVersion() {
return "2.6.0";
}
@@ -187,7 +189,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
EntityManager entityManager = getEntityManagerProvider().get();
Query query = entityManager.createNamedQuery("ClusterConfigEntity.findNotMappedClusterConfigsToService",ClusterConfigEntity.class);
- List<ClusterConfigEntity> notMappedConfigs = (List<ClusterConfigEntity>) query.getResultList();
+ List<ClusterConfigEntity> notMappedConfigs = query.getResultList();
if (notMappedConfigs != null) {
for (ClusterConfigEntity clusterConfigEntity : notMappedConfigs) {
clusterConfigEntity.setUnmapped(true);
@@ -243,9 +245,13 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
dbAccessor.addColumn(UPGRADE_TABLE,
new DBAccessor.DBColumnInfo(REPO_VERSION_ID_COLUMN, Long.class, null, null, false));
+
dbAccessor.addColumn(UPGRADE_TABLE,
new DBAccessor.DBColumnInfo(ORCHESTRATION_COLUMN, String.class, 255, STANDARD, false));
+ dbAccessor.addColumn(UPGRADE_TABLE,
+ new DBAccessor.DBColumnInfo(ALLOW_REVERT_COLUMN, Short.class, null, 0, false));
+
dbAccessor.addFKConstraint(UPGRADE_TABLE, FK_UPGRADE_REPO_VERSION_ID, REPO_VERSION_ID_COLUMN, REPO_VERSION_TABLE, REPO_VERSION_ID_COLUMN, false);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index dc7f79a..e7359a7 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -819,6 +819,7 @@ CREATE TABLE upgrade (
skip_failures SMALLINT DEFAULT 0 NOT NULL,
skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
+ revert_allowed SMALLINT DEFAULT 0 NOT NULL,
suspended SMALLINT DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 0c28012..c1e1953 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -836,6 +836,7 @@ CREATE TABLE upgrade (
skip_failures TINYINT(1) NOT NULL DEFAULT 0,
skip_sc_failures TINYINT(1) NOT NULL DEFAULT 0,
downgrade_allowed TINYINT(1) NOT NULL DEFAULT 1,
+ revert_allowed TINYINT(1) NOT NULL DEFAULT 0,
suspended TINYINT(1) DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 6cd330c..c0b2f0c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -815,6 +815,7 @@ CREATE TABLE upgrade (
skip_failures NUMBER(1) DEFAULT 0 NOT NULL,
skip_sc_failures NUMBER(1) DEFAULT 0 NOT NULL,
downgrade_allowed NUMBER(1) DEFAULT 1 NOT NULL,
+ revert_allowed NUMBER(1) DEFAULT 0 NOT NULL,
suspended NUMBER(1) DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 8c8ed5c..90cdbfe 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -817,6 +817,7 @@ CREATE TABLE upgrade (
skip_failures SMALLINT DEFAULT 0 NOT NULL,
skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
+ revert_allowed SMALLINT DEFAULT 0 NOT NULL,
suspended SMALLINT DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 59abd8b..7f39535 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -813,6 +813,7 @@ CREATE TABLE upgrade (
skip_failures BIT NOT NULL DEFAULT 0,
skip_sc_failures BIT NOT NULL DEFAULT 0,
downgrade_allowed BIT NOT NULL DEFAULT 1,
+ revert_allowed BIT NOT NULL DEFAULT 0,
suspended BIT DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index ea92256..aa06c4d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -834,6 +834,7 @@ CREATE TABLE upgrade (
skip_failures BIT NOT NULL DEFAULT 0,
skip_sc_failures BIT NOT NULL DEFAULT 0,
downgrade_allowed BIT NOT NULL DEFAULT 1,
+ revert_allowed BIT NOT NULL DEFAULT 0,
suspended BIT DEFAULT 0 NOT NULL,
CONSTRAINT PK_upgrade PRIMARY KEY CLUSTERED (upgrade_id),
FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 5eca6b3..37a7b44 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -1608,6 +1608,7 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
sch.setVersion("2.1.1.0");
File f = new File("src/test/resources/hbase_version_test.xml");
+ repoVersionEntity2112.setType(RepositoryType.PATCH);
repoVersionEntity2112.setVersionXml(IOUtils.toString(new FileInputStream(f)));
repoVersionEntity2112.setVersionXsd("version_definition.xsd");
repoVersionDao.merge(repoVersionEntity2112);
@@ -1629,9 +1630,17 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
assertEquals(1, upgrades.size());
+
UpgradeEntity upgradeEntity = upgrades.get(0);
assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+ // should be false since only finalization actually sets this bit
+ assertEquals(false, upgradeEntity.isRevertAllowed());
+
+ // fake it now so the rest of the test passes
+ upgradeEntity.setRevertAllowed(true);
+ upgradeEntity = upgradeDao.merge(upgradeEntity);
+
// !!! make it look like the cluster is done
cluster.setUpgradeEntity(null);
@@ -1671,6 +1680,59 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
assertTrue(found);
}
+ /**
+ * Tests that when there is no revertable upgrade, a reversion of a specific
+ * upgrade ID is not allowed.
+ */
+ @Test(expected = SystemException.class)
+ public void testRevertFailsWhenNoRevertableUpgradeIsFound() throws Exception {
+ Cluster cluster = clusters.getCluster("c1");
+
+ // add a single ZK server and client on 2.1.1.0
+ Service service = cluster.addService("HBASE", repoVersionEntity2110);
+ ServiceComponent component = service.addServiceComponent("HBASE_MASTER");
+ ServiceComponentHost sch = component.addServiceComponentHost("h1");
+ sch.setVersion("2.1.1.0");
+
+ File f = new File("src/test/resources/hbase_version_test.xml");
+ repoVersionEntity2112.setType(RepositoryType.PATCH);
+ repoVersionEntity2112.setVersionXml(IOUtils.toString(new FileInputStream(f)));
+ repoVersionEntity2112.setVersionXsd("version_definition.xsd");
+ repoVersionDao.merge(repoVersionEntity2112);
+
+ List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+ assertEquals(0, upgrades.size());
+
+ Map<String, Object> requestProps = new HashMap<>();
+ requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, String.valueOf(repoVersionEntity2112.getId()));
+ requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name());
+
+ ResourceProvider upgradeResourceProvider = createProvider(amc);
+
+ Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+ upgradeResourceProvider.createResources(request);
+
+ upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+ assertEquals(1, upgrades.size());
+
+ UpgradeEntity upgradeEntity = upgrades.get(0);
+ assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+ // !!! make it look like the cluster is done
+ cluster.setUpgradeEntity(null);
+
+ requestProps = new HashMap<>();
+ requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_REVERT_UPGRADE_ID, upgradeEntity.getId());
+ requestProps.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, Boolean.TRUE.toString());
+
+ request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+ upgradeResourceProvider.createResources(request);
+ }
+
private String parseSingleMessage(String msgStr){
JsonParser parser = new JsonParser();
JsonArray msgArray = (JsonArray) parser.parse(msgStr);
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
index f23e10d..47fde03 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
@@ -35,9 +35,11 @@ import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.OrmTestHelper;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
import org.apache.ambari.server.orm.entities.UpgradeEntity;
import org.apache.ambari.server.orm.entities.UpgradeGroupEntity;
import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
+import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.UpgradeState;
import org.apache.ambari.server.state.stack.upgrade.Direction;
@@ -240,4 +242,105 @@ public class UpgradeDAOTest {
Assert.assertTrue(lastUpgradeForCluster.isComponentFailureAutoSkipped());
Assert.assertTrue(lastUpgradeForCluster.isServiceCheckFailureAutoSkipped());
}
-}
+
+ /**
+ * Tests the logic that finds the one-and-only revertable upgrade.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testFindRevertableUpgrade() throws Exception {
+ // create upgrade entities
+ UpgradeEntity revertable = dao.findRevertable(1L);
+ UpgradeEntity revertableViaJPQL = dao.findRevertableUsingJPQL(1L);
+ assertEquals(null, revertable);
+ assertEquals(null, revertableViaJPQL);
+
+ RequestEntity requestEntity = new RequestEntity();
+ requestEntity.setRequestId(1L);
+ requestEntity.setClusterId(clusterId.longValue());
+ requestEntity.setStatus(HostRoleStatus.PENDING);
+ requestEntity.setStages(new ArrayList<StageEntity>());
+ requestDAO.create(requestEntity);
+
+ UpgradeEntity entity1 = new UpgradeEntity();
+ entity1.setId(11L);
+ entity1.setClusterId(clusterId.longValue());
+ entity1.setDirection(Direction.UPGRADE);
+ entity1.setRequestEntity(requestEntity);
+ entity1.setRepositoryVersion(repositoryVersion2500);
+ entity1.setUpgradeType(UpgradeType.ROLLING);
+ entity1.setUpgradePackage("test-upgrade");
+ entity1.setDowngradeAllowed(true);
+ entity1.setOrchestration(RepositoryType.PATCH);
+ entity1.setRevertAllowed(true);
+ dao.create(entity1);
+
+ revertable = dao.findRevertable(1L);
+ revertableViaJPQL = dao.findRevertableUsingJPQL(1L);
+ assertEquals(revertable.getId(), entity1.getId());
+ assertEquals(revertableViaJPQL.getId(), entity1.getId());
+
+ UpgradeEntity entity2 = new UpgradeEntity();
+ entity2.setId(22L);
+ entity2.setClusterId(clusterId.longValue());
+ entity2.setDirection(Direction.UPGRADE);
+ entity2.setRequestEntity(requestEntity);
+ entity2.setRepositoryVersion(repositoryVersion2511);
+ entity2.setUpgradeType(UpgradeType.ROLLING);
+ entity2.setUpgradePackage("test-upgrade");
+ entity2.setDowngradeAllowed(true);
+ entity2.setOrchestration(RepositoryType.MAINT);
+ entity2.setRevertAllowed(true);
+ dao.create(entity2);
+
+ revertable = dao.findRevertable(1L);
+ revertableViaJPQL = dao.findRevertableUsingJPQL(1L);
+ assertEquals(revertable.getId(), entity2.getId());
+ assertEquals(revertableViaJPQL.getId(), entity2.getId());
+
+ // now make it look like upgrade ID 22 was reverted
+ entity2.setRevertAllowed(false);
+ entity2 = dao.merge(entity2);
+
+ // create a downgrade for ID 22
+ UpgradeEntity entity3 = new UpgradeEntity();
+ entity3.setId(33L);
+ entity3.setClusterId(clusterId.longValue());
+ entity3.setDirection(Direction.DOWNGRADE);
+ entity3.setRequestEntity(requestEntity);
+ entity3.setRepositoryVersion(repositoryVersion2511);
+ entity3.setUpgradeType(UpgradeType.ROLLING);
+ entity3.setUpgradePackage("test-upgrade");
+ entity3.setOrchestration(RepositoryType.MAINT);
+ entity3.setDowngradeAllowed(false);
+ dao.create(entity3);
+
+ revertable = dao.findRevertable(1L);
+ revertableViaJPQL = dao.findRevertableUsingJPQL(1L);
+ assertEquals(revertable.getId(), entity1.getId());
+ assertEquals(revertableViaJPQL.getId(), entity1.getId());
+
+ // now make it look like upgrade ID 11 was reverted
+ entity1.setRevertAllowed(false);
+ entity1 = dao.merge(entity1);
+
+ // create a downgrade for ID 11
+ UpgradeEntity entity4 = new UpgradeEntity();
+ entity4.setId(44L);
+ entity4.setClusterId(clusterId.longValue());
+ entity4.setDirection(Direction.DOWNGRADE);
+ entity4.setRequestEntity(requestEntity);
+ entity4.setRepositoryVersion(repositoryVersion2500);
+ entity4.setUpgradeType(UpgradeType.ROLLING);
+ entity4.setUpgradePackage("test-upgrade");
+ entity4.setOrchestration(RepositoryType.MAINT);
+ entity4.setDowngradeAllowed(false);
+ dao.create(entity4);
+
+ revertable = dao.findRevertable(1L);
+ revertableViaJPQL = dao.findRevertableUsingJPQL(1L);
+ assertEquals(null, revertable);
+ assertEquals(null, revertableViaJPQL);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
index 629ea9b..dc77fa6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeContextTest.java
@@ -27,6 +27,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.internal.UpgradeResourceProvider;
import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
import org.apache.ambari.server.orm.dao.UpgradeDAO;
@@ -138,6 +139,7 @@ public class UpgradeContextTest extends EasyMockSupport {
m_upgradeDAO.findLastUpgradeForCluster(EasyMock.anyLong(),
eq(Direction.UPGRADE))).andReturn(m_completedRevertableUpgrade).anyTimes();
+ expect(m_completedRevertableUpgrade.getId()).andReturn(1L).anyTimes();
expect(m_completedRevertableUpgrade.getDirection()).andReturn(Direction.UPGRADE).anyTimes();
expect(m_completedRevertableUpgrade.getRepositoryVersion()).andReturn(m_targetRepositoryVersion).anyTimes();
expect(m_completedRevertableUpgrade.getOrchestration()).andReturn(RepositoryType.PATCH).anyTimes();
@@ -309,6 +311,8 @@ public class UpgradeContextTest extends EasyMockSupport {
EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once();
+ expect(m_upgradeDAO.findRevertable(1L)).andReturn(m_completedRevertableUpgrade).once();
+
Map<String, Object> requestMap = new HashMap<>();
requestMap.put(UpgradeResourceProvider.UPGRADE_TYPE, UpgradeType.ROLLING.name());
requestMap.put(UpgradeResourceProvider.UPGRADE_REVERT_UPGRADE_ID, "1");
@@ -327,6 +331,52 @@ public class UpgradeContextTest extends EasyMockSupport {
}
/**
+ * Tests that if the revertable upgrade found for the cluster is different
+ * from the one specified in the revert request, an {@link AmbariException}
+ * is thrown.
+ *
+ * @throws Exception
+ */
+ @Test(expected = AmbariException.class)
+ public void testWrongUpgradeBeingReverted() throws Exception {
+ Long upgradeIdBeingReverted = 1L;
+ Long upgradeIdWhichCanBeReverted = 99L;
+
+ UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class);
+ ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
+
+ UpgradePack upgradePack = createNiceMock(UpgradePack.class);
+
+ expect(upgradeHelper.suggestUpgradePack(EasyMock.anyString(), EasyMock.anyObject(StackId.class),
+ EasyMock.anyObject(StackId.class), EasyMock.anyObject(Direction.class),
+ EasyMock.anyObject(UpgradeType.class), EasyMock.anyString())).andReturn(upgradePack).once();
+
+ RepositoryVersionEntity repositoryVersionEntity = createNiceMock(RepositoryVersionEntity.class);
+ expect(repositoryVersionEntity.getVersion()).andReturn("1.2.3.4").anyTimes();
+
+ UpgradeEntity wrongRevertableUpgrade = createNiceMock(UpgradeEntity.class);
+ expect(wrongRevertableUpgrade.getId()).andReturn(upgradeIdWhichCanBeReverted).atLeastOnce();
+ expect(wrongRevertableUpgrade.getRepositoryVersion()).andReturn(repositoryVersionEntity).atLeastOnce();
+
+ expect(m_upgradeDAO.findRevertable(1L)).andReturn(wrongRevertableUpgrade).once();
+
+ Map<String, Object> requestMap = new HashMap<>();
+ requestMap.put(UpgradeResourceProvider.UPGRADE_TYPE, UpgradeType.ROLLING.name());
+ requestMap.put(UpgradeResourceProvider.UPGRADE_REVERT_UPGRADE_ID, upgradeIdBeingReverted.toString());
+
+ replayAll();
+
+ UpgradeContext context = new UpgradeContext(m_cluster, requestMap, null, upgradeHelper,
+ m_upgradeDAO, m_repositoryVersionDAO, configHelper);
+
+ assertEquals(Direction.DOWNGRADE, context.getDirection());
+ assertEquals(RepositoryType.PATCH, context.getOrchestrationType());
+ assertEquals(1, context.getSupportedServices().size());
+ assertTrue(context.isPatchRevert());
+
+ verifyAll();
+ }
+
+ /**
* Tests that the {@link UpgradeContext} for a patch downgrade has the
* correct scope/orchestration set.
*
http://git-wip-us.apache.org/repos/asf/ambari/blob/8b5d697c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 4b0404d..2a62f2e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -181,7 +181,8 @@ public class UpgradeCatalog260Test {
Capture<DBColumnInfo> rvid = newCapture();
Capture<DBColumnInfo> orchestration = newCapture();
- expectUpdateUpgradeTable(rvid, orchestration);
+ Capture<DBColumnInfo> revertAllowed = newCapture();
+ expectUpdateUpgradeTable(rvid, orchestration, revertAllowed);
Capture<List<DBAccessor.DBColumnInfo>> columns = newCapture();
expectCreateUpgradeHistoryTable(columns);
@@ -216,7 +217,7 @@ public class UpgradeCatalog260Test {
verifyUpdateServiceComponentDesiredStateTable(scdstadd1, scdstalter1, scdstadd2, scdstalter2);
verifyUpdateServiceDesiredStateTable(sdstadd, sdstalter);
verifyAddSelectedCollumsToClusterconfigTable(selectedColumnInfo, selectedmappingColumnInfo, selectedTimestampColumnInfo, createTimestampColumnInfo);
- verifyUpdateUpgradeTable(rvid, orchestration);
+ verifyUpdateUpgradeTable(rvid, orchestration, revertAllowed);
verifyCreateUpgradeHistoryTable(columns);
verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture);
}
@@ -299,7 +300,8 @@ public class UpgradeCatalog260Test {
expectLastCall().once();
}
- public void verifyUpdateUpgradeTable(Capture<DBColumnInfo> rvid, Capture<DBColumnInfo> orchestration) {
+ public void verifyUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
+ Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed) {
DBColumnInfo rvidValue = rvid.getValue();
Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_ID_COLUMN, rvidValue.getName());
Assert.assertEquals(Long.class, rvidValue.getType());
@@ -313,25 +315,43 @@ public class UpgradeCatalog260Test {
Assert.assertEquals(Integer.valueOf(255), orchestrationValue.getLength());
Assert.assertEquals(UpgradeCatalog260.STANDARD, orchestrationValue.getDefaultValue());
Assert.assertEquals(false, orchestrationValue.isNullable());
+
+ DBColumnInfo revertAllowedValue = revertAllowed.getValue();
+ Assert.assertEquals(UpgradeCatalog260.ALLOW_REVERT_COLUMN, revertAllowedValue.getName());
+ Assert.assertEquals(Short.class, revertAllowedValue.getType());
+ Assert.assertEquals(null, revertAllowedValue.getLength());
+ Assert.assertEquals(0, revertAllowedValue.getDefaultValue());
+ Assert.assertEquals(false, revertAllowedValue.isNullable());
}
- public void expectUpdateUpgradeTable(Capture<DBColumnInfo> rvid, Capture<DBColumnInfo> orchestration) throws SQLException {
+ public void expectUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
+ Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed)
+ throws SQLException {
+
dbAccessor.clearTable(eq(UpgradeCatalog260.UPGRADE_TABLE));
expectLastCall().once();
+
dbAccessor.dropFKConstraint(eq(UpgradeCatalog260.UPGRADE_TABLE), eq(UpgradeCatalog260.FK_UPGRADE_FROM_REPO_ID));
expectLastCall().once();
+
dbAccessor.dropFKConstraint(eq(UpgradeCatalog260.UPGRADE_TABLE), eq(UpgradeCatalog260.FK_UPGRADE_TO_REPO_ID));
expectLastCall().once();
+
dbAccessor.dropColumn(eq(UpgradeCatalog260.UPGRADE_TABLE), eq(UpgradeCatalog260.FROM_REPO_VERSION_ID_COLUMN));
expectLastCall().once();
+
dbAccessor.dropColumn(eq(UpgradeCatalog260.UPGRADE_TABLE), eq(UpgradeCatalog260.TO_REPO_VERSION_ID_COLUMN));
expectLastCall().once();
dbAccessor.addColumn(eq(UpgradeCatalog260.UPGRADE_TABLE), capture(rvid));
expectLastCall().once();
+
dbAccessor.addColumn(eq(UpgradeCatalog260.UPGRADE_TABLE), capture(orchestration));
expectLastCall().once();
+ dbAccessor.addColumn(eq(UpgradeCatalog260.UPGRADE_TABLE), capture(revertAllowed));
+ expectLastCall().once();
+
dbAccessor.addFKConstraint(eq(UpgradeCatalog260.UPGRADE_TABLE), eq(UpgradeCatalog260.FK_UPGRADE_REPO_VERSION_ID), eq(UpgradeCatalog260.REPO_VERSION_ID_COLUMN), eq(UpgradeCatalog260.REPO_VERSION_TABLE), eq(UpgradeCatalog260.REPO_VERSION_ID_COLUMN), eq(false));
expectLastCall().once();
}
@@ -487,7 +507,7 @@ public class UpgradeCatalog260Test {
@Test
public void testRemoveDruidSuperset() throws Exception {
- List<Integer> current = new ArrayList<Integer>();
+ List<Integer> current = new ArrayList<>();
current.add(1);
expect(dbAccessor.getConnection()).andReturn(connection).anyTimes();
[39/57] [abbrv] ambari git commit: AMBARI-21896. Ambari should honor
permissions specified for dfs.datanode.data.dir.perm when creating datanode
dir's (aonishuk)
Posted by lp...@apache.org.
AMBARI-21896. Ambari should honor permissions specified for dfs.datanode.data.dir.perm when creating datanode dir's (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3ac4340e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3ac4340e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3ac4340e
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 3ac4340e6a14610bb7687c91a01362ec10837488
Parents: 4bbbe1f
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 11 14:59:24 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 11 14:59:24 2017 +0300
----------------------------------------------------------------------
.../HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py | 2 +-
.../HDFS/2.1.0.2.0/package/scripts/params_linux.py | 2 ++
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py | 4 ++--
3 files changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/3ac4340e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py
index 2d3d4f5..c61a117 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py
@@ -34,7 +34,7 @@ def create_dirs(data_dir):
Directory(data_dir,
create_parents = True,
cd_access="a",
- mode=0755,
+ mode=params.dfs_data_dirs_perm,
owner=params.hdfs_user,
group=params.user_group,
ignore_failures=True
http://git-wip-us.apache.org/repos/asf/ambari/blob/3ac4340e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 256211f..76b430b 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -277,6 +277,8 @@ else:
fs_checkpoint_dirs = default("/configurations/hdfs-site/dfs.namenode.checkpoint.dir", "").split(',')
dfs_data_dirs = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
+dfs_data_dirs_perm = default("/configurations/hdfs-site/dfs.datanode.data.dir.perm", "755")
+dfs_data_dirs_perm = int(dfs_data_dirs_perm, base=8) # parse the octal string (e.g. "750") into an int
data_dir_mount_file = "/var/lib/ambari-agent/data/datanode/dfs_data_dir_mount.hist"
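The base-8 parse above matters because dfs.datanode.data.dir.perm is an octal permission string; interpreting it as decimal would silently produce the wrong mode. The same arithmetic, expressed in Java for consistency with the other sketches in this digest:

// "750" interpreted as octal is 488 decimal, i.e. rwxr-x--- .
int mode = Integer.parseInt("750", 8);
assert mode == 488;   // == 0750 in Java's octal literal notation
// Parsing it as decimal instead would yield 750, a different (and wrong) mode.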
http://git-wip-us.apache.org/repos/asf/ambari/blob/3ac4340e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 966254a..24b0347 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -377,7 +377,7 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
ignore_failures = True,
group = 'hadoop',
- mode = 0755,
+ mode = 0750,
create_parents = True,
cd_access='a'
)
@@ -463,7 +463,7 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
ignore_failures = True,
group = 'hadoop',
- mode = 0755,
+ mode = 0750,
create_parents = True,
cd_access='a'
)
[33/57] [abbrv] ambari git commit: AMBARI-21906. Consider Not
Scheduling ConfigureAction Tasks During PATCH/MAINT Upgrades (ncole)
Posted by lp...@apache.org.
AMBARI-21906. Consider Not Scheduling ConfigureAction Tasks During PATCH/MAINT Upgrades (ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a9d622ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a9d622ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a9d622ed
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: a9d622edb4daf207c225c3da017ad0912ce7cf81
Parents: 4fa4f80
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Sep 8 10:11:40 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Sep 8 10:11:40 2017 -0400
----------------------------------------------------------------------
.../internal/UpgradeResourceProvider.java | 24 +-
.../state/stack/upgrade/ConfigureTask.java | 3 +
.../src/main/resources/upgrade-pack.xsd | 1 +
.../AmbariManagementControllerTest.java | 2 +-
.../internal/UpgradeResourceProviderTest.java | 108 ++++++++
.../server/state/stack/UpgradePackTest.java | 1 +
.../upgrade_test_force_config_change.xml | 267 +++++++++++++++++++
.../version_definition_test_patch_config.xml | 55 ++++
8 files changed, 456 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 0ff21a2..52f66bc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -731,11 +731,12 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
itemEntity.setText(wrapper.getText());
itemEntity.setTasks(wrapper.getTasksJson());
itemEntity.setHosts(wrapper.getHostsJson());
- itemEntities.add(itemEntity);
injectVariables(configHelper, cluster, itemEntity);
- makeServerSideStage(group, upgradeContext, effectiveRepositoryVersion, req,
- itemEntity, (ServerSideActionTask) task, configUpgradePack);
+ if (makeServerSideStage(group, upgradeContext, effectiveRepositoryVersion, req,
+ itemEntity, (ServerSideActionTask) task, configUpgradePack)) {
+ itemEntities.add(itemEntity);
+ }
}
}
} else {
@@ -1184,7 +1185,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
* upgrade
* @throws AmbariException
*/
- private void makeServerSideStage(UpgradeGroupHolder group, UpgradeContext context,
+ private boolean makeServerSideStage(UpgradeGroupHolder group, UpgradeContext context,
RepositoryVersionEntity effectiveRepositoryVersion, RequestStageContainer request,
UpgradeItemEntity entity, ServerSideActionTask task, ConfigUpgradePack configUpgradePack)
throws AmbariException {
@@ -1201,6 +1202,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
String itemDetail = entity.getText();
String stageText = StringUtils.abbreviate(entity.getText(), 255);
+ boolean process = true;
+
switch (task.getType()) {
case SERVER_ACTION:
case MANUAL: {
@@ -1236,6 +1239,13 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
}
case CONFIGURE: {
ConfigureTask ct = (ConfigureTask) task;
+
+ // !!! would prefer to do this in the sequence generator, but there are
+ // too many places to miss
+ if (context.getOrchestrationType().isRevertable() && !ct.supportsPatch) {
+ process = false;
+ }
+
Map<String, String> configurationChanges =
ct.getConfigurationChanges(cluster, configUpgradePack);
@@ -1266,6 +1276,10 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
break;
}
+ if (!process) {
+ return false;
+ }
+
ActionExecutionContext actionContext = new ActionExecutionContext(cluster.getClusterName(),
Role.AMBARI_SERVER_ACTION.toString(), Collections.emptyList(),
commandParams);
@@ -1303,6 +1317,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
context.isComponentFailureAutoSkipped());
request.addStages(Collections.singletonList(stage));
+
+ return true;
}
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index 68dc63f..f88691d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -110,6 +110,9 @@ public class ConfigureTask extends ServerSideActionTask {
@XmlAttribute(name = "id")
public String id;
+ @XmlAttribute(name="supports-patch")
+ public boolean supportsPatch = false;
+
/**
* {@inheritDoc}
*/
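The new supports-patch attribute, declared in the XSD below and exercised by the upgrade_test_force_config_change.xml pack later in this commit, feeds the gate added to makeServerSideStage above. A condensed sketch of the scheduling decision (a restatement, not verbatim source):

// During a revertable (PATCH/MAINT) orchestration, configure tasks are
// dropped unless the pack explicitly opts in with supports-patch="true".
boolean shouldScheduleConfigureTask(UpgradeContext context, ConfigureTask ct) {
  return !(context.getOrchestrationType().isRevertable() && !ct.supportsPatch);
}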
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/main/resources/upgrade-pack.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/upgrade-pack.xsd b/ambari-server/src/main/resources/upgrade-pack.xsd
index aa7ddd8..21606bd 100644
--- a/ambari-server/src/main/resources/upgrade-pack.xsd
+++ b/ambari-server/src/main/resources/upgrade-pack.xsd
@@ -331,6 +331,7 @@
<xs:extension base="abstract-server-task-type">
<xs:sequence />
<xs:attribute name="id" use="required"/>
+ <xs:attribute name="supports-patch" type="xs:boolean" />
</xs:extension>
</xs:complexContent>
</xs:complexType>
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 9309abe..dea4870 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -6977,7 +6977,7 @@ public class AmbariManagementControllerTest {
Assert.assertEquals(1, responsesWithParams.size());
StackVersionResponse resp = responsesWithParams.iterator().next();
assertNotNull(resp.getUpgradePacks());
- assertEquals(15, resp.getUpgradePacks().size());
+ assertTrue(resp.getUpgradePacks().size() > 0);
assertTrue(resp.getUpgradePacks().contains("upgrade_test"));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 37a7b44..fea56d9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -84,6 +84,7 @@ import org.apache.ambari.server.orm.entities.UpgradeHistoryEntity;
import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
import org.apache.ambari.server.security.TestAuthenticationFactory;
import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryAction;
+import org.apache.ambari.server.serveraction.upgrades.ConfigureAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
@@ -1733,6 +1734,113 @@ public class UpgradeResourceProviderTest extends EasyMockSupport {
upgradeResourceProvider.createResources(request);
}
+ @Test
+ public void testCreatePatchWithConfigChanges() throws Exception {
+ Cluster cluster = clusters.getCluster("c1");
+
+ File f = new File("src/test/resources/version_definition_test_patch_config.xml");
+ repoVersionEntity2112.setType(RepositoryType.PATCH);
+ repoVersionEntity2112.setVersionXml(IOUtils.toString(new FileInputStream(f)));
+ repoVersionEntity2112.setVersionXsd("version_definition.xsd");
+ repoVersionDao.merge(repoVersionEntity2112);
+
+ List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+ assertEquals(0, upgrades.size());
+
+ Map<String, Object> requestProps = new HashMap<>();
+ requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_REPO_VERSION_ID, String.valueOf(repoVersionEntity2112.getId()));
+ requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_SKIP_PREREQUISITE_CHECKS, "true");
+ requestProps.put(UpgradeResourceProvider.UPGRADE_DIRECTION, Direction.UPGRADE.name());
+
+ // !!! test that a PATCH upgrade skips config changes
+ ResourceProvider upgradeResourceProvider = createProvider(amc);
+
+ Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+ RequestStatus status = upgradeResourceProvider.createResources(request);
+ Set<Resource> resources = status.getAssociatedResources();
+ assertEquals(1, resources.size());
+ Long requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+ assertNotNull(requestId);
+
+ UpgradeEntity upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+ assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+ HostRoleCommandDAO hrcDAO = injector.getInstance(HostRoleCommandDAO.class);
+ List<HostRoleCommandEntity> commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+ boolean foundConfigTask = false;
+ for (HostRoleCommandEntity command : commands) {
+ if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+ command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+ foundConfigTask = true;
+ break;
+ }
+ }
+ assertFalse(foundConfigTask);
+
+ // !!! test that a patch with a supported patch change gets picked up
+ cluster.setUpgradeEntity(null);
+ requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test_force_config_change");
+ request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+ status = upgradeResourceProvider.createResources(request);
+ resources = status.getAssociatedResources();
+ assertEquals(1, resources.size());
+ requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+ assertNotNull(requestId);
+
+ upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+ assertEquals(RepositoryType.PATCH, upgradeEntity.getOrchestration());
+
+ commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+ foundConfigTask = false;
+ for (HostRoleCommandEntity command : commands) {
+ if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+ command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+ foundConfigTask = true;
+ break;
+ }
+ }
+ assertTrue(foundConfigTask);
+
+ // !!! test that a regular upgrade will pick up the config change
+ cluster.setUpgradeEntity(null);
+ repoVersionEntity2112.setType(RepositoryType.STANDARD);
+ repoVersionDao.merge(repoVersionEntity2112);
+
+ requestProps.put(UpgradeResourceProvider.UPGRADE_PACK, "upgrade_test");
+ request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+
+ status = upgradeResourceProvider.createResources(request);
+ resources = status.getAssociatedResources();
+ assertEquals(1, resources.size());
+ requestId = (Long) resources.iterator().next().getPropertyValue("Upgrade/request_id");
+ assertNotNull(requestId);
+
+ upgradeEntity = upgradeDao.findUpgradeByRequestId(requestId);
+ assertEquals(RepositoryType.STANDARD, upgradeEntity.getOrchestration());
+
+ commands = hrcDAO.findByRequest(upgradeEntity.getRequestId());
+
+ foundConfigTask = false;
+ for (HostRoleCommandEntity command : commands) {
+ if (StringUtils.isNotBlank(command.getCustomCommandName()) &&
+ command.getCustomCommandName().equals(ConfigureAction.class.getName())) {
+ foundConfigTask = true;
+ break;
+ }
+ }
+ assertTrue(foundConfigTask);
+ }
+
private String parseSingleMessage(String msgStr){
JsonParser parser = new JsonParser();
JsonArray msgArray = (JsonArray) parser.parse(msgStr);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
index 616139c..0eac2be 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
@@ -173,6 +173,7 @@ public class UpgradePackTest {
ConfigureTask ct = (ConfigureTask) t;
// check that the Configure task successfully parsed id
assertEquals("hdp_2_1_1_nm_pre_upgrade", ct.getId());
+ assertFalse(ct.supportsPatch);
}
@Test
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_force_config_change.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_force_config_change.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_force_config_change.xml
new file mode 100644
index 0000000..a438afc
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_force_config_change.xml
@@ -0,0 +1,267 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="upgrade-pack.xsd">
+ <target>2.2.*.*</target>
+ <target-stack>HDP-2.2.0</target-stack>
+ <type>ROLLING</type>
+ <prerequisite-checks>
+ <!-- List of additional pre-req checks to run in addition to the required pre-reqs -->
+ <check>org.apache.ambari.server.checks.HiveMultipleMetastoreCheck</check>
+ <check>org.apache.ambari.server.checks.MapReduce2JobHistoryStatePreservingCheck</check>
+ <check>org.apache.ambari.server.checks.SecondaryNamenodeDeletedCheck</check>
+ <check>org.apache.ambari.server.checks.ServicesMapReduceDistributedCacheCheck</check>
+ <check>org.apache.ambari.server.checks.ServicesNamenodeHighAvailabilityCheck</check>
+ <check>org.apache.ambari.server.checks.ServicesTezDistributedCacheCheck</check>
+ <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
+ <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
+ <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
+ </prerequisite-checks>
+
+ <order>
+ <group xsi:type="cluster" name="PRE_CLUSTER" title="Pre {{direction.text.proper}}">
+ <execute-stage title="Confirm 1">
+ <task xsi:type="manual">
+ <message>Foo</message>
+ </task>
+ </execute-stage>
+ <execute-stage service="HDFS" component="NAMENODE" title="Pre Upgrade HIVE">
+ <task xsi:type="manual">
+ <message>Back stuff up.</message>
+ </task>
+ </execute-stage>
+ <execute-stage service="HDFS" component="NAMENODE" title="Finalize HDFS">
+ <task xsi:type="execute">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ </execute-stage>
+ <execute-stage title="Confirm 2">
+ <task xsi:type="manual">
+ <message>Foo</message>
+ </task>
+ </execute-stage>
+ </group>
+
+ <group name="ZOOKEEPER" title="Zookeeper">
+ <skippable>true</skippable>
+ <allow-retry>false</allow-retry>
+ <service name="ZOOKEEPER">
+ <component>ZOOKEEPER_SERVER</component>
+ <component>ZOOKEEPER_CLIENT</component>
+ </service>
+ </group>
+
+ <group name="CORE_MASTER" title="Core Masters">
+ <service name="HDFS">
+ <component>JOURNALNODE</component>
+ <component>NAMENODE</component>
+ </service>
+ <service name="YARN">
+ <component>RESOURCEMANAGER</component>
+ </service>
+ </group>
+
+ <group name="CORE_SLAVES" title="Core Slaves" xsi:type="colocated">
+ <skippable>true</skippable> <!-- set skippable for test -->
+ <allow-retry>false</allow-retry> <!-- set no retry for test -->
+ <service name="HDFS">
+ <component>DATANODE</component>
+ </service>
+ <service name="HBASE">
+ <component>REGIONSERVER</component>
+ </service>
+ <service name="YARN">
+ <component>NODEMANAGER</component>
+ </service>
+
+ <batch>
+ <percent>20</percent>
+ <message>Please run additional tests on {{components}}</message>
+ </batch>
+ </group>
+
+ <group name="HIVE" title="Hive">
+ <skippable>true</skippable>
+ <service name="HIVE">
+ <component>HIVE_METASTORE</component>
+ <component>HIVE_SERVER</component>
+ <component>WEBHCAT_SERVER</component>
+ </service>
+ </group>
+
+ <group name="OOZIE" title="Oozie">
+ <skippable>true</skippable>
+ <supports-auto-skip-failure>false</supports-auto-skip-failure>
+ <service-check>false</service-check>
+ <service name="OOZIE">
+ <component>OOZIE_SERVER</component>
+ <component>OOZIE_CLIENT</component>
+ </service>
+ </group>
+
+ <group xsi:type="cluster" name="POST_CLUSTER" title="Finalize {{direction.text.proper}}">
+ <execute-stage title="Confirm Finalize">
+ <task xsi:type="manual">
+ <message>Please confirm you are ready to finalize</message>
+ </task>
+ </execute-stage>
+ <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
+ <task xsi:type="execute">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ </execute-stage>
+ <execute-stage title="Save Cluster State">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FinalizeUpgradeAction">
+ </task>
+ </execute-stage>
+ </group>
+
+ </order>
+
+
+ <processing>
+ <service name="ZOOKEEPER">
+ <component name="ZOOKEEPER_SERVER">
+ <pre-upgrade>
+ <task xsi:type="manual">
+ <summary>SUMMARY OF PREPARE</summary>
+ <message>This is a manual task with a placeholder of {{foo/bar}}</message>
+ </task>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade>
+ <task xsi:type="restart-task" timeout-config="upgrade.parameter.zk-server.timeout"/>
+ </upgrade>
+ <post-upgrade>
+ <task xsi:type="configure" id="hdp_2_1_1_zookeeper_new_config_type" supports-patch="true" />
+ </post-upgrade>
+ <post-downgrade copy-upgrade="true" />
+ </component>
+ </service>
+
+ <service name="HDFS">
+ <component name="NAMENODE">
+ <pre-upgrade>
+ <task xsi:type="execute" hosts="master">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ <task xsi:type="configure" id="hdp_2_1_1_nn_pre_upgrade" />
+ <task xsi:type="manual">
+ <message>{{direction.verb.proper}} your database</message>
+ </task>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ <post-upgrade>
+ <task xsi:type="execute">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ </post-upgrade>
+ <post-downgrade copy-upgrade="true" />
+ </component>
+ <component name="DATANODE">
+ <pre-downgrade />
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ <post-downgrade>
+ <task xsi:type="manual">
+ <message>Manual Downgrade</message>
+ </task>
+ </post-downgrade>
+ </component>
+ </service>
+
+ <service name="YARN">
+ <component name="RESOURCEMANAGER">
+ <pre-upgrade>
+ <task xsi:type="execute">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade />
+ </component>
+ <component name="NODEMANAGER">
+ <pre-upgrade>
+ <task xsi:type="execute">
+ <script>foo</script>
+ <function>list</function>
+ </task>
+ <task xsi:type="configure" id="hdp_2_1_1_nm_pre_upgrade"/>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade />
+ </component>
+ </service>
+
+ <service name="HIVE">
+ <component name="HIVE_SERVER">
+ <pre-upgrade>
+ <task xsi:type="manual">
+ <summary>HiveServer Port Availability</summary>
+ <message>The HiveServer port will now change to 10010 if Hive is using a binary transport mode, or 10011 if Hive is using an HTTP transport mode. You can use "netstat -anp | grep 1001[01]" to determine if the port is available on each of the following HiveServer host(s): {{hosts.all}}. If the port is not available, the process using it must be terminated.</message>
+ </task>
+ <task xsi:type="configure" id="hdp_2_1_1_hive_server_foo"/>
+ <task xsi:type="configure" id="hdp_2_1_1_hive_server_conditions"/>
+ <task xsi:type="configure" id="hdp_2_1_1_hive_server_conditions_skip"/>
+ <task xsi:type="configure" id="hdp_2_1_1_no_conditions_met"/>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade />
+ </component>
+ </service>
+
+ <service name="OOZIE">
+ <component name="OOZIE_SERVER">
+ <pre-upgrade>
+ <!-- This is important; do not remove it, since UpgradeHelperTest.java's
+ testUpgradeWithMultipleTasksWithMultipleHostTypes() asserts
+ that each of these tasks runs in its own stage. -->
+ <task xsi:type="execute" hosts="all" sequential="true">
+ <summary>Shut down all Oozie servers</summary>
+ <script>scripts/oozie_server.py</script>
+ <function>stop</function>
+ </task>
+
+ <task xsi:type="execute" hosts="any" sequential="true">
+ <summary>Upgrading the Oozie database and creating a new sharelib</summary>
+ <script>scripts/oozie_server_upgrade.py</script>
+ <function>upgrade_oozie_database_and_sharelib</function>
+ </task>
+ </pre-upgrade>
+ <pre-downgrade copy-upgrade="true" />
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
+
+ <component name="OOZIE_CLIENT">
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
+ </service>
+ </processing>
+</upgrade>
http://git-wip-us.apache.org/repos/asf/ambari/blob/a9d622ed/ambari-server/src/test/resources/version_definition_test_patch_config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/version_definition_test_patch_config.xml b/ambari-server/src/test/resources/version_definition_test_patch_config.xml
new file mode 100644
index 0000000..9260c5f
--- /dev/null
+++ b/ambari-server/src/test/resources/version_definition_test_patch_config.xml
@@ -0,0 +1,55 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<repository-version xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:noNamespaceSchemaLocation="version_definition.xsd">
+
+ <release>
+ <type>PATCH</type>
+ <stack-id>HDP-2.3</stack-id>
+ <version>2.3.4.1</version>
+ <build>1234</build>
+ <compatible-with>2.3.4.[1-9]</compatible-with>
+ <release-notes>http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/</release-notes>
+ </release>
+
+ <manifest>
+ <service id="ZOOKEEPER-346" name="ZOOKEEPER" version="3.4.6" />
+ </manifest>
+
+ <available-services>
+ <service idref="ZOOKEEPER-346" />
+ </available-services>
+
+ <repository-info>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0</baseurl>
+ <repoid>HDP-2.3</repoid>
+ <reponame>HDP</reponame>
+ <unique>true</unique>
+ </repo>
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6</baseurl>
+ <repoid>HDP-UTILS-1.1.0.20</repoid>
+ <reponame>HDP-UTILS</reponame>
+ <unique>false</unique>
+ </repo>
+ </os>
+ </repository-info>
+</repository-version>
[32/57] [abbrv] ambari git commit: AMBARI-21914 Agent on
ambari-server host does not start when server is not upgraded (dsen)
Posted by lp...@apache.org.
AMBARI-21914 Agent on ambari-server host does not start when server is not upgraded (dsen)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4fa4f806
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4fa4f806
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4fa4f806
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 4fa4f806751b267983d24769b04b06dd153273a5
Parents: e142dcb
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Sep 8 16:52:22 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Sep 8 16:52:22 2017 +0300
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4fa4f806/ambari-agent/conf/unix/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent b/ambari-agent/conf/unix/ambari-agent
index 70f58ed..f010800 100755
--- a/ambari-agent/conf/unix/ambari-agent
+++ b/ambari-agent/conf/unix/ambari-agent
@@ -83,6 +83,8 @@ AGENT_SCRIPT=/usr/lib/python2.6/site-packages/ambari_agent/main.py
AGENT_TMP_DIR=/var/lib/ambari-agent/tmp
AGENT_WORKING_DIR=/var/lib/ambari-agent
AMBARI_AGENT_PY_SCRIPT=/usr/lib/python2.6/site-packages/ambari_agent/AmbariAgent.py
+COMMON_DIR=/usr/lib/python2.6/site-packages/ambari_commons
+COMMON_DIR_AGENT=/usr/lib/ambari-agent/lib/ambari_commons
OK=0
NOTOK=1
@@ -165,6 +167,19 @@ check_python_version ()
return $OK
}
+check_ambari_common_dir ()
+{
+ echo "Checking ambari-common dir..."
+ # recursively compare all files except 'pyc' and 'pyo' in agent common dir and actual common dir to ensure they are up to date
+ diff -r $COMMON_DIR $COMMON_DIR_AGENT -x '*.py?'
+ OUT=$?
+ if [ $OUT -ne 0 ];then
+ echo "ERROR: ambari_commons folder mismatch. $COMMON_DIR content should be the same as $COMMON_DIR_AGENT. Either ambari-agent is co-hosted with ambari-server and agent was upgraded without server or the link was broken."
+ return $NOTOK
+ fi
+ return $OK
+}
+
retcode=0
case "${1:-}" in
@@ -189,6 +204,11 @@ case "${1:-}" in
fi
change_files_permissions
+ check_ambari_common_dir
+ if [ "$?" -eq "$NOTOK" ]; then
+ exit 1
+ fi
+
echo "Starting ambari-agent"
if [ "${AMBARI_AGENT_RUN_IN_FOREGROUND:-}" == true ] ; then
[56/57] [abbrv] ambari git commit: AMBARI-21307 LDAP config rest
service implementation extends the ambari config rest implementation
Posted by lp...@apache.org.
AMBARI-21307 LDAP config rest service implementation extends the ambari config rest implementation
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/58262615
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/58262615
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/58262615
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 582626157d7f6f91b7e9e49c4635b4a2b5a8509b
Parents: 30dded6
Author: lpuskas <lp...@apache.org>
Authored: Mon Aug 21 15:53:45 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:02 2017 +0200
----------------------------------------------------------------------
.../services/ldap/LdapConfigurationService.java | 208 +++++++++++++++++++
.../api/services/ldap/LdapRestService.java | 149 -------------
2 files changed, 208 insertions(+), 149 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/58262615/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
new file mode 100644
index 0000000..52244bc
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapConfigurationService.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.ldap;
+
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.ambari.annotations.ApiIgnore;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.api.services.AmbariConfigurationService;
+import org.apache.ambari.server.api.services.Result;
+import org.apache.ambari.server.api.services.ResultImpl;
+import org.apache.ambari.server.api.services.ResultStatus;
+import org.apache.ambari.server.controller.internal.ResourceImpl;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
+import org.apache.ambari.server.ldap.LdapConfigurationFactory;
+import org.apache.ambari.server.ldap.service.LdapFacade;
+import org.apache.ambari.server.security.authorization.AuthorizationException;
+import org.apache.ambari.server.security.authorization.AuthorizationHelper;
+import org.apache.ambari.server.security.authorization.ResourceType;
+import org.apache.ambari.server.security.authorization.RoleAuthorization;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.security.core.Authentication;
+
+import com.google.common.collect.Sets;
+
+/**
+ * Endpoint dedicated to LDAP-specific operations.
+ */
+@StaticallyInject
+@Path("/ldapconfigs/")
+public class LdapConfigurationService extends AmbariConfigurationService {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LdapConfigurationService.class);
+
+ @Inject
+ private static LdapFacade ldapFacade;
+
+ @Inject
+ private static LdapConfigurationFactory ldapConfigurationFactory;
+
+ /**
+ * Actions supported by this endpoint
+ */
+ private enum LdapAction {
+ TEST_CONNECTION("test-connection"),
+ TEST_ATTRIBUTES("test-attributes"),
+ DETECT_ATTRIBUTES("detect-attributes");
+
+ private String actionStr;
+
+ LdapAction(String actionStr) {
+ this.actionStr = actionStr;
+ }
+
+ public static LdapAction fromAction(String action) {
+ for (LdapAction val : LdapAction.values()) {
+ if (val.action().equals(action)) {
+ return val;
+ }
+ }
+ throw new IllegalStateException("Action [ " + action + " ] is not supported");
+ }
+
+ public String action() {
+ return this.actionStr;
+ }
+ }
+
+ @POST
+ @ApiIgnore // until documented
+ @Path("/validate")
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+
+ authorize();
+
+ Set<String> groups = Sets.newHashSet();
+
+ Result result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.OK));
+ try {
+
+ validateRequest(ldapCheckConfigurationRequest);
+
+ AmbariLdapConfiguration ambariLdapConfiguration = ldapConfigurationFactory.createLdapConfiguration(
+ ldapCheckConfigurationRequest.getAmbariConfiguration().getData().iterator().next());
+
+ LdapAction action = LdapAction.fromAction(ldapCheckConfigurationRequest.getRequestInfo().getAction());
+ switch (action) {
+
+ case TEST_CONNECTION:
+
+ LOGGER.info("Testing connection to the LDAP server ...");
+ ldapFacade.checkConnection(ambariLdapConfiguration);
+
+ break;
+ case TEST_ATTRIBUTES:
+
+ LOGGER.info("Testing LDAP attributes ....");
+ groups = ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
+ setResult(groups, result);
+
+ break;
+ case DETECT_ATTRIBUTES:
+
+ LOGGER.info("Detecting LDAP attributes ...");
+ ldapFacade.detectAttributes(ambariLdapConfiguration);
+
+ break;
+ default:
+ LOGGER.warn("No action provided ...");
+ throw new IllegalArgumentException("No request action provided");
+ }
+
+ } catch (Exception e) {
+ result.setResultStatus(new ResultStatus(ResultStatus.STATUS.BAD_REQUEST, e));
+ }
+
+ return Response.status(result.getStatus().getStatusCode()).entity(getResultSerializer().serialize(result)).build();
+ }
+
+ private void setResult(Set<String> groups, Result result) {
+ Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
+ resource.setProperty("groups", groups);
+ result.getResultTree().addChild(resource, "payload");
+ }
+
+ private void validateRequest(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
+ String errMsg;
+
+ if (null == ldapCheckConfigurationRequest) {
+ errMsg = "No ldap configuraiton request provided";
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+
+ if (null == ldapCheckConfigurationRequest.getRequestInfo()) {
+ errMsg = String.format("No request information provided. Request: [%s]", ldapCheckConfigurationRequest);
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+
+ if (null == ldapCheckConfigurationRequest.getAmbariConfiguration()
+ || ldapCheckConfigurationRequest.getAmbariConfiguration().getData().size() != 1) {
+ errMsg = String.format("No / Invalid configuration data provided. Request: [%s]", ldapCheckConfigurationRequest);
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+ }
+
+ private void authorize() {
+ try {
+ Authentication authentication = AuthorizationHelper.getAuthentication();
+
+ if (authentication == null || !authentication.isAuthenticated()) {
+ throw new AuthorizationException("Authentication data is not available, authorization to perform the requested operation is not granted");
+ }
+
+ if (!AuthorizationHelper.isAuthorized(authentication, ResourceType.AMBARI, null, requiredAuthorizations())) {
+ throw new AuthorizationException("The authenticated user does not have the appropriate authorizations to create the requested resource(s)");
+ }
+ } catch (AuthorizationException e) {
+ LOGGER.error("Unauthorized operation.", e);
+ throw new IllegalArgumentException("User is not authorized to perform the operation", e);
+ }
+
+ }
+
+ Set<RoleAuthorization> requiredAuthorizations() {
+ return Sets.newHashSet(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION);
+ }
+}
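
Compared to the raw string switch in the removed LdapRestService below, the enum centralizes the action vocabulary and makes unknown actions fail fast. A self-contained mirror of the lookup (an illustrative copy, not the Ambari class itself) shows the behavior:

public class LdapActionDemo {

  enum LdapAction {
    TEST_CONNECTION("test-connection"),
    TEST_ATTRIBUTES("test-attributes"),
    DETECT_ATTRIBUTES("detect-attributes");

    private final String actionStr;

    LdapAction(String actionStr) {
      this.actionStr = actionStr;
    }

    // Resolve the request's action string to an enum constant, or fail fast.
    static LdapAction fromAction(String action) {
      for (LdapAction val : values()) {
        if (val.actionStr.equals(action)) {
          return val;
        }
      }
      throw new IllegalStateException("Action [ " + action + " ] is not supported");
    }
  }

  public static void main(String[] args) {
    System.out.println(LdapAction.fromAction("test-connection")); // TEST_CONNECTION
    try {
      LdapAction.fromAction("bogus");
    } catch (IllegalStateException expected) {
      System.out.println(expected.getMessage()); // Action [ bogus ] is not supported
    }
  }
}
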
http://git-wip-us.apache.org/repos/asf/ambari/blob/58262615/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
deleted file mode 100644
index 4e654dc..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/ldap/LdapRestService.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.services.ldap;
-
-import java.util.Set;
-
-import javax.inject.Inject;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import org.apache.ambari.annotations.ApiIgnore;
-import org.apache.ambari.server.StaticallyInject;
-import org.apache.ambari.server.api.services.BaseService;
-import org.apache.ambari.server.api.services.Result;
-import org.apache.ambari.server.api.services.ResultImpl;
-import org.apache.ambari.server.api.services.ResultStatus;
-import org.apache.ambari.server.controller.internal.ResourceImpl;
-import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.ldap.AmbariLdapConfiguration;
-import org.apache.ambari.server.ldap.LdapConfigurationFactory;
-import org.apache.ambari.server.ldap.service.LdapFacade;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Sets;
-
-/**
- * Endpoint designated to LDAP specific operations.
- */
-@StaticallyInject
-@Path("/ldap")
-public class LdapRestService extends BaseService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(LdapRestService.class);
-
- @Inject
- private static LdapFacade ldapFacade;
-
- @Inject
- private static LdapConfigurationFactory ldapConfigurationFactory;
-
- @POST
- @ApiIgnore // until documented
- @Path("/validate") // todo this needs to be moved under the resource
- @Consumes(MediaType.APPLICATION_JSON)
- @Produces(MediaType.APPLICATION_JSON)
- public Response validateConfiguration(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
-
- Set<String> groups = Sets.newHashSet();
-
- Result result = new ResultImpl(new ResultStatus(ResultStatus.STATUS.OK));
- try {
-
- validateRequest(ldapCheckConfigurationRequest);
-
- AmbariLdapConfiguration ambariLdapConfiguration = ldapConfigurationFactory.createLdapConfiguration(
- ldapCheckConfigurationRequest.getAmbariConfiguration().getData().iterator().next());
-
- switch (ldapCheckConfigurationRequest.getRequestInfo().getAction()) {
- case "test-connection":
-
- LOGGER.info("Testing connection to the LDAP server ...");
- ldapFacade.checkConnection(ambariLdapConfiguration);
-
- break;
- case "test-attributes":
-
- LOGGER.info("Testing LDAP attributes ....");
- groups = ldapFacade.checkLdapAttibutes(ldapCheckConfigurationRequest.getRequestInfo().getParameters(), ambariLdapConfiguration);
- setResult(groups, result);
-
- break;
- case "detect-attributes":
-
- LOGGER.info("Detecting LDAP attributes ...");
- ldapFacade.detectAttributes(ambariLdapConfiguration);
-
- break;
- default:
- LOGGER.warn("No action provided ...");
- throw new IllegalArgumentException("No request action provided");
- }
-
- } catch (Exception e) {
- result.setResultStatus(new ResultStatus(ResultStatus.STATUS.BAD_REQUEST, e));
- }
-
- return Response.status(result.getStatus().getStatusCode()).entity(getResultSerializer().serialize(result)).build();
- }
-
- private void setResult(Set<String> groups, Result result) {
- Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
- resource.setProperty("groups", groups);
- result.getResultTree().addChild(resource, "payload");
- }
-
- private void validateRequest(LdapCheckConfigurationRequest ldapCheckConfigurationRequest) {
- String errMsg;
-
- if (null == ldapCheckConfigurationRequest) {
- errMsg = "No ldap configuraiton request provided";
- LOGGER.error(errMsg);
- throw new IllegalArgumentException(errMsg);
- }
-
- if (null == ldapCheckConfigurationRequest.getRequestInfo()) {
- errMsg = String.format("No request information provided. Request: [%s]", ldapCheckConfigurationRequest);
- LOGGER.error(errMsg);
- throw new IllegalArgumentException(errMsg);
- }
-
- if (null == ldapCheckConfigurationRequest.getAmbariConfiguration()
- || ldapCheckConfigurationRequest.getAmbariConfiguration().getData().size() != 1) {
- errMsg = String.format("No / Invalid configuration data provided. Request: [%s]", ldapCheckConfigurationRequest);
- LOGGER.error(errMsg);
- throw new IllegalArgumentException(errMsg);
- }
- }
-}
[22/57] [abbrv] ambari git commit: AMBARI-21908. Server returns 500
error for create config group request. (swagle)
Posted by lp...@apache.org.
AMBARI-21908. Server returns 500 error for create config group request. (swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5b1a63b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5b1a63b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5b1a63b7
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 5b1a63b7168296aab5a56744edd14dc1cc199425
Parents: 5e242c9
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Sep 7 13:50:43 2017 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Sep 7 13:50:43 2017 -0700
----------------------------------------------------------------------
.../ambari/server/controller/ConfigGroupRequest.java | 14 ++++++++++++--
.../internal/ConfigGroupResourceProvider.java | 8 ++++++--
.../apache/ambari/server/topology/AmbariContext.java | 4 ++--
ambari-server/src/main/resources/properties.json | 1 +
4 files changed, 21 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b1a63b7/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
index cb20328..babdf10 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
@@ -27,18 +27,20 @@ public class ConfigGroupRequest {
private String clusterName;
private String groupName;
private String tag;
+ private String serviceName;
private String description;
private String serviceConfigVersionNote;
private Set<String> hosts;
private Map<String, Config> configs;
public ConfigGroupRequest(Long id, String clusterName, String groupName,
- String tag, String description, Set<String> hosts,
- Map<String, Config> configs) {
+ String tag, String serviceName, String description,
+ Set<String> hosts, Map<String, Config> configs) {
this.id = id;
this.clusterName = clusterName;
this.groupName = groupName;
this.tag = tag;
+ this.serviceName = serviceName;
this.description = description;
this.hosts = hosts;
this.configs = configs;
@@ -68,6 +70,14 @@ public class ConfigGroupRequest {
this.tag = tag;
}
+ public String getServiceName() {
+ return serviceName;
+ }
+
+ public void setServiceName(String serviceName) {
+ this.serviceName = serviceName;
+ }
+
public String getDescription() {
return description;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b1a63b7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index 2a45f02..737bfa4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,6 +85,8 @@ public class ConfigGroupResourceProvider extends
.getPropertyId("ConfigGroup", "group_name");
protected static final String CONFIGGROUP_TAG_PROPERTY_ID = PropertyHelper
.getPropertyId("ConfigGroup", "tag");
+ protected static final String CONFIGGROUP_SERVICENAME_PROPERTY_ID = PropertyHelper
+ .getPropertyId("ConfigGroup", "service_name");
protected static final String CONFIGGROUP_DESC_PROPERTY_ID = PropertyHelper
.getPropertyId("ConfigGroup", "description");
protected static final String CONFIGGROUP_SCV_NOTE_ID = PropertyHelper
@@ -562,8 +565,8 @@ public class ConfigGroupResourceProvider extends
verifyHostList(cluster, hosts, request);
- String serviceName = null;
- if (request.getConfigs() != null && !request.getConfigs().isEmpty()) {
+ String serviceName = request.getServiceName();
+ if (serviceName == null && !MapUtils.isEmpty(request.getConfigs())) {
try {
serviceName = cluster.getServiceForConfigTypes(request.getConfigs().keySet());
} catch (IllegalArgumentException e) {
@@ -751,6 +754,7 @@ public class ConfigGroupResourceProvider extends
(String) properties.get(CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID),
(String) properties.get(CONFIGGROUP_NAME_PROPERTY_ID),
(String) properties.get(CONFIGGROUP_TAG_PROPERTY_ID),
+ (String) properties.get(CONFIGGROUP_SERVICENAME_PROPERTY_ID),
(String) properties.get(CONFIGGROUP_DESC_PROPERTY_ID),
null,
null);
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b1a63b7/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index f81ff99..1556b0d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -769,8 +769,8 @@ public class AmbariContext {
}
});
- ConfigGroupRequest request = new ConfigGroupRequest(
- null, clusterName, absoluteGroupName, service, "Host Group Configuration",
+ ConfigGroupRequest request = new ConfigGroupRequest(null, clusterName,
+ absoluteGroupName, service, service, "Host Group Configuration",
Sets.newHashSet(filteredGroupHosts), serviceConfigs);
// get the config group provider and create config group resource
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b1a63b7/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 5f3acdd..e42864f 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -63,6 +63,7 @@
"ConfigGroup/id",
"ConfigGroup/cluster_name",
"ConfigGroup/group_name",
+ "ConfigGroup/service_name",
"ConfigGroup/tag",
"ConfigGroup/description",
"ConfigGroup/hosts",
[31/57] [abbrv] ambari git commit: AMBARI-21877 - Spark Service check
failure in Ambari with kerberos enabled (Mingjie Tang via jonathanhurley)
Posted by lp...@apache.org.
AMBARI-21877 - Spark Service check failure in Ambari with kerberos enabled (Mingjie Tang via jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e142dcb6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e142dcb6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e142dcb6
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: e142dcb6ca4f19fa986208dd69acde5a6e50cc67
Parents: de94def
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Sep 8 09:22:07 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 8 09:22:50 2017 -0400
----------------------------------------------------------------------
.../SPARK/1.2.1/package/scripts/service_check.py | 3 ++-
.../SPARK2/2.0.0/package/scripts/service_check.py | 3 ++-
.../test/python/stacks/2.2/SPARK/test_spark_service_check.py | 8 ++++++--
3 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e142dcb6/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
index 4699b2e..64641d0 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
@@ -37,7 +37,8 @@ class SparkServiceCheck(Script):
Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
tries=5,
try_sleep=3,
- logoutput=True
+ logoutput=True,
+ user=params.spark_user
)
if params.has_livyserver:
live_livyserver_host = "";
http://git-wip-us.apache.org/repos/asf/ambari/blob/e142dcb6/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
index 7667191..01d7370 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/service_check.py
@@ -38,7 +38,8 @@ class SparkServiceCheck(Script):
Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k {spark_history_scheme}://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
tries=5,
try_sleep=3,
- logoutput=True
+ logoutput=True,
+ user=params.spark_user
)
if params.has_livyserver:
live_livyserver_host = ""
http://git-wip-us.apache.org/repos/asf/ambari/blob/e142dcb6/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
index c2c3e03..3f6c410 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
@@ -39,7 +39,8 @@ class TestServiceCheck(RMFTestCase):
self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
tries = 5,
try_sleep = 3,
- logoutput = True
+ logoutput = True,
+ user = 'spark'
)
self.assertNoMoreResources()
@@ -58,6 +59,9 @@ class TestServiceCheck(RMFTestCase):
self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
tries = 5,
try_sleep = 3,
- logoutput = True
+ logoutput = True,
+ user = 'spark'
)
self.assertNoMoreResources()
+
+
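
The fix runs the curl probe as the Spark service user so that, with Kerberos enabled, the --negotiate handshake picks up that user's credential cache. Roughly, the added user= parameter amounts to the following; a hedged Java sketch with an illustrative user and URL, not Ambari's implementation (which drives the same idea through its Execute resource):

import java.io.IOException;

public class SparkCheckAsUser {
  public static void main(String[] args) throws IOException, InterruptedException {
    // The probe from the service check above: expect HTTP 200 from the
    // Spark History Server, authenticating via SPNEGO.
    String check = "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k "
        + "http://localhost:18080 | grep 200";
    // Run it as the 'spark' user (requires root); user name is illustrative.
    Process p = new ProcessBuilder("su", "-s", "/bin/bash", "spark", "-c", check)
        .inheritIO().start();
    System.exit(p.waitFor());
  }
}
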
[05/57] [abbrv] ambari git commit: AMBARI-21882. Throw an error if
unsupported database JDBC driver is configured for HDP services. (stoader)
Posted by lp...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/ranger_admin_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/ranger_admin_default.json b/ambari-server/src/test/python/common-services/configs/ranger_admin_default.json
new file mode 100644
index 0000000..b360c07
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/ranger_admin_default.json
@@ -0,0 +1,386 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.2",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "java_version": "8",
+ "db_name": "ambari",
+ "custom_mysql_jdbc_name" : "mysql-connector-java.jar",
+ "custom_oracle_jdbc_name" : "oracle-jdbc-driver.jar",
+ "custom_postgres_jdbc_name" : "test-postgres-jdbc.jar",
+ "custom_mssql_jdbc_name" : "mssql-jdbc-driver.jar",
+ "custom_sqlanywhere_jdbc_name" : "sqla-client-jdbc.tar.gz"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "SLIDER",
+ "role": "SLIDER",
+ "commandParams": {
+ "version": "2.2.1.0-2067",
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2"
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "admin-properties": {
+ "authentication_method": "UNIX",
+ "db_root_user": "root",
+ "xa_ldap_groupSearchBase": "\"ou=groups,dc=xasecure,dc=net\"",
+ "audit_db_name": "ranger_audit",
+ "xa_ldap_ad_domain": "\"xasecure.net\"",
+ "remoteLoginEnabled": "true",
+ "SQL_CONNECTOR_JAR": "/usr/share/java/mysql-connector-java.jar",
+ "xa_ldap_userDNpattern": "\"uid={0},ou=users,dc=xasecure,dc=net\"",
+ "SQL_COMMAND_INVOKER": "mysql",
+ "db_user": "rangeradmin",
+ "db_password": "aa",
+ "authServicePort": "5151",
+ "audit_db_password": "aa",
+ "DB_FLAVOR": "MYSQL",
+ "audit_db_user": "rangerlogger",
+ "db_root_password": "aa",
+ "xa_ldap_url": "\"ldap://71.127.43.33:389\"",
+ "db_name": "ranger",
+ "xa_ldap_groupSearchFilter": "\"(member=uid={0},ou=users,dc=xasecure,dc=net)\"",
+ "authServiceHostName": "localhost",
+ "xa_ldap_ad_url": "\"ldap://ad.xasecure.net:389\"",
+ "policymgr_external_url": "http://localhost:6080",
+ "policymgr_http_enabled": "true",
+ "db_host": "localhost",
+ "xa_ldap_groupRoleAttribute": "\"cn\""
+ },
+ "ranger-site": {
+ "http.enabled": "true",
+ "http.service.port": "6080",
+ "https.attrib.keystorePass": "ranger",
+ "https.attrib.clientAuth": "want",
+ "https.attrib.keystoreFile": "/etc/ranger/admin/keys/server.jks",
+ "https.service.port": "6182",
+ "https.attrib.keyAlias": "myKey"
+ },
+ "ranger-admin-site": {
+ "ranger.admin.kerberos.cookie.domain": "",
+ "ranger.kms.service.user.hdfs": "hdfs",
+ "ranger.spnego.kerberos.principal": "",
+ "ranger.ldap.ad.url": "{{ranger_ug_ldap_url}}",
+ "ranger.plugins.hive.serviceuser": "hive",
+ "ranger.lookup.kerberos.keytab": "",
+ "ranger.plugins.kms.serviceuser": "kms",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "ranger.sso.browser.useragent": "Mozilla,chrome",
+ "ranger.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/ranger01",
+ "ranger.plugins.hbase.serviceuser": "hbase",
+ "ranger.plugins.hdfs.serviceuser": "hdfs",
+ "ranger.ldap.group.searchbase": "{{ranger_ug_ldap_group_searchbase}}",
+ "ranger.ldap.user.dnpattern": "uid={0},ou=users,dc=xasecure,dc=net",
+ "ranger.plugins.knox.serviceuser": "knox",
+ "ranger.ldap.base.dn": "dc=example,dc=com",
+ "ranger.sso.publicKey": "",
+ "ranger.admin.kerberos.cookie.path": "/",
+ "ranger.service.https.attrib.clientAuth": "want",
+ "ranger.jpa.jdbc.user": "{{ranger_db_user}}",
+ "ranger.ldap.ad.user.searchfilter": "(sAMAccountName={0})",
+ "ranger.ldap.group.roleattribute": "cn",
+ "ranger.plugins.kafka.serviceuser": "kafka",
+ "ranger.admin.kerberos.principal": "",
+ "ranger.ldap.ad.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.credential.provider.path": "/etc/ranger/admin/rangeradmin.jceks",
+ "ranger.ldap.referral": "ignore",
+ "ranger.service.http.port": "6080",
+ "ranger.ldap.user.searchfilter": "(uid={0})",
+ "ranger.plugins.atlas.serviceuser": "atlas",
+ "ranger.truststore.password": "changeit",
+ "ranger.ldap.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.password": "NONE",
+ "ranger.audit.solr.zookeepers": "c6401.ambari.apache.org:2181/ambari-solr",
+ "ranger.lookup.kerberos.principal": "",
+ "ranger.service.https.port": "6182",
+ "ranger.plugins.storm.serviceuser": "storm",
+ "ranger.externalurl": "{{ranger_external_url}}",
+ "ranger.truststore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.kms.service.user.hive": "",
+ "ranger.https.attrib.keystore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.jpa.jdbc.dialect": "{{jdbc_dialect}}",
+ "ranger.service.host": "{{ranger_host}}",
+ "ranger.service.https.attrib.keystore.keyalias": "rangeradmin",
+ "ranger.service.https.attrib.keystore.pass": "xasecure",
+ "ranger.unixauth.remote.login.enabled": "true",
+ "ranger.jpa.jdbc.credential.alias": "rangeradmin",
+ "ranger.ldap.ad.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.username": "ranger_solr",
+ "ranger.sso.enabled": "false",
+ "ranger.audit.solr.urls": "",
+ "ranger.ldap.ad.domain": "",
+ "ranger.plugins.yarn.serviceuser": "yarn",
+ "ranger.audit.source.type": "solr",
+ "ranger.ldap.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.ldap.url": "{{ranger_ug_ldap_url}}",
+ "ranger.authentication.method": "UNIX",
+ "ranger.service.http.enabled": "true",
+ "ranger.ldap.group.searchfilter": "{{ranger_ug_ldap_group_searchfilter}}",
+ "ranger.ldap.ad.referral": "ignore",
+ "ranger.ldap.ad.base.dn": "dc=example,dc=com",
+ "ranger.jpa.jdbc.password": "_",
+ "ranger.spnego.kerberos.keytab": "",
+ "ranger.sso.providerurl": "",
+ "ranger.unixauth.service.hostname": "{{ugsync_host}}",
+ "ranger.admin.kerberos.keytab": "",
+ "ranger.admin.kerberos.token.valid.seconds": "30",
+ "ranger.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.unixauth.service.port": "5151"
+ },
+ "usersync-properties": {
+ "SYNC_INTERVAL": "1",
+ "SYNC_LDAP_USERNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_FILTER": "-",
+ "SYNC_LDAP_URL": "ldap://localhost:389",
+ "SYNC_LDAP_GROUPNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_SCOPE": "sub",
+ "SYNC_LDAP_BIND_PASSWORD": "admin321",
+ "SYNC_LDAP_USER_NAME_ATTRIBUTE": "cn",
+ "MIN_UNIX_USER_ID_TO_SYNC": "1000",
+ "SYNC_LDAP_USER_SEARCH_BASE": "ou=users,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_OBJECT_CLASS": "person",
+ "CRED_KEYSTORE_FILENAME": "/usr/lib/xausersync/.jceks/xausersync.jceks",
+ "SYNC_SOURCE": "unix",
+ "SYNC_LDAP_BIND_DN": "cn=admin,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_GROUP_NAME_ATTRIBUTE": "memberof,ismemberof",
+ "logdir": "logs"
+ },
+ "usersync-properties": {
+ "SYNC_INTERVAL": "1",
+ "SYNC_LDAP_USERNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_FILTER": "-",
+ "SYNC_LDAP_URL": "ldap://localhost:389",
+ "SYNC_LDAP_GROUPNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_SCOPE": "sub",
+ "SYNC_LDAP_BIND_PASSWORD": "admin321",
+ "SYNC_LDAP_USER_NAME_ATTRIBUTE": "cn",
+ "MIN_UNIX_USER_ID_TO_SYNC": "1000",
+ "SYNC_LDAP_USER_SEARCH_BASE": "ou=users,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_OBJECT_CLASS": "person",
+ "CRED_KEYSTORE_FILENAME": "/usr/lib/xausersync/.jceks/xausersync.jceks",
+ "SYNC_SOURCE": "unix",
+ "SYNC_LDAP_BIND_DN": "cn=admin,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_GROUP_NAME_ATTRIBUTE": "memberof,ismemberof",
+ "logdir": "logs"
+ },
+ "ranger-env": {
+ "ranger_group": "ranger",
+ "ranger_admin_log_dir": "/var/log/ranger/admin",
+ "oracle_home": "-",
+ "admin_username": "admin",
+ "ranger_user": "ranger",
+ "ranger_admin_username": "amb_ranger_admin",
+ "admin_password": "admin",
+ "ranger_admin_password": "aa",
+ "ranger_usersync_log_dir": "/var/log/ranger/usersync",
+ "xml_configurations_supported" : "false"
+ },
+ "spark-javaopts-properties": {
+ "content": " "
+ },
+ "hadoop-env": {
+ "dtnode_heapsize": "1024m",
+ "namenode_opt_maxnewsize": "256m",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "namenode_heapsize": "1024m",
+ "proxyuser_group": "users",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hadoop Configuration Directory\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to H
ADOOP_OPTS when specified\nexport HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xm
x{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=$HADOOP_NAMENODE_OPTS\n\n# The following appli
es to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS\"\n\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MA
STER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql connector\nfor jarFile in `ls /usr/share/java/*
mysql* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n# Add libraries required by oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n# Add libraries required by nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\n# added to the HADOOP_CLASSPATH\nif [ -d \"/usr/hdp/current/tez-client\" ]; then\n if [ -d \"/etc/tez/conf/\" ]; then\n # When using versioned RPMs, the tez-client will be a symlink to the current folder of tez in HDP.\n export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/etc/tez/conf/\n fi\nfi\n\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n# Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}\n\nexport HADOOP_OPTS=\"-Dhdp.version=$HDP_VERSION $HADOOP_OPTS\"
",
+ "hdfs_user": "hdfs",
+ "namenode_opt_newsize": "256m",
+ "hadoop_root_logger": "INFO,RFA",
+ "hadoop_heapsize": "1024",
+ "namenode_opt_maxpermsize": "256m",
+ "namenode_opt_permsize": "128m"
+ },
+ "slider-client": {
+ "slider.yarn.queue": "default"
+ },
+ "core-site": {
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
+ },
+ "hdfs-site": {
+ "a": "b"
+ },
+ "yarn-site": {
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.address": "c6401.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "ranger-knox-plugin-properties": {
+ "POLICY_MGR_URL": "{{policymgr_mgr_url}}",
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "KNOX_HOME": "/usr/hdp/current/knox-server",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "REPOSITORY_NAME": "{{repo_name}}",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}",
+ "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "ranger-knox-plugin-enabled": "Yes",
+ "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}",
+ "policy_user": "ambari-qa",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "REPOSITORY_CONFIG_USERNAME": "admin",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "admin-password",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "webhcat-site": {
+ "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
+ "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+ "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+ "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+ "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
+ },
+ "slider-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "slider-env": {
+ "content": "envproperties\nline2"
+ },
+ "ranger-hbase-plugin-properties": {
+ "ranger-hbase-plugin-enabled":"yes"
+ },
+ "ranger-hive-plugin-properties": {
+ "ranger-hive-plugin-enabled":"yes"
+ }
+ },
+ "configuration_attributes": {
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ }
+ },
+ "configurationTags": {
+ "slider-client": {
+ "tag": "version1"
+ },
+ "slider-log4j": {
+ "tag": "version1"
+ },
+ "slider-env": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "gateway-site": {
+ "tag": "version1"
+ },
+ "topology": {
+ "tag": "version1"
+ },
+ "users-ldif": {
+ "tag": "version1"
+ },
+ "kafka-env": {
+ "tag": "version1"
+ },
+ "kafka-log4j": {
+ "tag": "version1"
+ },
+ "kafka-broker": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "knox_gateway_hosts": [
+ "jaimin-knox-1.c.pramod-thangali.internal"
+ ],
+ "kafka_broker_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_admin_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_usersync_hosts" : [
+ "c6408.ambari.apache.org"
+ ]
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/ranger_admin_unsupported_db_flavor.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/configs/ranger_admin_unsupported_db_flavor.json b/ambari-server/src/test/python/common-services/configs/ranger_admin_unsupported_db_flavor.json
new file mode 100644
index 0000000..aa620a4
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/ranger_admin_unsupported_db_flavor.json
@@ -0,0 +1,386 @@
+{
+ "roleCommand": "SERVICE_CHECK",
+ "clusterName": "c1",
+ "hostname": "c6401.ambari.apache.org",
+ "hostLevelParams": {
+ "not_managed_hdfs_path_list": "[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+ "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+ "ambari_db_rca_password": "mapred",
+ "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+ "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+ "stack_version": "2.2",
+ "stack_name": "HDP",
+ "ambari_db_rca_driver": "org.postgresql.Driver",
+ "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+ "ambari_db_rca_username": "mapred",
+ "java_home": "/usr/jdk64/jdk1.7.0_45",
+ "java_version": "8",
+ "db_name": "ambari",
+ "custom_mysql_jdbc_name" : "mysql-connector-java.jar",
+ "custom_oracle_jdbc_name" : "oracle-jdbc-driver.jar",
+ "custom_postgres_jdbc_name" : "test-postgres-jdbc.jar",
+ "custom_mssql_jdbc_name" : "mssql-jdbc-driver.jar",
+ "custom_sqlanywhere_jdbc_name" : "sqla-client-jdbc.tar.gz"
+ },
+ "commandType": "EXECUTION_COMMAND",
+ "roleParams": {},
+ "serviceName": "SLIDER",
+ "role": "SLIDER",
+ "commandParams": {
+ "version": "2.2.1.0-2067",
+ "command_timeout": "300",
+ "service_package_folder": "OOZIE",
+ "script_type": "PYTHON",
+ "script": "scripts/service_check.py",
+ "excluded_hosts": "host1,host2"
+ },
+ "taskId": 152,
+ "public_hostname": "c6401.ambari.apache.org",
+ "configurations": {
+ "admin-properties": {
+ "authentication_method": "UNIX",
+ "db_root_user": "root",
+ "xa_ldap_groupSearchBase": "\"ou=groups,dc=xasecure,dc=net\"",
+ "audit_db_name": "ranger_audit",
+ "xa_ldap_ad_domain": "\"xasecure.net\"",
+ "remoteLoginEnabled": "true",
+ "SQL_CONNECTOR_JAR": "/usr/share/java/mysql-connector-java.jar",
+ "xa_ldap_userDNpattern": "\"uid={0},ou=users,dc=xasecure,dc=net\"",
+ "SQL_COMMAND_INVOKER": "mysql",
+ "db_user": "rangeradmin",
+ "db_password": "aa",
+ "authServicePort": "5151",
+ "audit_db_password": "aa",
+ "DB_FLAVOR": "UNSUPPORTED",
+ "audit_db_user": "rangerlogger",
+ "db_root_password": "aa",
+ "xa_ldap_url": "\"ldap://71.127.43.33:389\"",
+ "db_name": "ranger",
+ "xa_ldap_groupSearchFilter": "\"(member=uid={0},ou=users,dc=xasecure,dc=net)\"",
+ "authServiceHostName": "localhost",
+ "xa_ldap_ad_url": "\"ldap://ad.xasecure.net:389\"",
+ "policymgr_external_url": "http://localhost:6080",
+ "policymgr_http_enabled": "true",
+ "db_host": "localhost",
+ "xa_ldap_groupRoleAttribute": "\"cn\""
+ },
+ "ranger-site": {
+ "http.enabled": "true",
+ "http.service.port": "6080",
+ "https.attrib.keystorePass": "ranger",
+ "https.attrib.clientAuth": "want",
+ "https.attrib.keystoreFile": "/etc/ranger/admin/keys/server.jks",
+ "https.service.port": "6182",
+ "https.attrib.keyAlias": "myKey"
+ },
+ "ranger-admin-site": {
+ "ranger.admin.kerberos.cookie.domain": "",
+ "ranger.kms.service.user.hdfs": "hdfs",
+ "ranger.spnego.kerberos.principal": "",
+ "ranger.ldap.ad.url": "{{ranger_ug_ldap_url}}",
+ "ranger.plugins.hive.serviceuser": "hive",
+ "ranger.lookup.kerberos.keytab": "",
+ "ranger.plugins.kms.serviceuser": "kms",
+ "ranger.service.https.attrib.ssl.enabled": "false",
+ "ranger.sso.browser.useragent": "Mozilla,chrome",
+ "ranger.jpa.jdbc.url": "jdbc:mysql://c6401.ambari.apache.org:3306/ranger01",
+ "ranger.plugins.hbase.serviceuser": "hbase",
+ "ranger.plugins.hdfs.serviceuser": "hdfs",
+ "ranger.ldap.group.searchbase": "{{ranger_ug_ldap_group_searchbase}}",
+ "ranger.ldap.user.dnpattern": "uid={0},ou=users,dc=xasecure,dc=net",
+ "ranger.plugins.knox.serviceuser": "knox",
+ "ranger.ldap.base.dn": "dc=example,dc=com",
+ "ranger.sso.publicKey": "",
+ "ranger.admin.kerberos.cookie.path": "/",
+ "ranger.service.https.attrib.clientAuth": "want",
+ "ranger.jpa.jdbc.user": "{{ranger_db_user}}",
+ "ranger.ldap.ad.user.searchfilter": "(sAMAccountName={0})",
+ "ranger.ldap.group.roleattribute": "cn",
+ "ranger.plugins.kafka.serviceuser": "kafka",
+ "ranger.admin.kerberos.principal": "",
+ "ranger.ldap.ad.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.credential.provider.path": "/etc/ranger/admin/rangeradmin.jceks",
+ "ranger.ldap.referral": "ignore",
+ "ranger.service.http.port": "6080",
+ "ranger.ldap.user.searchfilter": "(uid={0})",
+ "ranger.plugins.atlas.serviceuser": "atlas",
+ "ranger.truststore.password": "changeit",
+ "ranger.ldap.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.password": "NONE",
+ "ranger.audit.solr.zookeepers": "c6401.ambari.apache.org:2181/ambari-solr",
+ "ranger.lookup.kerberos.principal": "",
+ "ranger.service.https.port": "6182",
+ "ranger.plugins.storm.serviceuser": "storm",
+ "ranger.externalurl": "{{ranger_external_url}}",
+ "ranger.truststore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.kms.service.user.hive": "",
+ "ranger.https.attrib.keystore.file": "/etc/ranger/admin/conf/ranger-admin-keystore.jks",
+ "ranger.jpa.jdbc.dialect": "{{jdbc_dialect}}",
+ "ranger.service.host": "{{ranger_host}}",
+ "ranger.service.https.attrib.keystore.keyalias": "rangeradmin",
+ "ranger.service.https.attrib.keystore.pass": "xasecure",
+ "ranger.unixauth.remote.login.enabled": "true",
+ "ranger.jpa.jdbc.credential.alias": "rangeradmin",
+ "ranger.ldap.ad.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}",
+ "ranger.audit.solr.username": "ranger_solr",
+ "ranger.sso.enabled": "false",
+ "ranger.audit.solr.urls": "",
+ "ranger.ldap.ad.domain": "",
+ "ranger.plugins.yarn.serviceuser": "yarn",
+ "ranger.audit.source.type": "solr",
+ "ranger.ldap.bind.dn": "{{ranger_ug_ldap_bind_dn}}",
+ "ranger.ldap.url": "{{ranger_ug_ldap_url}}",
+ "ranger.authentication.method": "UNIX",
+ "ranger.service.http.enabled": "true",
+ "ranger.ldap.group.searchfilter": "{{ranger_ug_ldap_group_searchfilter}}",
+ "ranger.ldap.ad.referral": "ignore",
+ "ranger.ldap.ad.base.dn": "dc=example,dc=com",
+ "ranger.jpa.jdbc.password": "_",
+ "ranger.spnego.kerberos.keytab": "",
+ "ranger.sso.providerurl": "",
+ "ranger.unixauth.service.hostname": "{{ugsync_host}}",
+ "ranger.admin.kerberos.keytab": "",
+ "ranger.admin.kerberos.token.valid.seconds": "30",
+ "ranger.jpa.jdbc.driver": "com.mysql.jdbc.Driver",
+ "ranger.unixauth.service.port": "5151"
+ },
+ "usersync-properties": {
+ "SYNC_INTERVAL": "1",
+ "SYNC_LDAP_USERNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_FILTER": "-",
+ "SYNC_LDAP_URL": "ldap://localhost:389",
+ "SYNC_LDAP_GROUPNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_SCOPE": "sub",
+ "SYNC_LDAP_BIND_PASSWORD": "admin321",
+ "SYNC_LDAP_USER_NAME_ATTRIBUTE": "cn",
+ "MIN_UNIX_USER_ID_TO_SYNC": "1000",
+ "SYNC_LDAP_USER_SEARCH_BASE": "ou=users,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_OBJECT_CLASS": "person",
+ "CRED_KEYSTORE_FILENAME": "/usr/lib/xausersync/.jceks/xausersync.jceks",
+ "SYNC_SOURCE": "unix",
+ "SYNC_LDAP_BIND_DN": "cn=admin,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_GROUP_NAME_ATTRIBUTE": "memberof,ismemberof",
+ "logdir": "logs"
+ },
+ "usersync-properties": {
+ "SYNC_INTERVAL": "1",
+ "SYNC_LDAP_USERNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_FILTER": "-",
+ "SYNC_LDAP_URL": "ldap://localhost:389",
+ "SYNC_LDAP_GROUPNAME_CASE_CONVERSION": "none",
+ "SYNC_LDAP_USER_SEARCH_SCOPE": "sub",
+ "SYNC_LDAP_BIND_PASSWORD": "admin321",
+ "SYNC_LDAP_USER_NAME_ATTRIBUTE": "cn",
+ "MIN_UNIX_USER_ID_TO_SYNC": "1000",
+ "SYNC_LDAP_USER_SEARCH_BASE": "ou=users,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_OBJECT_CLASS": "person",
+ "CRED_KEYSTORE_FILENAME": "/usr/lib/xausersync/.jceks/xausersync.jceks",
+ "SYNC_SOURCE": "unix",
+ "SYNC_LDAP_BIND_DN": "cn=admin,dc=xasecure,dc=net",
+ "SYNC_LDAP_USER_GROUP_NAME_ATTRIBUTE": "memberof,ismemberof",
+ "logdir": "logs"
+ },
+ "ranger-env": {
+ "ranger_group": "ranger",
+ "ranger_admin_log_dir": "/var/log/ranger/admin",
+ "oracle_home": "-",
+ "admin_username": "admin",
+ "ranger_user": "ranger",
+ "ranger_admin_username": "amb_ranger_admin",
+ "admin_password": "admin",
+ "ranger_admin_password": "aa",
+ "ranger_usersync_log_dir": "/var/log/ranger/usersync",
+ "xml_configurations_supported" : "false"
+ },
+ "spark-javaopts-properties": {
+ "content": " "
+ },
+ "hadoop-env": {
+ "dtnode_heapsize": "1024m",
+ "namenode_opt_maxnewsize": "256m",
+ "hdfs_log_dir_prefix": "/var/log/hadoop",
+ "namenode_heapsize": "1024m",
+ "proxyuser_group": "users",
+ "hadoop_pid_dir_prefix": "/var/run/hadoop",
+ "content": "\n# Set Hadoop-specific environment variables here.\n\n# The only required environment variable is JAVA_HOME. All others are\n# optional. When running a distributed configuration it is best to\n# set JAVA_HOME in this file, so that it is correctly defined on\n# remote nodes.\n\n# The java implementation to use. Required.\nexport JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hadoop Configuration Directory\n\n{# this is different for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java runtime options. Empty by default.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# Command specific options appended to H
ADOOP_OPTS when specified\nexport HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xm
x{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}\"\nexport HADOOP_DATANODE_OPTS=\"-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport HADOOP_SECONDARYNAMENODE_OPTS=$HADOOP_NAMENODE_OPTS\n\n# The following appli
es to multiple commands (fs, dfs, fsck, distcp etc)\nexport HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS\"\n\n# On secure datanodes, user to run the datanode as after dropping privileges\nexport HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}\n\n# Extra ssh options. Empty by default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored. $HADOOP_HOME/logs by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# Where log files are stored in the secure data environment.\nexport HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.\n# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. Unset by default.\n# export HADOOP_MA
STER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave commands. Unset by default. This\n# can be useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are stored. /tmp by default.\nexport HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# History server pid\nexport HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n# A string representing this instance of hadoop. $USER by default.\nexport HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes. See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql connector\nfor jarFile in `ls /usr/share/java/*
mysql* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n# Add libraries required by oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n# Add libraries required by nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\n# added to the HADOOP_CLASSPATH\nif [ -d \"/usr/hdp/current/tez-client\" ]; then\n if [ -d \"/etc/tez/conf/\" ]; then\n # When using versioned RPMs, the tez-client will be a symlink to the current folder of tez in HDP.\n export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/etc/tez/conf/\n fi\nfi\n\n\n# Setting path to hdfs command line\nexport HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n# Mostly required for hadoop 2.0\nexport JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}\n\nexport HADOOP_OPTS=\"-Dhdp.version=$HDP_VERSION $HADOOP_OPTS\"
",
+ "hdfs_user": "hdfs",
+ "namenode_opt_newsize": "256m",
+ "hadoop_root_logger": "INFO,RFA",
+ "hadoop_heapsize": "1024",
+ "namenode_opt_maxpermsize": "256m",
+ "namenode_opt_permsize": "128m"
+ },
+ "slider-client": {
+ "slider.yarn.queue": "default"
+ },
+ "core-site": {
+ "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
+ },
+ "hdfs-site": {
+ "a": "b"
+ },
+ "yarn-site": {
+ "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+ "yarn.resourcemanager.address": "c6401.ambari.apache.org:8050",
+ "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
+ },
+ "cluster-env": {
+ "managed_hdfs_resource_property_names": "",
+ "security_enabled": "false",
+ "ignore_groupsusers_create": "false",
+ "smokeuser": "ambari-qa",
+ "kerberos_domain": "EXAMPLE.COM",
+ "user_group": "hadoop"
+ },
+ "ranger-knox-plugin-properties": {
+ "POLICY_MGR_URL": "{{policymgr_mgr_url}}",
+ "XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900",
+ "KNOX_HOME": "/usr/hdp/current/knox-server",
+ "XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:yyyyMMdd%",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit",
+ "common.name.for.certificate": "-",
+ "XAAUDIT.HDFS.IS_ENABLED": "false",
+ "SQL_CONNECTOR_JAR": "{{sql_connector_jar}}",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:yyyyMMdd-HHmm.ss%.log",
+ "REPOSITORY_NAME": "{{repo_name}}",
+ "SSL_KEYSTORE_PASSWORD": "myKeyFilePassword",
+ "XAAUDIT.DB.IS_ENABLED": "true",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS": "600",
+ "XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS": "60",
+ "XAAUDIT.SOLR.SOLR_URL": "http://localhost:6083/solr/ranger_audits",
+ "XAAUDIT.DB.DATABASE_NAME": "{{xa_audit_db_name}}",
+ "XAAUDIT.DB.HOSTNAME": "{{xa_db_host}}",
+ "XAAUDIT.SOLR.IS_ENABLED": "false",
+ "SSL_KEYSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-keystore.jks",
+ "ranger-knox-plugin-enabled": "Yes",
+ "XAAUDIT.DB.USER_NAME": "{{xa_audit_db_user}}",
+ "policy_user": "ambari-qa",
+ "XAAUDIT.HDFS.DESTINTATION_FILE": "%hostname%-audit.log",
+ "XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS": "86400",
+ "XAAUDIT.DB.PASSWORD": "{{xa_audit_db_password}}",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT": "10",
+ "SSL_TRUSTSTORE_PASSWORD": "changeit",
+ "XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit/archive",
+ "REPOSITORY_CONFIG_USERNAME": "admin",
+ "XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS": "1000",
+ "XAAUDIT.DB.FLAVOUR": "{{xa_audit_db_flavor}}",
+ "XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS": "60",
+ "SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
+ "REPOSITORY_CONFIG_PASSWORD": "admin-password",
+ "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "webhcat-site": {
+ "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
+ "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+ "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+ "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+ "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
+ },
+ "slider-log4j": {
+ "content": "log4jproperties\nline2"
+ },
+ "slider-env": {
+ "content": "envproperties\nline2"
+ },
+ "ranger-hbase-plugin-properties": {
+ "ranger-hbase-plugin-enabled":"yes"
+ },
+ "ranger-hive-plugin-properties": {
+ "ranger-hive-plugin-enabled":"yes"
+ }
+ },
+ "configuration_attributes": {
+ "yarn-site": {
+ "final": {
+ "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+ "yarn.nodemanager.container-executor.class": "true",
+ "yarn.nodemanager.local-dirs": "true"
+ }
+ },
+ "hdfs-site": {
+ "final": {
+ "dfs.web.ugi": "true",
+ "dfs.support.append": "true",
+ "dfs.cluster.administrators": "true"
+ }
+ },
+ "core-site": {
+ "final": {
+ "hadoop.proxyuser.hive.groups": "true",
+ "webinterface.private.actions": "true",
+ "hadoop.proxyuser.oozie.hosts": "true"
+ }
+ }
+ },
+ "configurationTags": {
+ "slider-client": {
+ "tag": "version1"
+ },
+ "slider-log4j": {
+ "tag": "version1"
+ },
+ "slider-env": {
+ "tag": "version1"
+ },
+ "core-site": {
+ "tag": "version1"
+ },
+ "hdfs-site": {
+ "tag": "version1"
+ },
+ "yarn-site": {
+ "tag": "version1"
+ },
+ "gateway-site": {
+ "tag": "version1"
+ },
+ "topology": {
+ "tag": "version1"
+ },
+ "users-ldif": {
+ "tag": "version1"
+ },
+ "kafka-env": {
+ "tag": "version1"
+ },
+ "kafka-log4j": {
+ "tag": "version1"
+ },
+ "kafka-broker": {
+ "tag": "version1"
+ }
+ },
+ "commandId": "7-1",
+ "clusterHostInfo": {
+ "ambari_server_host": [
+ "c6401.ambari.apache.org"
+ ],
+ "all_ping_ports": [
+ "8670",
+ "8670"
+ ],
+ "rm_host": [
+ "c6402.ambari.apache.org"
+ ],
+ "all_hosts": [
+ "c6401.ambari.apache.org",
+ "c6402.ambari.apache.org"
+ ],
+ "knox_gateway_hosts": [
+ "jaimin-knox-1.c.pramod-thangali.internal"
+ ],
+ "kafka_broker_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "zookeeper_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_admin_hosts": [
+ "c6401.ambari.apache.org"
+ ],
+ "ranger_usersync_hosts" : [
+ "c6408.ambari.apache.org"
+ ]
+
+ }
+}
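The fixture above deliberately sets "DB_FLAVOR": "UNSUPPORTED" so the tests under ambari-server/src/test/python can assert that an unrecognized database flavor is rejected. As a rough illustration of the rule being exercised — not the actual check, which lives in the stack's Python service scripts — here is a minimal standalone Java sketch; the class name and the supported-flavor list are assumptions, not taken from the commit:

import java.util.Arrays;
import java.util.List;

public class DbFlavorCheck {
  // Flavors inferred from the custom_*_jdbc_name keys in the fixture;
  // treat this list as illustrative, not authoritative.
  private static final List<String> SUPPORTED =
      Arrays.asList("MYSQL", "ORACLE", "POSTGRES", "MSSQL", "SQLA");

  static void validate(String dbFlavor) {
    if (dbFlavor == null || !SUPPORTED.contains(dbFlavor.toUpperCase())) {
      throw new IllegalArgumentException("Unsupported value for DB_FLAVOR: " + dbFlavor);
    }
  }

  public static void main(String[] args) {
    validate("UNSUPPORTED"); // same value as the fixture above; throws
  }
}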
[14/57] [abbrv] ambari git commit: AMBARI-21897: Fix NPE in
InstallHostTask (jluniya)
Posted by lp...@apache.org.
AMBARI-21897: Fix NPE in InstallHostTask (jluniya)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98b00094
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98b00094
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98b00094
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 98b00094c458c904b500919d4ab08190ff4c5155
Parents: 0c45d48
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Wed Sep 6 14:48:02 2017 -0700
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Wed Sep 6 14:48:02 2017 -0700
----------------------------------------------------------------------
.../server/topology/tasks/InstallHostTask.java | 24 +++++++++++---------
.../tasks/PersistHostResourcesTask.java | 3 +++
.../tasks/RegisterWithConfigGroupTask.java | 3 +++
.../server/topology/tasks/StartHostTask.java | 23 +++++++++++--------
4 files changed, 32 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/98b00094/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/InstallHostTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/InstallHostTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/InstallHostTask.java
index f38022a..ceb58c8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/InstallHostTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/InstallHostTask.java
@@ -50,19 +50,21 @@ public class InstallHostTask extends TopologyHostTask {
LOG.info("HostRequest: Executing INSTALL task for host: {}", hostRequest.getHostName());
boolean skipInstallTaskCreate = clusterTopology.getProvisionAction().equals(ProvisionAction.START_ONLY);
RequestStatusResponse response = clusterTopology.installHost(hostRequest.getHostName(), skipInstallTaskCreate, skipFailure);
- // map logical install tasks to physical install tasks
- List<ShortTaskStatus> underlyingTasks = response.getTasks();
- for (ShortTaskStatus task : underlyingTasks) {
+ if(response != null) {
+ // map logical install tasks to physical install tasks
+ List<ShortTaskStatus> underlyingTasks = response.getTasks();
+ for (ShortTaskStatus task : underlyingTasks) {
- String component = task.getRole();
- Long logicalInstallTaskId = hostRequest.getLogicalTasksForTopologyTask(this).get(component);
- if(logicalInstallTaskId == null) {
- LOG.info("Skipping physical install task registering, because component {} cannot be found", task.getRole());
- continue;
+ String component = task.getRole();
+ Long logicalInstallTaskId = hostRequest.getLogicalTasksForTopologyTask(this).get(component);
+ if (logicalInstallTaskId == null) {
+ LOG.info("Skipping physical install task registering, because component {} cannot be found", task.getRole());
+ continue;
+ }
+ //todo: for now only one physical task per component
+ long taskId = task.getTaskId();
+ hostRequest.registerPhysicalTaskId(logicalInstallTaskId, taskId);
}
- //todo: for now only one physical task per component
- long taskId = task.getTaskId();
- hostRequest.registerPhysicalTaskId(logicalInstallTaskId, taskId);
}
LOG.info("HostRequest: Exiting INSTALL task for host: {}", hostRequest.getHostName());
http://git-wip-us.apache.org/repos/asf/ambari/blob/98b00094/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/PersistHostResourcesTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/PersistHostResourcesTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/PersistHostResourcesTask.java
index e4b10c2..990aee7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/PersistHostResourcesTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/PersistHostResourcesTask.java
@@ -48,6 +48,7 @@ public class PersistHostResourcesTask extends TopologyHostTask {
@Override
public void runTask() {
LOG.info("HostRequest: Executing RESOURCE_CREATION task for host: {}", hostRequest.getHostName());
+
HostGroup group = hostRequest.getHostGroup();
Map<String, Collection<String>> serviceComponents = new HashMap<>();
for (String service : group.getServices()) {
@@ -55,5 +56,7 @@ public class PersistHostResourcesTask extends TopologyHostTask {
}
clusterTopology.getAmbariContext().createAmbariHostResources(hostRequest.getClusterId(),
hostRequest.getHostName(), serviceComponents);
+
+ LOG.info("HostRequest: Exiting RESOURCE_CREATION task for host: {}", hostRequest.getHostName());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/98b00094/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/RegisterWithConfigGroupTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/RegisterWithConfigGroupTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/RegisterWithConfigGroupTask.java
index 029f2a4..1613c19 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/RegisterWithConfigGroupTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/RegisterWithConfigGroupTask.java
@@ -43,8 +43,11 @@ public class RegisterWithConfigGroupTask extends TopologyHostTask {
@Override
public void runTask() {
LOG.info("HostRequest: Executing CONFIGURE task for host: {}", hostRequest.getHostName());
+
clusterTopology.getAmbariContext().registerHostWithConfigGroup(hostRequest.getHostName(), clusterTopology,
hostRequest.getHostgroupName());
+
+ LOG.info("HostRequest: Exiting CONFIGURE task for host: {}", hostRequest.getHostName());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/98b00094/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/StartHostTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/StartHostTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/StartHostTask.java
index 054ed1e..ab2ffbf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/StartHostTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/StartHostTask.java
@@ -47,19 +47,22 @@ public class StartHostTask extends TopologyHostTask {
@Override
public void runTask() {
LOG.info("HostRequest: Executing START task for host: {}", hostRequest.getHostName());
+
RequestStatusResponse response = clusterTopology.startHost(hostRequest.getHostName(), skipFailure);
- // map logical install tasks to physical install tasks
- List<ShortTaskStatus> underlyingTasks = response.getTasks();
- for (ShortTaskStatus task : underlyingTasks) {
+ if (response != null) {
+ // map logical install tasks to physical install tasks
+ List<ShortTaskStatus> underlyingTasks = response.getTasks();
+ for (ShortTaskStatus task : underlyingTasks) {
- String component = task.getRole();
- Long logicalStartTaskId = hostRequest.getLogicalTasksForTopologyTask(this).get(component);
- if(logicalStartTaskId == null) {
- LOG.info("Skipping physical start task registering, because component {} cannot be found", task.getRole());
- continue;
+ String component = task.getRole();
+ Long logicalStartTaskId = hostRequest.getLogicalTasksForTopologyTask(this).get(component);
+ if (logicalStartTaskId == null) {
+ LOG.info("Skipping physical start task registering, because component {} cannot be found", task.getRole());
+ continue;
+ }
+ // for now just set on outer map
+ hostRequest.registerPhysicalTaskId(logicalStartTaskId, task.getTaskId());
}
- // for now just set on outer map
- hostRequest.registerPhysicalTaskId(logicalStartTaskId, task.getTaskId());
}
LOG.info("HostRequest: Exiting START task for host: {}", hostRequest.getHostName());
[40/57] [abbrv] ambari git commit: AMBARI-21924. Positioning of 'Out
of sync' icon on Admin page is wrong (alexantonenko)
Posted by lp...@apache.org.
AMBARI-21924. Positioning of 'Out of sync' icon on Admin page is wrong (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4f23c1ec
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4f23c1ec
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4f23c1ec
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 4f23c1ec5ee5630b6bff48317247d83dc6fd9b65
Parents: 3ac4340
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Mon Sep 11 16:08:40 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Mon Sep 11 16:08:40 2017 +0300
----------------------------------------------------------------------
ambari-web/app/styles/stack_versions.less | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4f23c1ec/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 61cb177..e8afaf4 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -188,6 +188,7 @@
position: absolute;
left: -8px;
color: orange;
+ z-index: 3;
}
.state {
margin: 15px 0;
[29/57] [abbrv] ambari git commit: AMBARI-21911 "Retry" upgrade
doesn't work if a server action times out (dgrinenko)
Posted by lp...@apache.org.
AMBARI-21911 "Retry" upgrade doesn't work if a server action times out (dgrinenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2170ce03
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2170ce03
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2170ce03
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 2170ce0321b8ea2c772c5dcdce38c4d94bea8175
Parents: 2ab8b39
Author: Dmytro Grinenko <ha...@apache.org>
Authored: Fri Sep 8 15:29:45 2017 +0300
Committer: Dmytro Grinenko <ha...@apache.org>
Committed: Fri Sep 8 15:29:45 2017 +0300
----------------------------------------------------------------------
.../actionmanager/ActionDBAccessorImpl.java | 11 ++++
.../actionmanager/TestActionDBAccessorImpl.java | 63 +++++++++++++-------
2 files changed, 54 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2170ce03/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
index f0e2ce7..063ea1c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
@@ -538,6 +538,12 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
reportedTaskStatus = HostRoleStatus.SKIPPED_FAILED;
}
}
+
+ // if TIMEDOUT and retry is allowed then set status = HOLDING_TIMEDOUT
+ if (reportedTaskStatus == HostRoleStatus.TIMEDOUT && commandEntity.isRetryAllowed()){
+ reportedTaskStatus = HostRoleStatus.HOLDING_TIMEDOUT;
+ }
+
if (!existingTaskStatus.isCompletedState()) {
commandEntity.setStatus(reportedTaskStatus);
}
@@ -601,6 +607,11 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
}
}
+ // if TIMEDOUT and retry is allowed then set status = HOLDING_TIMEDOUT
+ if (status == HostRoleStatus.TIMEDOUT && command.isRetryAllowed()){
+ status = HostRoleStatus.HOLDING_TIMEDOUT;
+ }
+
command.setStatus(status);
command.setStdOut(report.getStdOut().getBytes());
command.setStdError(report.getStdErr().getBytes());
http://git-wip-us.apache.org/repos/asf/ambari/blob/2170ce03/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index c449aae..94799cc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -132,7 +132,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testActionResponse() throws AmbariException {
String hostname = "host1";
- populateActionDB(db, hostname, requestId, stageId);
+ populateActionDB(db, hostname, requestId, stageId, false);
Stage stage = db.getAllStages(requestId).get(0);
Assert.assertEquals(stageId, stage.getStageId());
stage.setHostRoleStatus(hostname, "HBASE_MASTER", HostRoleStatus.QUEUED);
@@ -160,7 +160,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testCancelCommandReport() throws AmbariException {
String hostname = "host1";
- populateActionDB(db, hostname, requestId, stageId);
+ populateActionDB(db, hostname, requestId, stageId, false);
Stage stage = db.getAllStages(requestId).get(0);
Assert.assertEquals(stageId, stage.getStageId());
stage.setHostRoleStatus(hostname, "HBASE_MASTER", HostRoleStatus.ABORTED);
@@ -191,8 +191,8 @@ public class TestActionDBAccessorImpl {
@Test
public void testGetStagesInProgress() throws AmbariException {
List<Stage> stages = new ArrayList<>();
- stages.add(createStubStage(hostName, requestId, stageId));
- stages.add(createStubStage(hostName, requestId, stageId + 1));
+ stages.add(createStubStage(hostName, requestId, stageId, false));
+ stages.add(createStubStage(hostName, requestId, stageId + 1, false));
Request request = new Request(stages, "", clusters);
db.persistActions(request);
assertEquals(2, stages.size());
@@ -200,8 +200,8 @@ public class TestActionDBAccessorImpl {
@Test
public void testGetStagesInProgressWithFailures() throws AmbariException {
- populateActionDB(db, hostName, requestId, stageId);
- populateActionDB(db, hostName, requestId + 1, stageId);
+ populateActionDB(db, hostName, requestId, stageId, false);
+ populateActionDB(db, hostName, requestId + 1, stageId, false);
List<Stage> stages = db.getFirstStageInProgressPerRequest();
assertEquals(2, stages.size());
@@ -289,7 +289,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testPersistActions() throws AmbariException {
- populateActionDB(db, hostName, requestId, stageId);
+ populateActionDB(db, hostName, requestId, stageId, false);
for (Stage stage : db.getAllStages(requestId)) {
log.info("taskId={}" + stage.getExecutionCommands(hostName).get(0).
getExecutionCommand().getTaskId());
@@ -302,7 +302,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testHostRoleScheduled() throws InterruptedException, AmbariException {
- populateActionDB(db, hostName, requestId, stageId);
+ populateActionDB(db, hostName, requestId, stageId, false);
Stage stage = db.getStage(StageUtils.getActionId(requestId, stageId));
assertEquals(HostRoleStatus.PENDING, stage.getHostRoleStatus(hostName, Role.HBASE_MASTER.toString()));
List<HostRoleCommandEntity> entities=
@@ -421,7 +421,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testUpdateHostRole() throws Exception {
- populateActionDB(db, hostName, requestId, stageId);
+ populateActionDB(db, hostName, requestId, stageId, false);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 50000; i++) {
sb.append("1234567890");
@@ -452,13 +452,36 @@ public class TestActionDBAccessorImpl {
}
@Test
+ public void testUpdateHostRoleTimeoutRetry() throws Exception {
+ populateActionDB(db, hostName, requestId, stageId, true);
+
+ CommandReport commandReport = new CommandReport();
+ commandReport.setStatus(HostRoleStatus.TIMEDOUT.toString());
+ commandReport.setStdOut("");
+ commandReport.setStdErr("");
+ commandReport.setStructuredOut("");
+ commandReport.setExitCode(123);
+ db.updateHostRoleState(hostName, requestId, stageId, Role.HBASE_MASTER.toString(), commandReport);
+
+ List<HostRoleCommandEntity> commandEntities =
+ hostRoleCommandDAO.findByHostRole(hostName, requestId, stageId, Role.HBASE_MASTER.toString());
+
+ HostRoleCommandEntity commandEntity = commandEntities.get(0);
+ HostRoleCommand command = db.getTask(commandEntity.getTaskId());
+ assertNotNull(command);
+ assertEquals(HostRoleStatus.HOLDING_TIMEDOUT, command.getStatus());
+
+ }
+
+
+ @Test
public void testGetRequestsByStatus() throws AmbariException {
List<Long> requestIds = new ArrayList<>();
requestIds.add(requestId + 1);
requestIds.add(requestId);
- populateActionDB(db, hostName, requestId, stageId);
+ populateActionDB(db, hostName, requestId, stageId, false);
clusters.addHost("host2");
- populateActionDB(db, hostName, requestId + 1, stageId);
+ populateActionDB(db, hostName, requestId + 1, stageId, false);
List<Long> requestIdsResult =
db.getRequestsByStatus(null, BaseRequest.DEFAULT_PAGE_SIZE, false);
@@ -508,7 +531,7 @@ public class TestActionDBAccessorImpl {
}
for (Long id : ids) {
- populateActionDB(db, hostName, id, stageId);
+ populateActionDB(db, hostName, id, stageId, false);
}
List<Long> expected = null;
@@ -617,7 +640,7 @@ public class TestActionDBAccessorImpl {
@Test
public void testEntitiesCreatedWithIDs() throws Exception {
List<Stage> stages = new ArrayList<>();
- Stage stage = createStubStage(hostName, requestId, stageId);
+ Stage stage = createStubStage(hostName, requestId, stageId, false);
stages.add(stage);
@@ -707,8 +730,8 @@ public class TestActionDBAccessorImpl {
}
private void populateActionDB(ActionDBAccessor db, String hostname,
- long requestId, long stageId) throws AmbariException {
- Stage s = createStubStage(hostname, requestId, stageId);
+ long requestId, long stageId, boolean retryAllowed) throws AmbariException {
+ Stage s = createStubStage(hostname, requestId, stageId, retryAllowed);
List<Stage> stages = new ArrayList<>();
stages.add(s);
Request request = new Request(stages, "", clusters);
@@ -721,7 +744,7 @@ public class TestActionDBAccessorImpl {
List<Stage> stages = new ArrayList<>();
for (int i = 0; i < numberOfStages; i++) {
- Stage stage = createStubStage(hostname, requestId, stageId + i);
+ Stage stage = createStubStage(hostname, requestId, stageId + i, false);
stages.add(stage);
}
@@ -732,7 +755,7 @@ public class TestActionDBAccessorImpl {
private void populateActionDBWithCompletedRequest(ActionDBAccessor db, String hostname,
long requestId, long stageId) throws AmbariException {
- Stage s = createStubStage(hostname, requestId, stageId);
+ Stage s = createStubStage(hostname, requestId, stageId, false);
List<Stage> stages = new ArrayList<>();
stages.add(s);
Request request = new Request(stages, "", clusters);
@@ -745,7 +768,7 @@ public class TestActionDBAccessorImpl {
private void populateActionDBWithPartiallyCompletedRequest(ActionDBAccessor db, String hostname,
long requestId, long stageId) throws AmbariException {
- Stage s = createStubStage(hostname, requestId, stageId);
+ Stage s = createStubStage(hostname, requestId, stageId, false);
List<Stage> stages = new ArrayList<>();
stages.add(s);
@@ -756,14 +779,14 @@ public class TestActionDBAccessorImpl {
db.persistActions(request);
}
- private Stage createStubStage(String hostname, long requestId, long stageId) {
+ private Stage createStubStage(String hostname, long requestId, long stageId, boolean retryAllowed) {
Stage s = stageFactory.createNew(requestId, "/a/b", "cluster1", 1L, "action db accessor test",
"commandParamsStage", "hostParamsStage");
s.setStageId(stageId);
s.addHostRoleExecutionCommand(hostname, Role.HBASE_MASTER,
RoleCommand.START,
new ServiceComponentHostStartEvent(Role.HBASE_MASTER.toString(),
- hostname, System.currentTimeMillis()), "cluster1", "HBASE", false, false);
+ hostname, System.currentTimeMillis()), "cluster1", "HBASE", retryAllowed, false);
s.addHostRoleExecutionCommand(
hostname,
Role.HBASE_REGIONSERVER,
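The core of the fix above is a small status-mapping rule: a TIMEDOUT command whose entity allows retry is recorded as HOLDING_TIMEDOUT, so the upgrade can be resumed via "Retry" instead of ending in a terminal state. A distilled, runnable sketch of that rule — a local enum stands in for the real HostRoleStatus:

class TimeoutHoldSketch {
  enum Status { TIMEDOUT, HOLDING_TIMEDOUT }

  // Mirrors the mapping added in ActionDBAccessorImpl: park retry-allowed
  // timeouts in a holding state instead of a terminal one.
  static Status map(Status reported, boolean retryAllowed) {
    if (reported == Status.TIMEDOUT && retryAllowed) {
      return Status.HOLDING_TIMEDOUT;
    }
    return reported;
  }

  public static void main(String[] args) {
    System.out.println(map(Status.TIMEDOUT, true));   // HOLDING_TIMEDOUT
    System.out.println(map(Status.TIMEDOUT, false));  // TIMEDOUT
  }
}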
[47/57] [abbrv] ambari git commit: AMBARI-21307 Rest resource for
supporting ambari configurations
Posted by lp...@apache.org.
AMBARI-21307 Rest resource for supporting ambari configurations
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/700bce9d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/700bce9d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/700bce9d
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 700bce9de4c9608425491f204b9a45d7916521f9
Parents: c241b9a
Author: lpuskas <lp...@apache.org>
Authored: Wed Jul 5 14:20:18 2017 +0200
Committer: lpuskas <la...@sequenceiq.com>
Committed: Tue Sep 12 12:03:00 2017 +0200
----------------------------------------------------------------------
.../resources/ResourceInstanceFactoryImpl.java | 4 +
.../services/AmbariConfigurationService.java | 146 +++++++++++
.../internal/AbstractProviderModule.java | 2 +-
.../AmbariConfigurationResourceProvider.java | 247 +++++++++++++++++++
.../internal/DefaultProviderModule.java | 26 +-
.../ambari/server/controller/spi/Resource.java | 5 +-
.../server/orm/dao/AmbariConfigurationDAO.java | 92 +++++++
.../orm/entities/AmbariConfigurationEntity.java | 70 ++++++
.../orm/entities/ConfigurationBaseEntity.java | 159 ++++++++++++
.../authorization/RoleAuthorization.java | 95 +++----
.../resources/Ambari-DDL-Postgres-CREATE.sql | 25 +-
.../src/main/resources/META-INF/persistence.xml | 2 +
12 files changed, 809 insertions(+), 64 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
index d0d115d..f5fb6e9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
@@ -471,6 +471,10 @@ public class ResourceInstanceFactoryImpl implements ResourceInstanceFactory {
case RemoteCluster:
resourceDefinition = new RemoteClusterResourceDefinition();
break;
+ case AmbariConfiguration:
+ resourceDefinition = new SimpleResourceDefinition(Resource.Type.AmbariConfiguration, "ambariconfiguration", "ambariconfigurations");
+
+ break;
default:
throw new IllegalArgumentException("Unsupported resource type: " + type);
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
new file mode 100644
index 0000000..0fa6e44
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariConfigurationService.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import java.util.Collections;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.http.HttpStatus;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiResponse;
+import io.swagger.annotations.ApiResponses;
+
+/**
+ * REST endpoint for managing ambari configurations. Supports CRUD operations.
+ * Ambari configurations are resources that relate to the ambari server instance even before a cluster is provisioned.
+ *
+ * Ambari configuration resources may be shared with components and services in the cluster
+ * (by recommending them as default values)
+ *
+ * E.g. an LDAP configuration is stored as an ambariconfiguration.
+ * The request payload has the form:
+ *
+ * <pre>
+ * {
+ * "AmbariConfiguration": {
+ * "type": "ldap-configuration",
+ * "data": [
+ * {
+ * "authentication.ldap.primaryUrl": "localhost:33389"
+ * },
+ * {
+ * "authentication.ldap.secondaryUrl": "localhost:333"
+ * },
+ * {
+ * "authentication.ldap.baseDn": "dc=ambari,dc=apache,dc=org"
+ * }
+ * // ......
+ * ]
+ * }
+ * }
+ * </pre>
+ */
+@Path("/configurations/")
+@Api(value = "/configurations", description = "Endpoint for Ambari configuration related operations")
+public class AmbariConfigurationService extends BaseService {
+
+ /**
+ * Creates an ambari configuration resource.
+ *
+ * @param body the payload in json format
+ * @param headers http headers
+ * @param uri request uri information
+ * @return
+ */
+ @POST
+ @Produces(MediaType.TEXT_PLAIN)
+ @ApiOperation(value = "Creates an ambari configuration resource")
+ @ApiImplicitParams({
+ @ApiImplicitParam(dataType = "", paramType = PARAM_TYPE_BODY)
+ })
+ @ApiResponses({
+ @ApiResponse(code = HttpStatus.SC_CREATED, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_ACCEPTED, message = MSG_REQUEST_ACCEPTED),
+ @ApiResponse(code = HttpStatus.SC_BAD_REQUEST, message = MSG_INVALID_ARGUMENTS),
+ @ApiResponse(code = HttpStatus.SC_CONFLICT, message = MSG_RESOURCE_ALREADY_EXISTS),
+ @ApiResponse(code = HttpStatus.SC_UNAUTHORIZED, message = MSG_NOT_AUTHENTICATED),
+ @ApiResponse(code = HttpStatus.SC_FORBIDDEN, message = MSG_PERMISSION_DENIED),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR),
+ })
+ public Response createAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri) {
+ return handleRequest(headers, body, uri, Request.Type.POST, createResource(Resource.Type.AmbariConfiguration,
+ Collections.EMPTY_MAP));
+ }
+
+ @GET
+ @Produces(MediaType.TEXT_PLAIN)
+ @ApiOperation(value = "Retrieve ambari configuration resources")
+ public Response getAmbariConfigurations(String body, @Context HttpHeaders headers, @Context UriInfo uri) {
+ return handleRequest(headers, body, uri, Request.Type.GET, createResource(Resource.Type.AmbariConfiguration,
+ Collections.EMPTY_MAP));
+ }
+
+ @GET
+ @Path("{configurationId}")
+ @Produces(MediaType.TEXT_PLAIN)
+ @ApiOperation(value = "Retrieve ambari configuration resource")
+ public Response getAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri,
+ @PathParam("configurationId") String configurationId) {
+ return handleRequest(headers, body, uri, Request.Type.GET, createResource(Resource.Type.AmbariConfiguration,
+ Collections.singletonMap(Resource.Type.AmbariConfiguration, configurationId)));
+ }
+
+ @PUT
+ @Produces(MediaType.TEXT_PLAIN)
+ @ApiOperation(value = "Update ambari configuration resources")
+ public Response updateAmbariConfiguration() {
+ throw new UnsupportedOperationException("Not yet implemented");
+ }
+
+ @DELETE
+ @Path("{configurationId}")
+ @Produces(MediaType.TEXT_PLAIN)
+ @ApiOperation(value = "Deletes an ambari configuration resource")
+ @ApiResponses({
+ @ApiResponse(code = HttpStatus.SC_OK, message = MSG_SUCCESSFUL_OPERATION),
+ @ApiResponse(code = HttpStatus.SC_NOT_FOUND, message = MSG_RESOURCE_NOT_FOUND),
+ @ApiResponse(code = HttpStatus.SC_UNAUTHORIZED, message = MSG_NOT_AUTHENTICATED),
+ @ApiResponse(code = HttpStatus.SC_FORBIDDEN, message = MSG_PERMISSION_DENIED),
+ @ApiResponse(code = HttpStatus.SC_INTERNAL_SERVER_ERROR, message = MSG_SERVER_ERROR),
+ })
+ public Response deleteAmbariConfiguration(String body, @Context HttpHeaders headers, @Context UriInfo uri,
+ @PathParam("configurationId") String configurationId) {
+ return handleRequest(headers, body, uri, Request.Type.DELETE, createResource(Resource.Type.AmbariConfiguration,
+ Collections.singletonMap(Resource.Type.AmbariConfiguration, configurationId)));
+ }
+
+}
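Given the @Path("/configurations/") mapping above, creating a configuration is a plain authenticated POST carrying the payload shown in the Javadoc. A hedged client sketch follows; the base URL, port, credentials, and the /api/v1 prefix are assumptions, not confirmed by this diff:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class CreateAmbariConfiguration {
  public static void main(String[] args) throws Exception {
    // Payload mirrors the Javadoc example above.
    String payload = "{ \"AmbariConfiguration\": {"
        + " \"type\": \"ldap-configuration\","
        + " \"data\": [ { \"authentication.ldap.primaryUrl\": \"localhost:33389\" } ] } }";

    URL url = new URL("http://localhost:8080/api/v1/configurations/"); // assumed base path
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    String auth = Base64.getEncoder()
        .encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8)); // assumed credentials
    conn.setRequestProperty("Authorization", "Basic " + auth);
    conn.setRequestProperty("X-Requested-By", "ambari"); // conventional for Ambari write calls
    try (OutputStream os = conn.getOutputStream()) {
      os.write(payload.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode()); // expect 201 Created on success
  }
}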
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 1cd2d10..1501a01 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -224,7 +224,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
* are going to work unless refactoring is complete.
*/
@Inject
- AmbariManagementController managementController;
+ protected AmbariManagementController managementController;
@Inject
TimelineMetricCacheProvider metricCacheProvider;
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
new file mode 100644
index 0000000..5e5af9e
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariConfigurationResourceProvider.java
@@ -0,0 +1,247 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.inject.Inject;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.NoSuchResourceException;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.controller.utilities.PredicateHelper;
+import org.apache.ambari.server.orm.dao.AmbariConfigurationDAO;
+import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import org.apache.ambari.server.orm.entities.ConfigurationBaseEntity;
+import org.apache.ambari.server.security.authorization.RoleAuthorization;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Sets;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * Resource provider for AmbariConfiguration resources.
+ */
+@StaticallyInject
+public class AmbariConfigurationResourceProvider extends AbstractAuthorizedResourceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariConfigurationResourceProvider.class);
+ private static final String DEFAULT_VERSION_TAG = "Default version";
+ private static final Integer DEFAULT_VERSION = 1;
+
+ /**
+ * Resource property id constants.
+ */
+ private enum ResourcePropertyId {
+
+ ID("AmbariConfiguration/id"),
+ TYPE("AmbariConfiguration/type"),
+ VERSION("AmbariConfiguration/version"),
+ VERSION_TAG("AmbariConfiguration/version_tag"),
+ DATA("AmbariConfiguration/data");
+
+ private String propertyId;
+
+ ResourcePropertyId(String propertyId) {
+ this.propertyId = propertyId;
+ }
+
+ String getPropertyId() {
+ return this.propertyId;
+ }
+
+ public static ResourcePropertyId fromString(String propertyIdStr) {
+ ResourcePropertyId propertyIdFromStr = null;
+
+ for (ResourcePropertyId id : ResourcePropertyId.values()) {
+ if (id.getPropertyId().equals(propertyIdStr)) {
+ propertyIdFromStr = id;
+ break;
+ }
+ }
+
+ if (propertyIdFromStr == null) {
+ throw new IllegalArgumentException("Unsupported property type: " + propertyIdStr);
+ }
+
+ return propertyIdFromStr;
+
+ }
+ }
+
+ private static Set<String> properties = Sets.newHashSet(
+ ResourcePropertyId.ID.getPropertyId(),
+ ResourcePropertyId.TYPE.getPropertyId(),
+ ResourcePropertyId.VERSION.getPropertyId(),
+ ResourcePropertyId.VERSION_TAG.getPropertyId(),
+ ResourcePropertyId.DATA.getPropertyId());
+
+ private static Map<Resource.Type, String> pkPropertyMap = Collections.unmodifiableMap(
+ new HashMap<Resource.Type, String>() {{
+ put(Resource.Type.AmbariConfiguration, ResourcePropertyId.ID.getPropertyId());
+ }}
+ );
+
+
+ @Inject
+ private static AmbariConfigurationDAO ambariConfigurationDAO;
+
+ private Gson gson;
+
+ protected AmbariConfigurationResourceProvider() {
+ super(properties, pkPropertyMap);
+ setRequiredCreateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
+ setRequiredDeleteAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_CONFIGURATION));
+
+ gson = new GsonBuilder().create();
+ }
+
+ @Override
+ protected Set<String> getPKPropertyIds() {
+ return Sets.newHashSet(ResourcePropertyId.ID.getPropertyId());
+ }
+
+ @Override
+ public RequestStatus createResourcesAuthorized(Request request) throws SystemException, UnsupportedPropertyException,
+ ResourceAlreadyExistsException, NoSuchParentResourceException {
+
+ LOGGER.info("Creating new ambari configuration resource ...");
+ AmbariConfigurationEntity ambariConfigurationEntity = getEntityFromRequest(request);
+
+ LOGGER.info("Persisting new ambari configuration: {} ", ambariConfigurationEntity);
+ ambariConfigurationDAO.persist(ambariConfigurationEntity);
+
+ return getRequestStatus(null);
+ }
+
+
+ @Override
+ protected Set<Resource> getResourcesAuthorized(Request request, Predicate predicate) throws SystemException,
+ UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
+ Set<Resource> resources = Sets.newHashSet();
+
+ List<AmbariConfigurationEntity> ambariConfigurationEntities = ambariConfigurationDAO.findAll();
+ for (AmbariConfigurationEntity ambariConfigurationEntity : ambariConfigurationEntities) {
+ try {
+ resources.add(toResource(ambariConfigurationEntity, getPropertyIds()));
+ } catch (AmbariException e) {
+ LOGGER.error("Error while retrieving ambari configuration", e);
+ }
+ }
+ return resources;
+ }
+
+ @Override
+ protected RequestStatus deleteResourcesAuthorized(Request request, Predicate predicate) throws SystemException,
+ UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
+
+ Object rawId = PredicateHelper.getProperties(predicate).get(ResourcePropertyId.ID.getPropertyId());
+
+ if (rawId == null) {
+ LOGGER.debug("No resource id provided in the request");
+ } else {
+ Long idFromRequest = Long.valueOf((String) rawId);
+ LOGGER.debug("Deleting ambari configuration with id: {}", idFromRequest);
+ try {
+ ambariConfigurationDAO.deleteById(idFromRequest);
+ } catch (IllegalStateException e) {
+ throw new NoSuchResourceException(e.getMessage());
+ }
+ }
+
+ return getRequestStatus(null);
+
+ }
+
+ private Resource toResource(AmbariConfigurationEntity entity, Set<String> requestedIds) throws AmbariException {
+ Resource resource = new ResourceImpl(Resource.Type.AmbariConfiguration);
+ Set<Map<String, String>> configurationSet = gson.fromJson(entity.getConfigurationBaseEntity().getConfigurationData(), new TypeToken<Set<Map<String, String>>>() {}.getType());
+
+ setResourceProperty(resource, ResourcePropertyId.ID.getPropertyId(), entity.getId(), requestedIds);
+ setResourceProperty(resource, ResourcePropertyId.TYPE.getPropertyId(), entity.getConfigurationBaseEntity().getType(), requestedIds);
+ setResourceProperty(resource, ResourcePropertyId.DATA.getPropertyId(), configurationSet, requestedIds);
+
+ return resource;
+ }
+
+ private AmbariConfigurationEntity getEntityFromRequest(Request request) {
+
+ AmbariConfigurationEntity ambariConfigurationEntity = new AmbariConfigurationEntity();
+ ambariConfigurationEntity.setConfigurationBaseEntity(new ConfigurationBaseEntity());
+
+
+ for (ResourcePropertyId resourcePropertyId : ResourcePropertyId.values()) {
+ Object requestValue = getValueFromRequest(resourcePropertyId, request);
+
+ switch (resourcePropertyId) {
+ case DATA:
+ if (requestValue == null) {
+ throw new IllegalArgumentException("No configuration data is provided in the request");
+ }
+
+ ambariConfigurationEntity.getConfigurationBaseEntity().setConfigurationData(gson.toJson(requestValue));
+ break;
+ case TYPE:
+ ambariConfigurationEntity.getConfigurationBaseEntity().setType((String) requestValue);
+ break;
+
+ case VERSION:
+ Integer version = (requestValue == null) ? DEFAULT_VERSION : (Integer) requestValue;
+ ambariConfigurationEntity.getConfigurationBaseEntity().setVersion(version);
+ break;
+ case VERSION_TAG:
+ String versionTag = requestValue == null ? DEFAULT_VERSION_TAG : (String) requestValue;
+ ambariConfigurationEntity.getConfigurationBaseEntity().setVersionTag(versionTag);
+ break;
+ default:
+ LOGGER.debug("Ignored property in the request: {}", resourcePropertyId);
+ break;
+ }
+ }
+ ambariConfigurationEntity.getConfigurationBaseEntity().setCreateTimestamp(Calendar.getInstance().getTimeInMillis());
+ return ambariConfigurationEntity;
+
+ }
+
+ private Object getValueFromRequest(ResourcePropertyId resourcePropertyIdEnum, Request request) {
+ LOGGER.debug("Locating resource property [{}] in the request ...", resourcePropertyIdEnum);
+ Object requestValue = null;
+ for (Map<String, Object> propertyMap : request.getProperties()) {
+ if (propertyMap.containsKey(resourcePropertyIdEnum.getPropertyId())) {
+ requestValue = propertyMap.get(resourcePropertyIdEnum.getPropertyId());
+ LOGGER.debug("Found resource property {} in the request, value: {} ...", resourcePropertyIdEnum, requestValue);
+ break;
+ }
+ }
+ return requestValue;
+ }
+
+}
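A note on the data round trip above: getEntityFromRequest() serializes the "AmbariConfiguration/data" property to a JSON string with Gson before persisting it, and toResource() parses that string back for the response. A minimal, self-contained sketch of the same round trip, using only the Gson calls seen in this file (the property content below is made up for illustration and is not taken from the Ambari API):

import java.util.Collections;
import java.util.Map;
import java.util.Set;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;

public class DataRoundTripSketch {
  public static void main(String[] args) {
    Gson gson = new GsonBuilder().create();

    // Hypothetical payload for the "AmbariConfiguration/data" property.
    Set<Map<String, String>> data = Collections.singleton(
        Collections.singletonMap("authentication.ldap.primaryUrl", "ldap://localhost:389"));

    // getEntityFromRequest() stores this JSON string in configuration_base.data.
    String stored = gson.toJson(data);

    // toResource() reads it back for the REST response.
    Set<Map<String, String>> restored = gson.fromJson(
        stored, new TypeToken<Set<Map<String, String>>>() {}.getType());

    System.out.println(stored + " -> " + restored);
  }
}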
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
index 074f8e1..6e7ca0a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
@@ -21,20 +21,18 @@ package org.apache.ambari.server.controller.internal;
import java.util.Map;
import java.util.Set;
-import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.AmbariServer;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceProvider;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
-
-import com.google.inject.Inject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The default provider module implementation.
*/
public class DefaultProviderModule extends AbstractProviderModule {
- @Inject
- private AmbariManagementController managementController;
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DefaultProviderModule.class);
// ----- Constructors ------------------------------------------------------
@@ -42,9 +40,7 @@ public class DefaultProviderModule extends AbstractProviderModule {
* Create a default provider module.
*/
public DefaultProviderModule() {
- if (managementController == null) {
- managementController = AmbariServer.getController();
- }
+ super();
}
@@ -52,8 +48,10 @@ public class DefaultProviderModule extends AbstractProviderModule {
@Override
protected ResourceProvider createResourceProvider(Resource.Type type) {
- Set<String> propertyIds = PropertyHelper.getPropertyIds(type);
- Map<Resource.Type,String> keyPropertyIds = PropertyHelper.getKeyPropertyIds(type);
+
+ LOGGER.debug("Creating resource provider for the type: {}", type);
+ Set<String> propertyIds = PropertyHelper.getPropertyIds(type);
+ Map<Resource.Type, String> keyPropertyIds = PropertyHelper.getKeyPropertyIds(type);
switch (type.getInternalType()) {
case Workflow:
@@ -124,10 +122,12 @@ public class DefaultProviderModule extends AbstractProviderModule {
return new ArtifactResourceProvider(managementController);
case RemoteCluster:
return new RemoteClusterResourceProvider();
-
+ case AmbariConfiguration:
+ return new AmbariConfigurationResourceProvider();
default:
+ LOGGER.debug("Delegating creation of resource provider for: {} to the AbstractControllerResourceProvider", type.getInternalType());
return AbstractControllerResourceProvider.getResourceProvider(type, propertyIds,
- keyPropertyIds, managementController);
+ keyPropertyIds, managementController);
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
index 362b4e6..7835373 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
@@ -160,7 +160,8 @@ public interface Resource {
VersionDefinition,
ClusterKerberosDescriptor,
LoggingQuery,
- RemoteCluster;
+ RemoteCluster,
+ AmbariConfiguration;
/**
* Get the {@link Type} that corresponds to this InternalType.
@@ -282,6 +283,8 @@ public interface Resource {
public static final Type ClusterKerberosDescriptor = InternalType.ClusterKerberosDescriptor.getType();
public static final Type LoggingQuery = InternalType.LoggingQuery.getType();
public static final Type RemoteCluster = InternalType.RemoteCluster.getType();
+ public static final Type AmbariConfiguration = InternalType.AmbariConfiguration.getType();
+
/**
* The type name.
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
new file mode 100644
index 0000000..dea37eb
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AmbariConfigurationDAO.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.orm.dao;
+
+import java.util.List;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+import javax.inject.Singleton;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+
+import org.apache.ambari.server.orm.RequiresSession;
+import org.apache.ambari.server.orm.entities.AmbariConfigurationEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.persist.Transactional;
+
+/**
+ * DAO handling JPA operations for Ambari configuration entities.
+ */
+
+@Singleton
+// todo extend CrudDao (amend crud dao to handle NPEs)
+public class AmbariConfigurationDAO {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AmbariConfigurationDAO.class);
+
+ @Inject
+ private Provider<EntityManager> entityManagerProvider;
+
+ /**
+ * DAO utilities for dealing mostly with {@link TypedQuery} results.
+ */
+ @Inject
+ private DaoUtils daoUtils;
+
+ public AmbariConfigurationEntity findById(Long id) {
+ return entityManagerProvider.get().find(AmbariConfigurationEntity.class, id);
+ }
+
+ @RequiresSession
+ @Transactional
+ public void persist(AmbariConfigurationEntity entity) {
+ LOGGER.debug("Persisting ambari configuration: {}", entity);
+ entityManagerProvider.get().persist(entity);
+ }
+
+ @RequiresSession
+ public List<AmbariConfigurationEntity> findAll() {
+ TypedQuery<AmbariConfigurationEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AmbariConfigurationEntity.findAll", AmbariConfigurationEntity.class);
+ return daoUtils.selectList(query);
+ }
+
+
+ @RequiresSession
+ @Transactional
+ public void deleteById(Long ambariConfigurationId) {
+
+ if (ambariConfigurationId == null) {
+ throw new IllegalArgumentException("No Ambari Configuration id provided.");
+ }
+
+ LOGGER.debug("Removing Ambari Configuration with id :{}", ambariConfigurationId);
+
+ AmbariConfigurationEntity ambariConfigurationEntity = findById(ambariConfigurationId);
+ if (ambariConfigurationEntity == null) {
+ String msg = String.format("No Ambari Configuration found with id: %s", ambariConfigurationId);
+ LOGGER.debug(msg);
+ throw new IllegalStateException(msg);
+ }
+
+ entityManagerProvider.get().remove(ambariConfigurationEntity);
+ LOGGER.debug("Ambari Configuration with id: {}", ambariConfigurationId);
+ }
+
+
+}
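The DAO's deleteById() signals a missing row by throwing IllegalStateException, which AmbariConfigurationResourceProvider translates into NoSuchResourceException earlier in this patch. A self-contained sketch of that contract, with a plain HashMap standing in for the JPA EntityManager (illustrative only, not the Ambari implementation):

import java.util.HashMap;
import java.util.Map;

public class DeleteContractSketch {
  private final Map<Long, String> store = new HashMap<>();

  public void deleteById(Long id) {
    if (id == null) {
      throw new IllegalArgumentException("No Ambari Configuration id provided.");
    }
    if (store.remove(id) == null) {
      // Mirrors the DAO: absence surfaces as IllegalStateException for the caller to map.
      throw new IllegalStateException("No Ambari Configuration found with id: " + id);
    }
  }

  public static void main(String[] args) {
    DeleteContractSketch dao = new DeleteContractSketch();
    dao.store.put(1L, "some configuration");
    dao.deleteById(1L); // succeeds
    try {
      dao.deleteById(1L); // the row is gone now
    } catch (IllegalStateException e) {
      System.out.println("provider maps this to NoSuchResourceException: " + e.getMessage());
    }
  }
}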
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
new file mode 100644
index 0000000..34fa221
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AmbariConfigurationEntity.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.orm.entities;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.MapsId;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+@Entity
+@Table(name = "ambari_configuration")
+@NamedQueries({
+ @NamedQuery(
+ name = "AmbariConfigurationEntity.findAll",
+ query = "select ace from AmbariConfigurationEntity ace")
+})
+
+public class AmbariConfigurationEntity {
+
+ @Id
+ @Column(name = "id")
+ private Long id;
+
+ @OneToOne(cascade = CascadeType.ALL)
+ @MapsId
+ @JoinColumn(name = "id")
+ private ConfigurationBaseEntity configurationBaseEntity;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public ConfigurationBaseEntity getConfigurationBaseEntity() {
+ return configurationBaseEntity;
+ }
+
+ public void setConfigurationBaseEntity(ConfigurationBaseEntity configurationBaseEntity) {
+ this.configurationBaseEntity = configurationBaseEntity;
+ }
+
+ @Override
+ public String toString() {
+ return "AmbariConfigurationEntity{" +
+ "id=" + id +
+ ", configurationBaseEntity=" + configurationBaseEntity +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigurationBaseEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigurationBaseEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigurationBaseEntity.java
new file mode 100644
index 0000000..9ad30d7
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigurationBaseEntity.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.orm.entities;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.Table;
+import javax.persistence.TableGenerator;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+@Table(name = "configuration_base")
+@TableGenerator(
+ name = "configuration_id_generator",
+ table = "ambari_sequences",
+ pkColumnName = "sequence_name",
+ valueColumnName = "sequence_value",
+ pkColumnValue = "configuration_id_seq",
+ initialValue = 1
+)
+@Entity
+public class ConfigurationBaseEntity {
+
+ @Id
+ @Column(name = "id")
+ @GeneratedValue(strategy = GenerationType.TABLE, generator = "configuration_id_generator")
+ private Long id;
+
+ @Column(name = "version")
+ private Integer version;
+
+ @Column(name = "version_tag")
+ private String versionTag;
+
+ @Column(name = "type")
+ private String type;
+
+ @Column(name = "data")
+ private String configurationData;
+
+ @Column(name = "attributes")
+ private String configurationAttributes;
+
+ @Column(name = "create_timestamp")
+ private Long createTimestamp;
+
+ public Long getId() {
+ return id;
+ }
+
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
+ public String getVersionTag() {
+ return versionTag;
+ }
+
+ public void setVersionTag(String versionTag) {
+ this.versionTag = versionTag;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getConfigurationData() {
+ return configurationData;
+ }
+
+ public void setConfigurationData(String configurationData) {
+ this.configurationData = configurationData;
+ }
+
+ public String getConfigurationAttributes() {
+ return configurationAttributes;
+ }
+
+ public void setConfigurationAttributes(String configurationAttributes) {
+ this.configurationAttributes = configurationAttributes;
+ }
+
+ public Long getCreateTimestamp() {
+ return createTimestamp;
+ }
+
+ public void setCreateTimestamp(Long createTimestamp) {
+ this.createTimestamp = createTimestamp;
+ }
+
+ @Override
+ public String toString() {
+ return "ConfigurationBaseEntity{" +
+ "id=" + id +
+ ", version=" + version +
+ ", versionTag='" + versionTag + '\'' +
+ ", type='" + type + '\'' +
+ ", configurationData='" + configurationData + '\'' +
+ ", configurationAttributes='" + configurationAttributes + '\'' +
+ ", createTimestamp=" + createTimestamp +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ConfigurationBaseEntity that = (ConfigurationBaseEntity) o;
+
+ return new EqualsBuilder()
+ .append(id, that.id)
+ .append(version, that.version)
+ .append(versionTag, that.versionTag)
+ .append(type, that.type)
+ .append(configurationData, that.configurationData)
+ .append(configurationAttributes, that.configurationAttributes)
+ .append(createTimestamp, that.createTimestamp)
+ .isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 37)
+ .append(id)
+ .append(version)
+ .append(versionTag)
+ .append(type)
+ .append(configurationData)
+ .append(configurationAttributes)
+ .append(createTimestamp)
+ .toHashCode();
+ }
+}
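The two entities above share a primary key: @MapsId on AmbariConfigurationEntity derives its id from the table-generated ConfigurationBaseEntity id, and CascadeType.ALL lets a single persist() call write both rows. A hedged sketch of how they would be wired together, assuming a configured JPA EntityManager (the type and tag values are illustrative, not taken from the patch):

import javax.persistence.EntityManager;

public class SharedPrimaryKeySketch {
  static AmbariConfigurationEntity createConfiguration(EntityManager em) {
    ConfigurationBaseEntity base = new ConfigurationBaseEntity();
    base.setType("ldap-configuration"); // hypothetical type name
    base.setVersion(1);
    base.setVersionTag("Default version");
    base.setConfigurationData("{}");
    base.setCreateTimestamp(System.currentTimeMillis());

    AmbariConfigurationEntity config = new AmbariConfigurationEntity();
    config.setConfigurationBaseEntity(base);

    em.getTransaction().begin();
    em.persist(config); // CascadeType.ALL persists the base entity in the same call
    em.getTransaction().commit();

    // @MapsId copies the generated base id onto config, matching the
    // ambari_configuration(id) -> configuration_base(id) FK in the DDL below.
    return config;
  }
}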
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
index cd35c2c..3c50628 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
@@ -39,6 +39,7 @@ public enum RoleAuthorization {
AMBARI_MANAGE_VIEWS("AMBARI.MANAGE_VIEWS"),
AMBARI_RENAME_CLUSTER("AMBARI.RENAME_CLUSTER"),
AMBARI_RUN_CUSTOM_COMMAND("AMBARI.RUN_CUSTOM_COMMAND"),
+ AMBARI_MANAGE_CONFIGURATION("AMBARI.MANAGE_CONFIGURATION"),
CLUSTER_MANAGE_CREDENTIALS("CLUSTER.MANAGE_CREDENTIALS"),
CLUSTER_MODIFY_CONFIGS("CLUSTER.MODIFY_CONFIGS"),
CLUSTER_MANAGE_CONFIG_GROUPS("CLUSTER.MANAGE_CONFIG_GROUPS"),
@@ -84,58 +85,58 @@ public enum RoleAuthorization {
VIEW_USE("VIEW.USE");
public static final Set<RoleAuthorization> AUTHORIZATIONS_VIEW_CLUSTER = EnumSet.of(
- CLUSTER_VIEW_STATUS_INFO,
- CLUSTER_VIEW_ALERTS,
- CLUSTER_VIEW_CONFIGS,
- CLUSTER_VIEW_METRICS,
- CLUSTER_VIEW_STACK_DETAILS,
- CLUSTER_MODIFY_CONFIGS,
- CLUSTER_MANAGE_CONFIG_GROUPS,
- CLUSTER_TOGGLE_ALERTS,
- CLUSTER_TOGGLE_KERBEROS,
- CLUSTER_UPGRADE_DOWNGRADE_STACK);
+ CLUSTER_VIEW_STATUS_INFO,
+ CLUSTER_VIEW_ALERTS,
+ CLUSTER_VIEW_CONFIGS,
+ CLUSTER_VIEW_METRICS,
+ CLUSTER_VIEW_STACK_DETAILS,
+ CLUSTER_MODIFY_CONFIGS,
+ CLUSTER_MANAGE_CONFIG_GROUPS,
+ CLUSTER_TOGGLE_ALERTS,
+ CLUSTER_TOGGLE_KERBEROS,
+ CLUSTER_UPGRADE_DOWNGRADE_STACK);
public static final Set<RoleAuthorization> AUTHORIZATIONS_UPDATE_CLUSTER = EnumSet.of(
- CLUSTER_TOGGLE_ALERTS,
- CLUSTER_TOGGLE_KERBEROS,
- CLUSTER_UPGRADE_DOWNGRADE_STACK,
- CLUSTER_MODIFY_CONFIGS,
- CLUSTER_MANAGE_AUTO_START,
- SERVICE_MODIFY_CONFIGS);
+ CLUSTER_TOGGLE_ALERTS,
+ CLUSTER_TOGGLE_KERBEROS,
+ CLUSTER_UPGRADE_DOWNGRADE_STACK,
+ CLUSTER_MODIFY_CONFIGS,
+ CLUSTER_MANAGE_AUTO_START,
+ SERVICE_MODIFY_CONFIGS);
public static final Set<RoleAuthorization> AUTHORIZATIONS_VIEW_SERVICE = EnumSet.of(
- SERVICE_VIEW_ALERTS,
- SERVICE_VIEW_CONFIGS,
- SERVICE_VIEW_METRICS,
- SERVICE_VIEW_STATUS_INFO,
- SERVICE_COMPARE_CONFIGS,
- SERVICE_ADD_DELETE_SERVICES,
- SERVICE_DECOMMISSION_RECOMMISSION,
- SERVICE_ENABLE_HA,
- SERVICE_MANAGE_CONFIG_GROUPS,
- SERVICE_MODIFY_CONFIGS,
- SERVICE_START_STOP,
- SERVICE_TOGGLE_MAINTENANCE,
- SERVICE_TOGGLE_ALERTS,
- SERVICE_MOVE,
- SERVICE_RUN_CUSTOM_COMMAND,
- SERVICE_RUN_SERVICE_CHECK);
+ SERVICE_VIEW_ALERTS,
+ SERVICE_VIEW_CONFIGS,
+ SERVICE_VIEW_METRICS,
+ SERVICE_VIEW_STATUS_INFO,
+ SERVICE_COMPARE_CONFIGS,
+ SERVICE_ADD_DELETE_SERVICES,
+ SERVICE_DECOMMISSION_RECOMMISSION,
+ SERVICE_ENABLE_HA,
+ SERVICE_MANAGE_CONFIG_GROUPS,
+ SERVICE_MODIFY_CONFIGS,
+ SERVICE_START_STOP,
+ SERVICE_TOGGLE_MAINTENANCE,
+ SERVICE_TOGGLE_ALERTS,
+ SERVICE_MOVE,
+ SERVICE_RUN_CUSTOM_COMMAND,
+ SERVICE_RUN_SERVICE_CHECK);
public static final Set<RoleAuthorization> AUTHORIZATIONS_UPDATE_SERVICE = EnumSet.of(
- SERVICE_ADD_DELETE_SERVICES,
- SERVICE_DECOMMISSION_RECOMMISSION,
- SERVICE_ENABLE_HA,
- SERVICE_MANAGE_CONFIG_GROUPS,
- SERVICE_MODIFY_CONFIGS,
- SERVICE_START_STOP,
- SERVICE_TOGGLE_MAINTENANCE,
- SERVICE_TOGGLE_ALERTS,
- SERVICE_MOVE,
- SERVICE_RUN_CUSTOM_COMMAND,
- SERVICE_RUN_SERVICE_CHECK,
- SERVICE_MANAGE_ALERTS,
- SERVICE_MANAGE_AUTO_START,
- SERVICE_SET_SERVICE_USERS_GROUPS);
+ SERVICE_ADD_DELETE_SERVICES,
+ SERVICE_DECOMMISSION_RECOMMISSION,
+ SERVICE_ENABLE_HA,
+ SERVICE_MANAGE_CONFIG_GROUPS,
+ SERVICE_MODIFY_CONFIGS,
+ SERVICE_START_STOP,
+ SERVICE_TOGGLE_MAINTENANCE,
+ SERVICE_TOGGLE_ALERTS,
+ SERVICE_MOVE,
+ SERVICE_RUN_CUSTOM_COMMAND,
+ SERVICE_RUN_SERVICE_CHECK,
+ SERVICE_MANAGE_ALERTS,
+ SERVICE_MANAGE_AUTO_START,
+ SERVICE_SET_SERVICE_USERS_GROUPS);
private final String id;
@@ -162,7 +163,7 @@ public enum RoleAuthorization {
/**
* Safely translates a role authorization Id to a RoleAuthorization
*
- * @param authenticationId an authentication id
+ * @param authenticationId an authentication id
* @return a RoleAuthorization or null if no translation can be made
*/
public static RoleAuthorization translate(String authenticationId) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 90cdbfe..3605783 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -62,8 +62,26 @@ CREATE TABLE clusters (
desired_cluster_state VARCHAR(255) NOT NULL,
desired_stack_id BIGINT NOT NULL,
CONSTRAINT PK_clusters PRIMARY KEY (cluster_id),
- CONSTRAINT FK_clusters_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack(stack_id),
- CONSTRAINT FK_clusters_resource_id FOREIGN KEY (resource_id) REFERENCES adminresource(resource_id));
+ CONSTRAINT FK_clusters_desired_stack_id FOREIGN KEY (desired_stack_id) REFERENCES stack (stack_id),
+ CONSTRAINT FK_clusters_resource_id FOREIGN KEY (resource_id) REFERENCES adminresource (resource_id)
+);
+
+CREATE TABLE configuration_base (
+ id BIGINT NOT NULL,
+ version_tag VARCHAR(255) NOT NULL,
+ version BIGINT NOT NULL,
+ type VARCHAR(255) NOT NULL,
+ data TEXT NOT NULL,
+ attributes TEXT,
+ create_timestamp BIGINT NOT NULL,
+ CONSTRAINT PK_configuration_base PRIMARY KEY (id)
+);
+
+CREATE TABLE ambari_configuration (
+ id BIGINT NOT NULL,
+ CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
+ CONSTRAINT FK_ambari_configuration_configuration_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+);
CREATE TABLE clusterconfig (
config_id BIGINT NOT NULL,
@@ -1089,6 +1107,7 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value) VALUES
('remote_cluster_id_seq', 0),
('remote_cluster_service_id_seq', 0),
('servicecomponent_version_id_seq', 0),
+ ('configuration_id_seq', 0),
('hostcomponentdesiredstate_id_seq', 0);
INSERT INTO adminresourcetype (resource_type_id, resource_type_name) VALUES
@@ -1173,6 +1192,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
+ SELECT 'AMBARI.MANAGE_CONFIGURATION', 'Manage Ambari configuration' UNION ALL
SELECT 'AMBARI.MANAGE_USERS', 'Manage users' UNION ALL
SELECT 'AMBARI.MANAGE_GROUPS', 'Manage groups' UNION ALL
SELECT 'AMBARI.MANAGE_VIEWS', 'Manage Ambari Views' UNION ALL
@@ -1378,6 +1398,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_SETTINGS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+ SELECT permission_id, 'AMBARI.MANAGE_CONFIGURATION' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_USERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_GROUPS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
SELECT permission_id, 'AMBARI.MANAGE_VIEWS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
http://git-wip-us.apache.org/repos/asf/ambari/blob/700bce9d/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index e4045ef..0f8e964 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -96,6 +96,8 @@
<class>org.apache.ambari.server.orm.entities.KerberosDescriptorEntity</class>
<class>org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity</class>
<class>org.apache.ambari.server.orm.entities.RemoteAmbariClusterServiceEntity</class>
+ <class>org.apache.ambari.server.orm.entities.ConfigurationBaseEntity</class>
+ <class>org.apache.ambari.server.orm.entities.AmbariConfigurationEntity</class>
<properties>
<property name="eclipselink.cache.size.default" value="10000" />
[17/57] [abbrv] ambari git commit: AMBARI-21900. Upgrade History Style Changes (alexantonenko)
Posted by lp...@apache.org.
AMBARI-21900. Upgrade History Style Changes (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab06654a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab06654a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab06654a
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: ab06654a4cb8de34b8135b7cc5ee2f7ede593a1d
Parents: 8b5d697
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Thu Sep 7 17:09:02 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Thu Sep 7 17:09:02 2017 +0300
----------------------------------------------------------------------
ambari-web/app/styles/stack_versions.less | 8 +++++++-
.../templates/main/admin/stack_upgrade/upgrade_history.hbs | 6 +++---
.../views/main/admin/stack_upgrade/upgrade_history_view.js | 4 ++++
3 files changed, 14 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab06654a/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 92843fa..b854933 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -756,6 +756,12 @@
.wide-column {
width: 14%;
}
+ .widest-column {
+ width: 20%;
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ }
.accordion-group {
border-right: none;
padding-left:20px;
@@ -774,7 +780,7 @@
.accordion-body {
min-width: 400px;
.accordion-inner{
- padding-top:10px;
+ padding-top:20px;
.label{
padding: 5px 15px;
&.available{
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab06654a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
index bcf4761..fce7171 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
@@ -51,7 +51,7 @@
<tr>
<th>{{t common.direction}}</th>
<th>{{t common.type}}</th>
- <th class="wide-column">{{t common.repository}}</th>
+ <th class="widest-column">{{t common.repository}}</th>
<th class="wide-column">{{t common.repositoryType}}</th>
<th>{{t common.start.time}}</th>
<th>{{t common.duration}}</th>
@@ -102,8 +102,8 @@
<td>
<span>{{item.upgradeTypeLabel}}</span>
</td>
- <td class="wide-column">
- <span>{{item.repositoryName}}</span>
+ <td class="widest-column">
+ <span {{bindAttr data-original-title="item.repositoryName"}}>{{item.repositoryName}}</span>
</td>
<td class="wide-column">
<span>{{item.repositoryType}}</span>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab06654a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
index 320ceef..0ed34ec 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
@@ -209,6 +209,10 @@ App.MainAdminStackUpgradeHistoryView = App.TableView.extend(App.TableServerViewM
this.$(".accordion").on("show hide", function (e) {
$(e.target).siblings(".accordion-heading").find("i.accordion-toggle").toggleClass('icon-caret-right icon-caret-down');
});
+
+ Em.run.later(this, function () {
+ App.tooltip($('.widest-column span'));
+ }, 1000);
},
observesCategories: function () {
[12/57] [abbrv] ambari git commit: AMBARI-21871 : Add new graphs to HBase RegionServer dashboard in Grafana.
Posted by lp...@apache.org.
AMBARI-21871 : Add new graphs to HBase RegionServer dashboard in Grafana.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/54d4d5e6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/54d4d5e6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/54d4d5e6
Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 54d4d5e62d836d926bca79fea6b48503803db8fb
Parents: 249bb97
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Wed Sep 6 10:03:11 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Wed Sep 6 10:03:11 2017 -0700
----------------------------------------------------------------------
.../conf/unix/metrics_whitelist | 159 +-
.../conf/windows/metrics_whitelist | 159 +-
.../HDP/grafana-hbase-regionservers.json | 1442 ++++++++++++++++++
3 files changed, 1728 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/54d4d5e6/ambari-metrics/ambari-metrics-timelineservice/conf/unix/metrics_whitelist
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/metrics_whitelist b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/metrics_whitelist
index bd36429..2edac39 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/metrics_whitelist
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/metrics_whitelist
@@ -4,6 +4,15 @@ BytesQueued
BytesReceivedLast5Minutes
BytesSentLast5Minutes
ChannelSize
+Counter.%.CacheMisses
+Counter.CacheHits
+Counter.CacheMisses
+Counter.ReadAllQuery
+Counter.ReadAllQuery.%
+Counter.ReadAllQuery.HostRoleCommandEntity
+DataModifyQuery
+DirectReadQuery
+DoesExistQuery
EventPutSuccessCount
EventTakeSuccessCount
FSDatasetState.org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.Capacity
@@ -13,8 +22,21 @@ FlowFilesQueued
FlowFilesReceivedLast5Minutes
FlowFilesSentLast5Minutes
Free Slots
+InsertObjectQuery
+ReadAllQuery
+ReadAllQuery.HostRoleCommandEntity
+ReadObjectQuery
Supervisors
TimelineMetricStoreWatcher.FakeMetric
+Timer.ObjectBuilding
+Timer.QueryPreparation
+Timer.ReadAllQuery
+Timer.ReadAllQuery.%
+Timer.ReadAllQuery.HostRoleCommandEntity
+Timer.RowFetch
+Timer.SqlGeneration
+Timer.SqlPrepare
+Timer.StatementExecute
Topologies
Total Executors
Total Slots
@@ -79,6 +101,18 @@ dfs.FSNamesystem.TotalLoad
dfs.FSNamesystem.TransactionsSinceLastCheckpoint
dfs.FSNamesystem.TransactionsSinceLastLogRoll
dfs.FSNamesystem.UnderReplicatedBlocks
+dfs.NNTopUserOpCounts.windowMs=1500000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=1500000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=1500000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=1500000.op=__%.user=%
+dfs.NNTopUserOpCounts.windowMs=300000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=300000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=300000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=300000.op=__%.user=%
+dfs.NNTopUserOpCounts.windowMs=60000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=60000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=60000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=60000.op=__%.user=%
dfs.datanode.BlocksRead
dfs.datanode.BlocksWritten
dfs.datanode.DatanodeNetworkErrors
@@ -94,6 +128,37 @@ dfs.namenode.TotalFileOps
disk_free
disk_total
disk_used
+druid/broker.*.%.query/time
+druid/broker.heap.jvm/mem/max
+druid/broker.heap.jvm/mem/used
+druid/broker.jvm/gc/time
+druid/coordinator.heap.jvm/mem/max
+druid/coordinator.heap.jvm/mem/used
+druid/coordinator.jvm/gc/time
+druid/historical.*.%.query/segment/time
+druid/historical.*.%.query/time
+druid/historical.*.%.query/wait/time
+druid/historical.heap.jvm/mem/max
+druid/historical.heap.jvm/mem/used
+druid/historical.jvm/gc/time
+druid/historical.segment/scan/pending
+druid/middlemanager.*.%.query/segment/time
+druid/middlemanager.*.%.query/time
+druid/middlemanager.*.%.query/wait/time
+druid/middlemanager.*.ingest/events/processed
+druid/middlemanager.*.ingest/events/thrownAway
+druid/middlemanager.*.ingest/events/unparseable
+druid/middlemanager.*.ingest/persists/count
+druid/middlemanager.*.ingest/persists/time
+druid/middlemanager.*.ingest/rows/output
+druid/middlemanager.heap.jvm/mem/max
+druid/middlemanager.heap.jvm/mem/used
+druid/middlemanager.jvm/gc/time
+druid/middlemanager.segment/scan/pending
+druid/overlord.*.segment/added/bytes
+druid/overlord.heap.jvm/mem/max
+druid/overlord.heap.jvm/mem/used
+druid/overlord.jvm/gc/time
executors.ExecutorMetrics.ExecutorAvailableFreeSlots
executors.ExecutorMetrics.ExecutorAvailableFreeSlotsPercent
executors.ExecutorMetrics.ExecutorCacheMemoryPerInstance
@@ -118,14 +183,14 @@ executors.ExecutorMetrics.ExecutorTotalKilled
executors.ExecutorMetrics.ExecutorTotalRejectedRequests
executors.ExecutorMetrics.ExecutorTotalRequestsHandled
executors.ExecutorMetrics.ExecutorTotalSuccess
-gc.ConcurrentMarkSweep.count
-gc.ConcurrentMarkSweep.time
-gc.ParNew.count
-gc.ParNew.time
+filter.error.grok
+filter.error.keyvalue
+input.files.count
+input.files.read_bytes
+input.files.read_lines
io.IOMetrics.MaxDecodingTime
io.IOMetrics.PercentileDecodingTime_30s50thPercentileLatency
io.IOMetrics.PercentileDecodingTime_30s90thPercentileLatency
-io.IOMetrics.PercentileDecodingTime_30s95thPercentileLatency
io.IOMetrics.PercentileDecodingTime_30s99thPercentileLatency
ipc.client.org.apache.hadoop.ipc.DecayRpcScheduler.Caller(*).Priority
ipc.client.org.apache.hadoop.ipc.DecayRpcScheduler.Caller(*).Volume
@@ -151,6 +216,10 @@ jvm.JvmMetrics.ThreadsRunnable
jvm.JvmMetrics.ThreadsTerminated
jvm.JvmMetrics.ThreadsTimedWaiting
jvm.JvmMetrics.ThreadsWaiting
+jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferMemoryUsed
+jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferTotalCapacity
+jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferMemoryUsed
+jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferTotalCapacity
jvm.Master.JvmMetrics.ThreadsBlocked
jvm.Master.JvmMetrics.ThreadsNew
jvm.Master.JvmMetrics.ThreadsRunnable
@@ -177,8 +246,23 @@ jvm.RegionServer.JvmMetrics.ThreadsTimedWaiting
jvm.RegionServer.JvmMetrics.ThreadsWaiting
jvm.daemon_thread_count
jvm.file_descriptor_usage
+jvm.gc.ConcurrentMarkSweep.count
+jvm.gc.ConcurrentMarkSweep.time
+jvm.gc.ParNew.count
+jvm.gc.ParNew.time
jvm.heap_usage
+jvm.memory.heap.committed
+jvm.memory.heap.max
+jvm.memory.heap.used
+jvm.memory.non-heap.committed
+jvm.memory.non-heap.max
+jvm.memory.non-heap.used
jvm.thread_count
+jvm.threads.blocked.count
+jvm.threads.count
+jvm.threads.daemon.count
+jvm.threads.deadlock.count
+jvm.threads.runnable.count
jvm.uptime
kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.1MinuteRate
kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.count
@@ -236,12 +320,8 @@ mem_free
mem_shared
mem_total
mem_used
-memory.heap.committed
-memory.heap.max
-memory.heap.used
-memory.non-heap.committed
-memory.non-heap.max
-memory.non-heap.used
+output.solr.write_bytes
+output.solr.write_logs
pkts_in
pkts_out
proc_run
@@ -250,6 +330,24 @@ read_bps
read_bytes
read_count
read_time
+regionserver.IO.FsPReadTime_75th_percentile
+regionserver.IO.FsPReadTime_95th_percentile
+regionserver.IO.FsPReadTime_99th_percentile
+regionserver.IO.FsPReadTime_max
+regionserver.IO.FsPReadTime_mean
+regionserver.IO.FsPReadTime_median
+regionserver.IO.FsReadTime_75th_percentile
+regionserver.IO.FsReadTime_95th_percentile
+regionserver.IO.FsReadTime_99th_percentile
+regionserver.IO.FsReadTime_max
+regionserver.IO.FsReadTime_mean
+regionserver.IO.FsWriteTime_75th_percentile
+regionserver.IO.FsWriteTime_95th_percentile
+regionserver.IO.FsWriteTime_99th_percentile
+regionserver.IO.FsWriteTime_max
+regionserver.IO.FsWriteTime_mean
+regionserver.IO.FsWriteTime_median
+regionserver.IO.fsChecksumFailureCount
regionserver.RegionServer.ProcessCallTime_75th_percentile
regionserver.RegionServer.ProcessCallTime_95th_percentile
regionserver.RegionServer.ProcessCallTime_99th_percentile
@@ -441,13 +539,42 @@ rpc.rpc.datanode.RpcQueueTimeNumOps
rpc.rpc.datanode.RpcSlowCalls
rpcdetailed.rpcdetailed.client.AddBlockAvgTime
rpcdetailed.rpcdetailed.client.AddBlockNumOps
+solr.admin.info.jvm.memory.used
+solr.admin.info.system.processCpuLoad
+solr.admin.mbeans.cache.documentCache.hitratio
+solr.admin.mbeans.cache.documentCache.size
+solr.admin.mbeans.cache.documentCache.warmupTime
+solr.admin.mbeans.cache.filterCache.hitratio
+solr.admin.mbeans.cache.filterCache.size
+solr.admin.mbeans.cache.filterCache.warmupTime
+solr.admin.mbeans.cache.queryResultCache.hitratio
+solr.admin.mbeans.cache.queryResultCache.size
+solr.admin.mbeans.cache.queryResultCache.warmupTime
+solr.admin.mbeans.queryHandler.browse.avgTimePerRequest
+solr.admin.mbeans.queryHandler.browse.requests
+solr.admin.mbeans.queryHandler.export.avgTimePerRequest
+solr.admin.mbeans.queryHandler.export.requests
+solr.admin.mbeans.queryHandler.get.avgTimePerRequest
+solr.admin.mbeans.queryHandler.get.requests
+solr.admin.mbeans.queryHandler.query.avgTimePerRequest
+solr.admin.mbeans.queryHandler.query.requests
+solr.admin.mbeans.queryHandler.select.15minRateReqsPerSecond
+solr.admin.mbeans.queryHandler.select.5minRateReqsPerSecond
+solr.admin.mbeans.queryHandler.select.75thPcRequestTime
+solr.admin.mbeans.queryHandler.select.95thPcRequestTime
+solr.admin.mbeans.queryHandler.select.999thPcRequestTime
+solr.admin.mbeans.queryHandler.select.99thPcRequestTime
+solr.admin.mbeans.queryHandler.select.avgRequestsPerSecond
+solr.admin.mbeans.queryHandler.select.avgTimePerRequest
+solr.admin.mbeans.queryHandler.select.medianRequestTime
+solr.admin.mbeans.queryHandler.select.requests
+solr.admin.mbeans.updateHandler.adds
+solr.admin.mbeans.updateHandler.deletesById
+solr.admin.mbeans.updateHandler.deletesByQuery
+solr.admin.mbeans.updateHandler.docsPending
+solr.admin.mbeans.updateHandler.errors
swap_free
swap_total
-threads.blocked.count
-threads.count
-threads.daemon.count
-threads.deadlock.count
-threads.runnable.count
topology.*.%.--ack-count.%
topology.*.%.--complete-latency.%
topology.*.%.--emit-count.%
http://git-wip-us.apache.org/repos/asf/ambari/blob/54d4d5e6/ambari-metrics/ambari-metrics-timelineservice/conf/windows/metrics_whitelist
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/windows/metrics_whitelist b/ambari-metrics/ambari-metrics-timelineservice/conf/windows/metrics_whitelist
index bd36429..2edac39 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/conf/windows/metrics_whitelist
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/windows/metrics_whitelist
@@ -4,6 +4,15 @@ BytesQueued
BytesReceivedLast5Minutes
BytesSentLast5Minutes
ChannelSize
+Counter.%.CacheMisses
+Counter.CacheHits
+Counter.CacheMisses
+Counter.ReadAllQuery
+Counter.ReadAllQuery.%
+Counter.ReadAllQuery.HostRoleCommandEntity
+DataModifyQuery
+DirectReadQuery
+DoesExistQuery
EventPutSuccessCount
EventTakeSuccessCount
FSDatasetState.org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.Capacity
@@ -13,8 +22,21 @@ FlowFilesQueued
FlowFilesReceivedLast5Minutes
FlowFilesSentLast5Minutes
Free Slots
+InsertObjectQuery
+ReadAllQuery
+ReadAllQuery.HostRoleCommandEntity
+ReadObjectQuery
Supervisors
TimelineMetricStoreWatcher.FakeMetric
+Timer.ObjectBuilding
+Timer.QueryPreparation
+Timer.ReadAllQuery
+Timer.ReadAllQuery.%
+Timer.ReadAllQuery.HostRoleCommandEntity
+Timer.RowFetch
+Timer.SqlGeneration
+Timer.SqlPrepare
+Timer.StatementExecute
Topologies
Total Executors
Total Slots
@@ -79,6 +101,18 @@ dfs.FSNamesystem.TotalLoad
dfs.FSNamesystem.TransactionsSinceLastCheckpoint
dfs.FSNamesystem.TransactionsSinceLastLogRoll
dfs.FSNamesystem.UnderReplicatedBlocks
+dfs.NNTopUserOpCounts.windowMs=1500000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=1500000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=1500000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=1500000.op=__%.user=%
+dfs.NNTopUserOpCounts.windowMs=300000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=300000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=300000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=300000.op=__%.user=%
+dfs.NNTopUserOpCounts.windowMs=60000.op=%.TotalCount
+dfs.NNTopUserOpCounts.windowMs=60000.op=*.TotalCount
+dfs.NNTopUserOpCounts.windowMs=60000.op=*.user=%.count
+dfs.NNTopUserOpCounts.windowMs=60000.op=__%.user=%
dfs.datanode.BlocksRead
dfs.datanode.BlocksWritten
dfs.datanode.DatanodeNetworkErrors
@@ -94,6 +128,37 @@ dfs.namenode.TotalFileOps
disk_free
disk_total
disk_used
+druid/broker.*.%.query/time
+druid/broker.heap.jvm/mem/max
+druid/broker.heap.jvm/mem/used
+druid/broker.jvm/gc/time
+druid/coordinator.heap.jvm/mem/max
+druid/coordinator.heap.jvm/mem/used
+druid/coordinator.jvm/gc/time
+druid/historical.*.%.query/segment/time
+druid/historical.*.%.query/time
+druid/historical.*.%.query/wait/time
+druid/historical.heap.jvm/mem/max
+druid/historical.heap.jvm/mem/used
+druid/historical.jvm/gc/time
+druid/historical.segment/scan/pending
+druid/middlemanager.*.%.query/segment/time
+druid/middlemanager.*.%.query/time
+druid/middlemanager.*.%.query/wait/time
+druid/middlemanager.*.ingest/events/processed
+druid/middlemanager.*.ingest/events/thrownAway
+druid/middlemanager.*.ingest/events/unparseable
+druid/middlemanager.*.ingest/persists/count
+druid/middlemanager.*.ingest/persists/time
+druid/middlemanager.*.ingest/rows/output
+druid/middlemanager.heap.jvm/mem/max
+druid/middlemanager.heap.jvm/mem/used
+druid/middlemanager.jvm/gc/time
+druid/middlemanager.segment/scan/pending
+druid/overlord.*.segment/added/bytes
+druid/overlord.heap.jvm/mem/max
+druid/overlord.heap.jvm/mem/used
+druid/overlord.jvm/gc/time
executors.ExecutorMetrics.ExecutorAvailableFreeSlots
executors.ExecutorMetrics.ExecutorAvailableFreeSlotsPercent
executors.ExecutorMetrics.ExecutorCacheMemoryPerInstance
@@ -118,14 +183,14 @@ executors.ExecutorMetrics.ExecutorTotalKilled
executors.ExecutorMetrics.ExecutorTotalRejectedRequests
executors.ExecutorMetrics.ExecutorTotalRequestsHandled
executors.ExecutorMetrics.ExecutorTotalSuccess
-gc.ConcurrentMarkSweep.count
-gc.ConcurrentMarkSweep.time
-gc.ParNew.count
-gc.ParNew.time
+filter.error.grok
+filter.error.keyvalue
+input.files.count
+input.files.read_bytes
+input.files.read_lines
io.IOMetrics.MaxDecodingTime
io.IOMetrics.PercentileDecodingTime_30s50thPercentileLatency
io.IOMetrics.PercentileDecodingTime_30s90thPercentileLatency
-io.IOMetrics.PercentileDecodingTime_30s95thPercentileLatency
io.IOMetrics.PercentileDecodingTime_30s99thPercentileLatency
ipc.client.org.apache.hadoop.ipc.DecayRpcScheduler.Caller(*).Priority
ipc.client.org.apache.hadoop.ipc.DecayRpcScheduler.Caller(*).Volume
@@ -151,6 +216,10 @@ jvm.JvmMetrics.ThreadsRunnable
jvm.JvmMetrics.ThreadsTerminated
jvm.JvmMetrics.ThreadsTimedWaiting
jvm.JvmMetrics.ThreadsWaiting
+jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferMemoryUsed
+jvm.LlapDaemonJVMMetrics.LlapDaemonDirectBufferTotalCapacity
+jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferMemoryUsed
+jvm.LlapDaemonJVMMetrics.LlapDaemonMappedBufferTotalCapacity
jvm.Master.JvmMetrics.ThreadsBlocked
jvm.Master.JvmMetrics.ThreadsNew
jvm.Master.JvmMetrics.ThreadsRunnable
@@ -177,8 +246,23 @@ jvm.RegionServer.JvmMetrics.ThreadsTimedWaiting
jvm.RegionServer.JvmMetrics.ThreadsWaiting
jvm.daemon_thread_count
jvm.file_descriptor_usage
+jvm.gc.ConcurrentMarkSweep.count
+jvm.gc.ConcurrentMarkSweep.time
+jvm.gc.ParNew.count
+jvm.gc.ParNew.time
jvm.heap_usage
+jvm.memory.heap.committed
+jvm.memory.heap.max
+jvm.memory.heap.used
+jvm.memory.non-heap.committed
+jvm.memory.non-heap.max
+jvm.memory.non-heap.used
jvm.thread_count
+jvm.threads.blocked.count
+jvm.threads.count
+jvm.threads.daemon.count
+jvm.threads.deadlock.count
+jvm.threads.runnable.count
jvm.uptime
kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.1MinuteRate
kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.count
@@ -236,12 +320,8 @@ mem_free
mem_shared
mem_total
mem_used
-memory.heap.committed
-memory.heap.max
-memory.heap.used
-memory.non-heap.committed
-memory.non-heap.max
-memory.non-heap.used
+output.solr.write_bytes
+output.solr.write_logs
pkts_in
pkts_out
proc_run
@@ -250,6 +330,24 @@ read_bps
read_bytes
read_count
read_time
+regionserver.IO.FsPReadTime_75th_percentile
+regionserver.IO.FsPReadTime_95th_percentile
+regionserver.IO.FsPReadTime_99th_percentile
+regionserver.IO.FsPReadTime_max
+regionserver.IO.FsPReadTime_mean
+regionserver.IO.FsPReadTime_median
+regionserver.IO.FsReadTime_75th_percentile
+regionserver.IO.FsReadTime_95th_percentile
+regionserver.IO.FsReadTime_99th_percentile
+regionserver.IO.FsReadTime_max
+regionserver.IO.FsReadTime_mean
+regionserver.IO.FsWriteTime_75th_percentile
+regionserver.IO.FsWriteTime_95th_percentile
+regionserver.IO.FsWriteTime_99th_percentile
+regionserver.IO.FsWriteTime_max
+regionserver.IO.FsWriteTime_mean
+regionserver.IO.FsWriteTime_median
+regionserver.IO.fsChecksumFailureCount
regionserver.RegionServer.ProcessCallTime_75th_percentile
regionserver.RegionServer.ProcessCallTime_95th_percentile
regionserver.RegionServer.ProcessCallTime_99th_percentile
@@ -441,13 +539,42 @@ rpc.rpc.datanode.RpcQueueTimeNumOps
rpc.rpc.datanode.RpcSlowCalls
rpcdetailed.rpcdetailed.client.AddBlockAvgTime
rpcdetailed.rpcdetailed.client.AddBlockNumOps
+solr.admin.info.jvm.memory.used
+solr.admin.info.system.processCpuLoad
+solr.admin.mbeans.cache.documentCache.hitratio
+solr.admin.mbeans.cache.documentCache.size
+solr.admin.mbeans.cache.documentCache.warmupTime
+solr.admin.mbeans.cache.filterCache.hitratio
+solr.admin.mbeans.cache.filterCache.size
+solr.admin.mbeans.cache.filterCache.warmupTime
+solr.admin.mbeans.cache.queryResultCache.hitratio
+solr.admin.mbeans.cache.queryResultCache.size
+solr.admin.mbeans.cache.queryResultCache.warmupTime
+solr.admin.mbeans.queryHandler.browse.avgTimePerRequest
+solr.admin.mbeans.queryHandler.browse.requests
+solr.admin.mbeans.queryHandler.export.avgTimePerRequest
+solr.admin.mbeans.queryHandler.export.requests
+solr.admin.mbeans.queryHandler.get.avgTimePerRequest
+solr.admin.mbeans.queryHandler.get.requests
+solr.admin.mbeans.queryHandler.query.avgTimePerRequest
+solr.admin.mbeans.queryHandler.query.requests
+solr.admin.mbeans.queryHandler.select.15minRateReqsPerSecond
+solr.admin.mbeans.queryHandler.select.5minRateReqsPerSecond
+solr.admin.mbeans.queryHandler.select.75thPcRequestTime
+solr.admin.mbeans.queryHandler.select.95thPcRequestTime
+solr.admin.mbeans.queryHandler.select.999thPcRequestTime
+solr.admin.mbeans.queryHandler.select.99thPcRequestTime
+solr.admin.mbeans.queryHandler.select.avgRequestsPerSecond
+solr.admin.mbeans.queryHandler.select.avgTimePerRequest
+solr.admin.mbeans.queryHandler.select.medianRequestTime
+solr.admin.mbeans.queryHandler.select.requests
+solr.admin.mbeans.updateHandler.adds
+solr.admin.mbeans.updateHandler.deletesById
+solr.admin.mbeans.updateHandler.deletesByQuery
+solr.admin.mbeans.updateHandler.docsPending
+solr.admin.mbeans.updateHandler.errors
swap_free
swap_total
-threads.blocked.count
-threads.count
-threads.daemon.count
-threads.deadlock.count
-threads.runnable.count
topology.*.%.--ack-count.%
topology.*.%.--complete-latency.%
topology.*.%.--emit-count.%
http://git-wip-us.apache.org/repos/asf/ambari/blob/54d4d5e6/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-hbase-regionservers.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-hbase-regionservers.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-hbase-regionservers.json
index 3184357..70c2c5c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-hbase-regionservers.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-hbase-regionservers.json
@@ -8959,6 +8959,1448 @@
],
"showTitle": true,
"title": "Locality"
+ },
+ {
+ "collapse": true,
+ "editable": true,
+ "height": "250px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 142,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_mean",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FileSystem Write Time - Mean",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 143,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_median",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FileSystem Write Time - Median",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 144,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_75th_percentile",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FileSystem Write Time - 75th Percentile",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 145,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_95th_percentile",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FileSystem Write Time - 95th Percentile",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 146,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_99th_percentile",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FIleSystem Write Time - 99th Percentile",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 147,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "app": "hbase",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "regionserver.IO.FsWriteTime_max",
+ "precision": "default",
+ "refId": "A",
+ "seriesAggregator": "none",
+ "templatedCluster": [
+ ""
+ ],
+ "templatedHost": "%",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "FIleSystem Write Time - Max",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ }
+ ],
+ "showTitle": true,
+ "title": "FileSystem Write Times"
+ },
+ {
+ "title": "FileSystem Read Times",
+ "height": "250px",
+ "editable": true,
+ "collapse": true,
+ "panels": [
+ {
+ "title": "FileSystem Read Time - Mean",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 148,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_mean"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem Read Time - Median",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 149,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_mean"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem Read Time - 75th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 150,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_75th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem Read Time - 95th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 151,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_95th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem Read Time - 99th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 152,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_99th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem Read Time - Max",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 153,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsReadTime_max"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ }
+ ],
+ "showTitle": true
+ },
+ {
+ "title": "FileSystem PRead Times",
+ "height": "250px",
+ "editable": true,
+ "collapse": true,
+ "panels": [
+ {
+ "title": "FileSystem PRead Time - Mean",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 154,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_mean"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem PRead Time - Median",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 155,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_median"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem PRead Time - 75th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 156,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_75th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem PRead Time - 95th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 157,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_95th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem PRead Time - 99th Percentile",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 158,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_99th_percentile"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ },
+ {
+ "title": "FileSystem PRead Time - Max",
+ "error": false,
+ "span": 6,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 159,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": null,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.FsPReadTime_max"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ }
+ ],
+ "showTitle": true
+ },
+ {
+ "title": "FileSystem Checksum Failures",
+ "height": "250px",
+ "editable": true,
+ "collapse": true,
+ "panels": [
+ {
+ "title": "FileSystem Checksum Failures",
+ "error": false,
+ "span": 12,
+ "editable": true,
+ "type": "graph",
+ "isNew": true,
+ "id": 160,
+ "datasource": null,
+ "renderer": "flot",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ],
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "rightMax": null,
+ "leftMin": 0,
+ "rightMin": null,
+ "rightLogBase": 1,
+ "threshold1": null,
+ "threshold2": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "lines": true,
+ "fill": 1,
+ "linewidth": 2,
+ "points": false,
+ "pointradius": 5,
+ "bars": false,
+ "stack": false,
+ "percentage": false,
+ "legend": {
+ "show": true,
+ "values": false,
+ "min": false,
+ "max": false,
+ "current": false,
+ "total": false,
+ "avg": false
+ },
+ "nullPointMode": "connected",
+ "steppedLine": false,
+ "tooltip": {
+ "value_type": "cumulative",
+ "shared": true
+ },
+ "timeFrom": null,
+ "timeShift": null,
+ "targets": [
+ {
+ "refId": "A",
+ "errors": {},
+ "aggregator": "avg",
+ "downsampleAggregator": "avg",
+ "transform": "none",
+ "precision": "default",
+ "seriesAggregator": "none",
+ "templatedHost": "%",
+ "templatedCluster": [
+ ""
+ ],
+ "app": "hbase",
+ "metric": "regionserver.IO.fsChecksumFailureCount"
+ }
+ ],
+ "aliasColors": {},
+ "seriesOverrides": [],
+ "links": []
+ }
+ ],
+ "showTitle": true
}
],
"time": {