1) Create a .patch file from an IDE or similar. For example, have the MetaStore run kinit at start time.
[root@node1 ~]# cat ~/hive_metastore_kinit.patch
Index: ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py (revision 6a8abfa65789b87da764549c27ca0f1440b91297)
+++ ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py (revision )
@@ -55,6 +55,14 @@
     env.set_params(params)
     # writing configurations on start required for securtity
+    if params.security_enabled:
+      import status_params
+      cached_kinit_executor(status_params.kinit_path_local,
+                            status_params.hive_user,
+                            params.hive_metastore_keytab_path,
+                            params.hive_server_principal, # FIXME: Should use 'hive.metastore.kerberos.principal'
+                            status_params.hostname,
+                            status_params.tmp_dir)
     self.configure(env)
     hive_service('metastore', action='start', upgrade_type=upgrade_type)
2) Apply the patch. -p3 strips the first three path components (ambari-server/src/main/) so the paths resolve relative to /var/lib/ambari-server, and -b keeps a .orig backup of each patched file.
[root@node1 ~]# cd /var/lib/ambari-server/
[root@node1 ambari-server]# patch -p3 -b -i ~/hive_metastore_kinit.patch [--verbose]
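If it is unclear whether the hunks still apply cleanly, GNU patch can rehearse the run with --dry-run first (nothing is written to disk):
[root@node1 ambari-server]# patch -p3 --dry-run -i ~/hive_metastore_kinit.patch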
3) Verify. (Note: the diff below was captured with a simpler variant of the patch, which runs kinit via a plain Execute rather than cached_kinit_executor.)
[root@node1 ambari-server]# ls -l resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore*
-rwxr-xr-x 1 root root 10655 Jul 27 09:03 resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
-rwxr-xr-x 1 root root 10404 May 5 19:11 resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py.orig
[root@node1 ambari-server]# diff resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py.orig resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
57a58,60
> if params.security_enabled:
> kinit_command=format("{kinit_path_local} -kt {hive_metastore_keytab_path} {hive_server_principal}; ") # FIXME: Should use 'hive.metastore.kerberos.principal'
> Execute(kinit_command,user=params.smokeuser)
4) Restart the Ambari Server
[root@node1 ambari-server]# ambari-server restart
5) Confirm the file was also copied to the Agent side
[root@node2 ~]# ls -l /var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py*
-rw-r--r-- 1 root root 10655 Jul 27 09:07 /var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
-rw-r--r-- 1 root root 10777 Jul 27 09:07 /var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.pyc
-rw-r--r-- 1 root root 10404 Jul 27 09:07 /var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py.orig
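One way to confirm the server and agent copies are identical is to compare checksums on both hosts (md5sum is just one option):
[root@node1 ~]# md5sum /var/lib/ambari-server/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
[root@node2 ~]# md5sum /var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py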
6) Revert the change
[root@node1 ~]# cd /var/lib/ambari-server/
[root@node1 ambari-server]# mv resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py.orig resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
mv: overwrite `resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py'? y
[root@node1 ambari-server]# ambari-server restart
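Alternatively, the same patch can be reversed in place with patch -R, assuming the file has not been modified since it was applied:
[root@node1 ambari-server]# patch -p3 -R -i ~/hive_metastore_kinit.patch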
Wednesday, July 27, 2016
Notes on using JDB to debug the HDP Sqoop client
1) Edit /usr/hdp/current/hadoop-client/bin/hadoop.distro (vim or any editor) and wrap the final exec line as follows:
if [ -n "$HADOOP_JDB" ]; then
  # Instead of executing the class, print the environment and the jdb
  # command line, so the output can be captured as a runnable script
  echo "export CLASSPATH=$CLASSPATH"
  echo "${JAVA_HOME}/bin/jdb" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
else
  exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
fi
2) Run the Sqoop command with HADOOP_JDB set, capturing the echoed jdb command line into a script:
HADOOP_JDB="Y" sqoop import --username SYSTEM --password oracle --direct --connect 'jdbc:oracle:thin:@//192.168.8.22:1521/XE' --query "select * from TEST.TMP_SQOOP_DF_TEST67 WHERE \$CONDITIONS" --split-by COLUMN_NUMBER --target-dir /tmp/test > jdb_sqoop_import.sh
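The captured script should end up containing roughly two lines like the following sketch (classpath and JVM options abbreviated with "..."; exact values depend on the environment, and this assumes the sqoop wrapper passes org.apache.sqoop.Sqoop to hadoop as the main class):
export CLASSPATH=/usr/hdp/current/hadoop-client/conf:...
/usr/lib/jvm/java/bin/jdb -Xmx1000m ... org.apache.sqoop.Sqoop import --username SYSTEM --password oracle --direct --connect 'jdbc:oracle:thin:@//192.168.8.22:1521/XE' ...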
3) Delete the unneeded lines from jdb_sqoop_import.sh, keeping the export CLASSPATH line and the jdb command line.
4) bash ./jdb_sqoop_import.sh
5) Set a breakpoint inside JDB
> stop in org.apache.sqoop.manager.oracle.OraOopManagerFactory.isOraOopEnabled
> run
6) Repeat next and locals to step through the code and inspect local variables. A minimal session sketch follows.
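All of these are standard jdb commands; program output is omitted, and the main[1] prompt appears once the breakpoint is hit in the main thread:
> stop in org.apache.sqoop.manager.oracle.OraOopManagerFactory.isOraOopEnabled
> run
main[1] next
main[1] locals
main[1] cont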