当前项目背景需要使用到国密SM4对加密后的数据进行解密，Hive是不支持的。尝试了华为DWS数仓，华为只支持在DWS中的SM4加密解密，不支持外部加密数据在DWS解密。

新建Maven工程

只需要将引用的第三方依赖打到jar包中，hadoop和hive的依赖不需要打；不需要打的依赖scope选择provided即可。使用idea新建maven工程，pom.xml配置如下:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.szc.bigdata.hive</groupId>
    <artifactId>sm4_decode</artifactId>
    <version>0.1</version>

    <properties>
        <!-- Huawei FusionInsight builds of Hadoop/Hive; match your cluster version -->
        <hadoop.version>3.1.1-hw-ei-311006</hadoop.version>
        <hive.version>3.1.0-hw-ei-311006</hive.version>
    </properties>

    <dependencies>
        <!-- SM4 decryption needs the following 3 artifacts;
             commons-codec is used to turn the decrypted byte[] into a String -->
        <dependency>
            <groupId>commons-codec</groupId>
            <artifactId>commons-codec</artifactId>
            <version>1.15</version>
        </dependency>
        <dependency>
            <groupId>org.bouncycastle</groupId>
            <artifactId>bcprov-jdk15to18</artifactId>
            <version>1.69</version>
        </dependency>
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-crypto</artifactId>
            <version>5.8.16</version>
        </dependency>

        <!-- Hadoop/Hive are present on the cluster: scope=provided keeps them
             out of the jar-with-dependencies assembly -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-auth</artifactId>
            <version>${hadoop.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-common</artifactId>
            <version>${hive.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-shims</artifactId>
            <version>${hive.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>${hive.version}</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>

    <repositories>
        <!-- Pick a repository matching your platform; Huawei mirror is used here -->
        <repository>
            <id>huaweicloudsdk</id>
            <url>https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
    </repositories>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>3.1.0</version>
                <configuration>
                    <!-- This configuration produces two jars: one without
                         dependencies and one with all dependencies bundled -->
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <mainClass>com.szc.bigdata.hive.udf.SM4Decode</mainClass>
                        </manifest>
                    </archive>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <!-- bind to the packaging phase -->
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
编写自定义函数类

SmUtil 引用的是 hutool 里的工具类
public class SM4Decode extends UDF {public String evaluate(String data, String key) {if (data null || .equals(data)) {return null;}SymmetricCrypto sm4 SmUtil.sm4(key.getBytes());return StringUtils.newStringUtf8(sm4.decrypt(data));}
}上传jar包到hdfs上
上传jar包到hdfs上
如果集群开启了权限控制（Kerberos），需要先使用 kinit 登录才可以。
# 刷新环境变量
source bigdata_env

# kinit 登录
kinit 用户名
# 回车后输入密码

# 上传到指定目录
hdfs dfs -put ~/sm4_decode-3.1.0-hw-ei-311006-jar-with-dependencies.jar /tmp

# 授权
hdfs dfs -chmod 777 /tmp/sm4_decode-3.1.0-hw-ei-311006-jar-with-dependencies.jar

创建函数
# 进入到hive目录下执行beeline
beeline

# 授权admin权限
set role admin;

# 创建函数
CREATE FUNCTION sm4decode AS 'com.szc.bigdata.hive.udf.SM4Decode'
USING JAR 'hdfs:///tmp/sm4_decode-3.1.0-hw-ei-311006-jar-with-dependencies.jar';

# 创建临时函数
CREATE TEMPORARY FUNCTION sm4decode AS 'com.szc.bigdata.hive.udf.SM4Decode'
USING JAR 'hdfs:///tmp/sm4_decode-3.1.0-hw-ei-311006-jar-with-dependencies.jar';

# 使用函数
select sm4decode(decodestr, key);