Commit ce2a82c6 by 李辅翼

v3

parent 59759779
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
<properties> <properties>
<java.version>1.8</java.version> <java.version>1.8</java.version>
<hbase.version>1.2.0-cdh5.15.1</hbase.version>
</properties> </properties>
...@@ -28,6 +29,29 @@ ...@@ -28,6 +29,29 @@
<dependencies> <dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-annotation_1.0_spec</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-jaspic_1.0_spec</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
...@@ -35,12 +59,18 @@ ...@@ -35,12 +59,18 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!--<dependency>-->
<!--<groupId>oracle</groupId>-->
<!--<artifactId>ojdbc6</artifactId>-->
<!--<version>11.2.0.3</version>-->
<!--<scope>system</scope>-->
<!--<systemPath>${project.basedir}/src/main/resources/lib/ojdbc6-11.2.0.3.jar</systemPath>-->
<!--</dependency>-->
<dependency> <dependency>
<groupId>oracle</groupId> <groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId> <artifactId>ojdbc6</artifactId>
<version>11.2.0.3</version> <version>11.2.0.3</version>
<scope>system</scope>
<systemPath>${project.basedir}/src/main/resources/lib/ojdbc6-11.2.0.3.jar</systemPath>
</dependency> </dependency>
<dependency> <dependency>
...@@ -56,6 +86,14 @@ ...@@ -56,6 +86,14 @@
<groupId>javax.servlet</groupId> <groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId> <artifactId>servlet-api</artifactId>
</exclusion> </exclusion>
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-annotation_1.0_spec</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>geronimo-jaspic_1.0_spec</artifactId>
</exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency> <dependency>
...@@ -63,6 +101,14 @@ ...@@ -63,6 +101,14 @@
<artifactId>spring-boot-starter-web</artifactId> <artifactId>spring-boot-starter-web</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.16.0</version>
</dependency>
<dependency>
<groupId>redis.clients</groupId> <groupId>redis.clients</groupId>
<artifactId>jedis</artifactId> <artifactId>jedis</artifactId>
<version>2.9.0</version> <version>2.9.0</version>
...@@ -80,6 +126,9 @@ ...@@ -80,6 +126,9 @@
<artifactId>fastjson</artifactId> <artifactId>fastjson</artifactId>
<version>1.2.51</version> <version>1.2.51</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId> <artifactId>commons-lang3</artifactId>
...@@ -91,11 +140,6 @@ ...@@ -91,11 +140,6 @@
<version>1.26.5</version> <version>1.26.5</version>
</dependency> </dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
</dependencies> </dependencies>
<build> <build>
......
...@@ -2,16 +2,25 @@ package com.hikcreate.controller; ...@@ -2,16 +2,25 @@ package com.hikcreate.controller;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.hikcreate.drv_photo_pic.DrvPhoto; import com.hikcreate.drv_photo_pic.DrvPhoto;
import com.hikcreate.drv_photo_pic.VioPic;
import com.hikcreate.entity.PicByte;
import com.hikcreate.service.fdfs.service.FileService; import com.hikcreate.service.fdfs.service.FileService;
import com.hikcreate.utils.DateUtil;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.validation.annotation.Validated; import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.*;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.client.RestTemplate;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.imageio.stream.FileImageOutputStream;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import java.io.*; import java.io.*;
import java.util.Date;
import java.util.Map;
import org.springframework.http.*;
/** /**
...@@ -21,60 +30,69 @@ import java.io.*; ...@@ -21,60 +30,69 @@ import java.io.*;
*/ */
@RestController @RestController
@RequestMapping("/pic") @RequestMapping("/pic")
@Validated
public class PicController { public class PicController {
@Autowired @Autowired
private VioPic vioPic;
@Autowired
private DrvPhoto drvPhoto; private DrvPhoto drvPhoto;
@Autowired @Autowired
private FileService fileService; private FileService fileService;
@Value("${ftpUrl}")
private String ftpUrl;
@GetMapping("/drvPhotoHis") @GetMapping("/drvPhotoHis")
public void getHisDrvPhoto(@RequestParam("time") String time, HttpServletResponse response){ public void getHisDrvPhoto(@RequestParam("time") String time, HttpServletResponse response) {
try { try {
boolean result=drvPhoto.getHisDrvPhoto(time); boolean result = drvPhoto.getHisDrvPhoto(time);
JSONObject jsonObject = new JSONObject(); JSONObject jsonObject = new JSONObject();
jsonObject.put("result",result); jsonObject.put("result", result);
response.setContentType("application/json;charset=UTF-8"); response.setContentType("application/json;charset=UTF-8");
response.getWriter().write(jsonObject.toJSONString()); response.getWriter().write(jsonObject.toJSONString());
}catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
@GetMapping("/testHttp")
@GetMapping("/testFastDfs") public void testHttp(@RequestParam("param") String param, HttpServletResponse response) {
public void testFastDfs(@RequestParam("url") String url, HttpServletResponse response){
try { try {
File file=new File("C:\\Users\\lifuyi5\\Downloads\\2019-05-15"); RestTemplate restTemplate = new RestTemplate();
FileInputStream fis = new FileInputStream(file); MultiValueMap<String, String> params = new LinkedMultiValueMap<String, String>();
ByteArrayOutputStream bos = new ByteArrayOutputStream(fis.available()); params.add("urls", param);
byte[] b = new byte[1024]; HttpHeaders headers = new HttpHeaders();
int len = -1; HttpMethod method = HttpMethod.POST;
while((len = fis.read(b)) != -1) { headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
bos.write(b, 0, len); HttpEntity<MultiValueMap<String, String>> requestEntity = new HttpEntity<>(params, headers);
ResponseEntity<PicByte> resp = restTemplate.exchange(ftpUrl, method, requestEntity, PicByte.class);
Map<String, byte[]> map = resp.getBody().getMap();
//遍历Map
String path = "/home/pic/";
File file;
for (Map.Entry<String, byte[]> entry : map.entrySet()) {
String[] split = entry.getKey().split("/");
file = new File(path + split[split.length - 1]);
if (file.exists()) {
file.delete();
}
FileImageOutputStream imageOutput = new FileImageOutputStream(file);
imageOutput.write(entry.getValue(), 0, entry.getValue().length);
imageOutput.close();
} }
byte[] fileByte = bos.toByteArray();
String urlFast = fileService.uploadFile(fileByte, "2019-05-15");
response.setContentType("application/json;charset=UTF-8"); response.setContentType("application/json;charset=UTF-8");
response.getWriter().write(urlFast); response.getWriter().write(path);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
@GetMapping("/testUrl") @GetMapping("/testIncVio")
public void testUrl(@RequestParam("url") String url, HttpServletResponse response){ public void testFastDfs(@RequestParam("past") int past) {
String date = DateUtil.getDate();
try { vioPic.getIncrementVioPic(DateUtil.formatDate(DateUtil.getPastDate(new Date(),-past)),date);
System.out.println("dddddddddddddd");
response.setContentType("application/json;charset=UTF-8");
response.getWriter().write("cjdncvjf");
} catch (IOException e) {
e.printStackTrace();
}
} }
} }
...@@ -12,4 +12,6 @@ public interface DrvPhoto { ...@@ -12,4 +12,6 @@ public interface DrvPhoto {
boolean getHisDrvPhoto(String time); boolean getHisDrvPhoto(String time);
void getIncrementDrvPhoto();
} }
package com.hikcreate.drv_photo_pic;
/**
 * Fetches violation pictures for a date range and stores them
 * (implementation lives elsewhere in the project).
 */
public interface VioPic {
/**
 * Processes incremental violation pictures between the two days.
 *
 * @param startDay range boundary (inclusive/exclusive semantics are defined by the
 *                 implementation's SQL — NOTE(review): PicController and PicSchedule
 *                 pass the arguments in opposite orders; confirm which is intended)
 * @param endDay   the other range boundary
 */
void getIncrementVioPic(String startDay,String endDay);
}
package com.hikcreate.entity;
import lombok.Data;
import java.util.Map;
/**
 * Transport object for a batch of pictures fetched over HTTP.
 * Lombok {@code @Data} generates the getters/setters callers use
 * (e.g. {@code resp.getBody().getMap()} in PicController).
 */
@Data
public class PicByte {
    // Keys look like URL-style paths (callers split on '/' to derive a file name)
    // and values are the raw image bytes — TODO confirm against the producer side.
    // Was package-private; narrowed to private since access goes through Lombok accessors.
    private Map<String, byte[]> map;
}
package com.hikcreate.schedul;
import com.hikcreate.drv_photo_pic.DrvPhoto;
import com.hikcreate.drv_photo_pic.VioPic;
import com.hikcreate.utils.DateUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.util.Date;
@Component
public class PicSchedule {

    @Autowired
    private DrvPhoto drvPhoto;

    @Autowired
    private VioPic vioPic;

    // Number of days to look back when syncing violation pictures.
    @Value("${pastDay}")
    private int pastDay;

    /**
     * Syncs incremental driver photos daily at 10:00.
     *
     * <p>Spring cron is six fields (sec min hour day month dow); the previous
     * expression {@code "0 10 * * * *"} fired at minute 10 of EVERY hour, not
     * daily at 10am as the original comment stated.
     */
    @Scheduled(cron = "0 0 10 * * ?")
    public void getIncrementDrvPhoto() {
        drvPhoto.getIncrementDrvPhoto();
    }

    /**
     * Syncs incremental violation pictures for the last {@code pastDay} days.
     * Schedule intentionally left disabled, as in the original.
     */
    // @Scheduled(cron = "0 0 10 * * ?")
    public void getIncrementVioPic() {
        String today = DateUtil.getDate();
        // NOTE(review): PicController#testFastDfs passes (pastDate, today) while this
        // passes (today, pastDate); one of the two call sites is likely reversed —
        // confirm against the implementation's SQL parameter order before changing.
        vioPic.getIncrementVioPic(today, DateUtil.formatDate(DateUtil.getPastDate(new Date(), -pastDay)));
    }
}
package com.hikcreate.service.fdfs.service.impl; package com.hikcreate.service.fdfs.service.impl;
import com.github.tobato.fastdfs.domain.fdfs.StorePath;
import com.github.tobato.fastdfs.domain.upload.FastImageFile;
import com.github.tobato.fastdfs.service.FastFileStorageClient; import com.github.tobato.fastdfs.service.FastFileStorageClient;
import com.hikcreate.service.fdfs.FastDFSClient; import com.hikcreate.service.fdfs.FastDFSClient;
import com.hikcreate.service.fdfs.service.FileService; import com.hikcreate.service.fdfs.service.FileService;
import org.apache.commons.io.FilenameUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils; import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import javax.annotation.Resource; import javax.annotation.Resource;
...@@ -39,6 +35,7 @@ public class FileServiceImpl implements FileService { ...@@ -39,6 +35,7 @@ public class FileServiceImpl implements FileService {
return "/group" + path.split("group")[1]; return "/group" + path.split("group")[1];
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace();
logger.error("Upload Img Error, msg ={}", e); logger.error("Upload Img Error, msg ={}", e);
throw new RuntimeException("上传图片失败"); throw new RuntimeException("上传图片失败");
} }
......
package com.hikcreate.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;

import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
/**
 * Small HTTP helpers for form-style and JSON POST requests.
 */
public class HttpUtils {

    /**
     * Sends a POST request with a raw form-encoded body and returns the
     * response body with line breaks removed.
     *
     * @param url   target URL
     * @param param request body, e.g. {@code "name1=value1&name2=value2"}
     * @return concatenated response lines, or {@code ""} if the request failed
     */
    public static String sendPost(String url, String param) {
        StringBuilder result = new StringBuilder();
        try {
            URL realUrl = new URL(url);
            URLConnection conn = realUrl.openConnection();
            conn.setRequestProperty("accept", "*/*");
            conn.setRequestProperty("connection", "Keep-Alive");
            conn.setRequestProperty("user-agent",
                    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1;SV1)");
            // doOutput turns the request into a POST for HttpURLConnection.
            conn.setDoOutput(true);
            conn.setDoInput(true);
            // try-with-resources replaces the manual finally/close boilerplate;
            // explicit UTF-8 replaces the platform-default charset used before.
            try (PrintWriter out = new PrintWriter(
                         new OutputStreamWriter(conn.getOutputStream(), StandardCharsets.UTF_8));
                 BufferedReader in = new BufferedReader(
                         new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                out.print(param);
                out.flush();
                String line;
                while ((line = in.readLine()) != null) {
                    result.append(line);
                }
            }
        } catch (Exception e) {
            System.out.println("发送 POST 请求出现异常!" + e);
            e.printStackTrace();
        }
        return result.toString();
    }

    /**
     * Sends a POST request with a JSON body via Apache HttpClient.
     *
     * @param url  target URL
     * @param json JSON payload
     * @return the response body, or the default failure message if the call failed
     */
    public static String HttpPostWithJson(String url, String json) {
        String returnValue = "这是默认返回值,接口调用失败";
        // Create the client once; the original created it twice and leaked the first.
        CloseableHttpClient httpClient = HttpClients.createDefault();
        ResponseHandler<String> responseHandler = new BasicResponseHandler();
        try {
            HttpPost httpPost = new HttpPost(url);
            StringEntity requestEntity = new StringEntity(json, "utf-8");
            requestEntity.setContentEncoding("UTF-8");
            httpPost.setHeader("Content-type", "application/json");
            httpPost.setEntity(requestEntity);
            // execute with a ResponseHandler returns the body directly.
            returnValue = httpClient.execute(httpPost, responseHandler);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                httpClient.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return returnValue;
    }
}
package com.hikcreate.utils.redis;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* @auther: lifuyi
* @date: 2019/1/31 10:55
* @description:
*/
/**
 * Loads {@code common-config.properties} from the classpath at class-init time
 * and exposes static property lookups.
 */
public class PropertyUtil {

    private static final Logger logger = LoggerFactory.getLogger(PropertyUtil.class);
    private static Properties props;

    static {
        loadProps("common-config.properties");
    }

    /**
     * Returns the value for {@code key}.
     *
     * @throws NullPointerException if properties failed to load
     */
    public static String getProperty(String key) {
        if (null == props) {
            throw new NullPointerException("props is null");
        }
        return props.getProperty(key);
    }

    /**
     * Returns the value for {@code key}, or {@code defaultValue} when absent.
     *
     * @throws NullPointerException if properties failed to load
     */
    public static String getProperty(String key, String defaultValue) {
        if (null == props) {
            throw new NullPointerException("props is null");
        }
        return props.getProperty(key, defaultValue);
    }

    // Lazily (re)loads from an alternate path; private helper, now static since
    // it touches only static state.
    private static Properties getProps(String path) {
        if (props == null) {
            loadProps(path);
        }
        return props;
    }

    // Loads the properties from the classpath. try-with-resources replaces the
    // original finally block that silently swallowed close() failures; the
    // explicit null check avoids the NPE Properties.load(null) threw when the
    // resource was missing (getResourceAsStream returns null, it never throws
    // FileNotFoundException). Also drops the dead `path = path;` self-assignment.
    private synchronized static void loadProps(String path) {
        props = new Properties();
        try (InputStream in = PropertyUtil.class.getClassLoader().getResourceAsStream(path)) {
            if (in == null) {
                logger.error("loadProps error: resource {} not found on classpath", path);
                return;
            }
            props.load(in);
        } catch (IOException e) {
            logger.error("loadProps error", e);
        }
    }
}
package com.hikcreate.utils.redis; package com.hikcreate.utils.redis;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import redis.clients.jedis.JedisCluster; import redis.clients.jedis.JedisCluster;
import java.util.List; import java.util.List;
...@@ -10,8 +13,12 @@ import java.util.Set; ...@@ -10,8 +13,12 @@ import java.util.Set;
* @date: 2019/1/31 10:53 * @date: 2019/1/31 10:53
* @description: * @description:
*/ */
@Component
public class RedisClient { public class RedisClient {
@Value("${redis.server}")
private String redisServer;
public final static String VIRTUAL_COURSE_PREX = "_lc_vc_"; public final static String VIRTUAL_COURSE_PREX = "_lc_vc_";
private JedisCluster jedisCluster; private JedisCluster jedisCluster;
...@@ -520,7 +527,7 @@ public class RedisClient { ...@@ -520,7 +527,7 @@ public class RedisClient {
private <R> R jedisCall(JedisFunction<R> function) { private <R> R jedisCall(JedisFunction<R> function) {
if (jedisCluster == null) { if (jedisCluster == null) {
String hosts = PropertyUtil.getProperty("redis.cluster"); String hosts = redisServer;
jedisCluster = RedisUtils.getClusterClient(hosts); jedisCluster = RedisUtils.getClusterClient(hosts);
} }
try { try {
......
package com.hikcreate.utils.redis; package com.hikcreate.utils.redis;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/** /**
* @auther: lifuyi * @auther: lifuyi
* @date: 2019/1/31 10:53 * @date: 2019/1/31 10:53
* @description: * @description:
*/ */
@Component
public class RedisClientUtil { public class RedisClientUtil {
@Value("${redis.server}")
private static String redisServer;
@Autowired
private static RedisClient redisClient; private static RedisClient redisClient;
public static RedisClient getRedisClient() { public static RedisClient getRedisClient() {
if (redisClient == null) { if (redisClient == null) {
synchronized (RedisClientUtil.class) { synchronized (RedisClientUtil.class) {
redisClient = RedisBuilder.getRedisClient(PropertyUtil.getProperty("redis.server")); redisClient = RedisBuilder.getRedisClient(redisServer);
} }
} }
return redisClient; return redisClient;
......
...@@ -4,17 +4,32 @@ password=zckj2018 ...@@ -4,17 +4,32 @@ password=zckj2018
roundDay=-5 roundDay=-5
redis.cluster=172.16.25.23:7000,172.16.25.23:7001,172.16.25.23:7002,172.16.25.24:7003,172.16.25.24:7004,172.16.25.24:7005 redis.cluster=172.16.25.23:7000,172.16.25.23:7001,172.16.25.23:7002,172.16.25.24:7003,172.16.25.24:7004,172.16.25.24:7005
redis.server=172.16.25.23:7000,172.16.25.23:7001,172.16.25.23:7002,172.16.25.24:7003,172.16.25.24:7004,172.16.25.24:7005 redis.server=172.16.25.23:7000,172.16.25.23:7001,172.16.25.23:7002,172.16.25.24:7003,172.16.25.24:7004,172.16.25.24:7005
ftpUrl=http://193.5.103.5:80/ftp/testFtpUtil
#hive
hive.url=jdbc:hive2://172.16.25.25:10000/kakou
hive.user=hdfs
hive.password=hdfs
#fdfs #fdfs
fdfs.so-timeout = 1500 fdfs.so-timeout = 1500
fdfs.connect-timeout = 600 fdfs.connect-timeout = 600
fdfs.thumb-image.width = 150 fdfs.thumb-image.width = 150
fdfs.thumb-image.height = 150 fdfs.thumb-image.height = 150
fdfs.tracker-list[0] = 10.197.236.172:22122 fdfs.tracker-list[0] = 172.16.25.23:22122
fdfs.tracker-list[1] = 10.197.236.188:22122 fdfs.tracker-list[1] = 172.16.25.26:22122
fdfs.pool.max-total = 153 fdfs.pool.max-total = 153
fdfs.pool.max-wait-millis = 102 fdfs.pool.max-wait-millis = 102
#˿
server.port=8084
hbase.zookeeper.property.clientPort=2181
hbase.zookeeper.quorum=172.16.25.25,172.16.25.28,172.16.25.24,172.16.25.26,172.16.25.27
hbase.master=172.16.25.25:60000
hbase.drv_photo.table=drv:drv_photo
hbase.driverlicense.table=drv:drivinglicense
hbase.vio.table=vio:vio_violation
#hive----sql
increment.vio.pic.sql=SELECT a.ccarnumber hphm,a.clicensetype hpzl,b.wfsj wfsj,a.cpic1path url1,a.cpic2path url2,a.cpic3path url3 from (SELECT * from kakou.vio_violation_pic_his_ods WHERE substr(export_time,0,10)<=? and substr(export_time,0,10)>?) a INNER JOIN (SELECT * from default.vio_surveil_all WHERE clsj='null' or clsj='' or clsj is null) b WHERE a.ccarnumber=b.hphm and a.clicensetype=b.hpzl and substr(a.dillegaldate,0,16)=substr(b.wfsj,0,16)
pastDay=10
<?xml version="1.0" encoding="UTF-8"?>
<!--Autogenerated by Cloudera Manager-->
<configuration>
<property>
<name>dfs.nameservices</name>
<value>nameservice1</value>
</property>
<property>
<name>dfs.client.failover.proxy.provider.nameservice1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property>
<property>
<name>dfs.ha.automatic-failover.enabled.nameservice1</name>
<value>true</value>
</property>
<property>
<name>ha.zookeeper.quorum</name>
<value>172.16.25.24:2181,172.16.25.25:2181,172.16.25.26:2181,172.16.25.27:2181,172.16.25.28:2181</value>
</property>
<property>
<name>dfs.ha.namenodes.nameservice1</name>
<value>namenode237,namenode293</value>
</property>
<property>
<name>dfs.namenode.rpc-address.nameservice1.namenode237</name>
<value>172.16.25.25:8020</value>
</property>
<property>
<name>dfs.namenode.servicerpc-address.nameservice1.namenode237</name>
<value>172.16.25.25:8022</value>
</property>
<property>
<name>dfs.namenode.http-address.nameservice1.namenode237</name>
<value>172.16.25.25:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.nameservice1.namenode237</name>
<value>172.16.25.25:50470</value>
</property>
<property>
<name>dfs.namenode.rpc-address.nameservice1.namenode293</name>
<value>172.16.25.28:8020</value>
</property>
<property>
<name>dfs.namenode.servicerpc-address.nameservice1.namenode293</name>
<value>172.16.25.28:8022</value>
</property>
<property>
<name>dfs.namenode.http-address.nameservice1.namenode293</name>
<value>172.16.25.28:50070</value>
</property>
<property>
<name>dfs.namenode.https-address.nameservice1.namenode293</name>
<value>172.16.25.28:50470</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
<property>
<name>dfs.blocksize</name>
<value>134217728</value>
</property>
<property>
<name>dfs.client.use.datanode.hostname</name>
<value>false</value>
</property>
<property>
<name>fs.permissions.umask-mode</name>
<value>022</value>
</property>
<property>
<name>dfs.namenode.acls.enabled</name>
<value>false</value>
</property>
<property>
<name>dfs.client.use.legacy.blockreader</name>
<value>false</value>
</property>
<property>
<name>dfs.client.read.shortcircuit</name>
<value>false</value>
</property>
<property>
<name>dfs.domain.socket.path</name>
<value>/var/run/hdfs-sockets/dn</value>
</property>
<property>
<name>dfs.client.read.shortcircuit.skip.checksum</name>
<value>false</value>
</property>
<property>
<name>dfs.client.domain.socket.data.traffic</name>
<value>false</value>
</property>
<property>
<name>dfs.datanode.hdfs-blocks-metadata.enabled</name>
<value>true</value>
</property>
</configuration>
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment