Filesystem closed

java.io.IOException: Filesystem closed
	at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:232)
	at org.apache.hadoop.hdfs.DFSClient.access$600(DFSClient.java:70)
	at org.apache.hadoop.hdfs.DFSClient$DFSInputStream.read(DFSClient.java:1937)
	at org.apache.hadoop.hdfs.DFSClient$DFSInputStream.read(DFSClient.java:1876)
	at java.io.DataInputStream.readInt(DataInputStream.java:370)
	at org.apache.hadoop.hive.ql.io.RCFile$Reader.readRecordLength(RCFile.java:1397)
	at org.apache.hadoop.hive.ql.io.RCFile$Reader.nextKeyBuffer(RCFile.java:1436)
	at org.apache.hadoop.hive.ql.io.RCFile$Reader.next(RCFile.java:1602)
	at com.sohu.adrd.targeting.hadoop.mapreduce.RCFileRecordReader.nextKeyValue(RCFileRecordReader.java:82)
	at org.apache.hadoop.mapreduce.lib.input.DelegatingRecordReader.nextKeyValue(DelegatingRecordReader.java:85)
	at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.nextKeyValue(MapTask.java:456)
	at org.apache.hadoop.mapreduce.MapContext.nextKeyValue(MapContext.java:67)
	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:143)
	at org.apache.hadoop.mapreduce.lib.input.DelegatingMapper.run(DelegatingMapper.java:51)
	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:647)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:323)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:270)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1127)
	at org.apache.hadoop.mapred.Child.main(Child.java:264)

原因是我寫了一個方法讀取hdfs上的文件,裏面用 FileSystem.get(new Configuration()) 獲取了一個FS,並在finally中將其close()。
但是參數 fs.hdfs.impl.disable.cache 默認爲 false,FileSystem.get 返回的其實是按URI緩存的共享實例;在方法內close()掉的正是這個共享FS,導致方法外其他代碼再使用同一個FS時就報 "Filesystem closed"。
解決辦法:將 fs.hdfs.impl.disable.cache 設爲 true,讓每次get都返回新實例;或者更推薦的做法——不要close()共享實例,或改用 FileSystem.newInstance() 獲取獨立實例後自行關閉。
發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章