Hadoop KeyValueLineRecordReader source code
File path: /hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
/**
* This class treats a line in the input as a key/value pair separated by a
* separator character. The separator can be specified in config file
* under the attribute name mapreduce.input.keyvaluelinerecordreader.key.value.separator. The default
* separator is the tab character ('\t').
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class KeyValueLineRecordReader implements RecordReader<Text, Text> {

  private final LineRecordReader lineRecordReader;
  private byte separator = (byte) '\t';
  private LongWritable dummyKey;
  private Text innerValue;

  public Class getKeyClass() { return Text.class; }

  public Text createKey() {
    return new Text();
  }

  public Text createValue() {
    return new Text();
  }

  public KeyValueLineRecordReader(Configuration job, FileSplit split)
      throws IOException {
    lineRecordReader = new LineRecordReader(job, split);
    dummyKey = lineRecordReader.createKey();
    innerValue = lineRecordReader.createValue();
    String sepStr = job.get(
        "mapreduce.input.keyvaluelinerecordreader.key.value.separator", "\t");
    this.separator = (byte) sepStr.charAt(0);
  }

  public static int findSeparator(byte[] utf, int start, int length,
      byte sep) {
    return org.apache.hadoop.mapreduce.lib.input.
        KeyValueLineRecordReader.findSeparator(utf, start, length, sep);
  }

  /** Read key/value pair in a line. */
  public synchronized boolean next(Text key, Text value)
      throws IOException {
    byte[] line = null;
    int lineLen = -1;
    if (lineRecordReader.next(dummyKey, innerValue)) {
      line = innerValue.getBytes();
      lineLen = innerValue.getLength();
    } else {
      return false;
    }
    if (line == null)
      return false;
    int pos = findSeparator(line, 0, lineLen, this.separator);
    org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader.
        setKeyValue(key, value, line, lineLen, pos);
    return true;
  }

  public float getProgress() throws IOException {
    return lineRecordReader.getProgress();
  }

  public synchronized long getPos() throws IOException {
    return lineRecordReader.getPos();
  }

  public synchronized void close() throws IOException {
    lineRecordReader.close();
  }
}
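
For context, below is a minimal usage sketch that is not part of the Hadoop source: it drives KeyValueLineRecordReader directly over a local file and overrides the default tab separator through the mapreduce.input.keyvaluelinerecordreader.key.value.separator property shown in the constructor above. The file path file:///tmp/kv.txt and the '=' separator are illustrative assumptions.

// Sketch only: reads "key=value" lines from a hypothetical local file.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.KeyValueLineRecordReader;

public class KeyValueLineReaderDemo {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Use '=' instead of the default tab as the key/value separator.
    conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", "=");

    // Hypothetical input file containing lines such as "user=alice".
    Path file = new Path("file:///tmp/kv.txt");
    long len = FileSystem.getLocal(conf).getFileStatus(file).getLen();
    FileSplit split = new FileSplit(file, 0, len, (String[]) null);

    KeyValueLineRecordReader reader = new KeyValueLineRecordReader(conf, split);
    Text key = reader.createKey();
    Text value = reader.createValue();
    try {
      while (reader.next(key, value)) {
        // Lines with no separator produce the whole line as the key
        // and an empty value (see setKeyValue in the new-API reader).
        System.out.println(key + " -> " + value);
      }
    } finally {
      reader.close();
    }
  }
}

The same property is honored when this reader is used indirectly through org.apache.hadoop.mapred.KeyValueTextInputFormat in a MapReduce job, since that input format constructs KeyValueLineRecordReader for each split.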