package com.example.test;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;

public class WriteOrcToHdfs {
    public static void main(String[] args) throws URISyntaxException, IOException, InterruptedException {
        JobConf conf = new JobConf();
        // Connect to HDFS as user "hdfs"
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.11:8020"), conf, "hdfs");

        // OrcOutputFormat writes the ORC file; the old mapred API takes the target path as a String
        OutputFormat outputFormat = new OrcOutputFormat();
        RecordWriter writer = outputFormat.getRecordWriter(fs, conf,
                new Path("hdfs://192.168.1.11:8020/data/warehouse/rsd/student/wew.txt").toString(),
                Reporter.NULL);

        // OrcSerde converts a Java object into a Writable ORC row; the ObjectInspector
        // is derived from MyRow's fields by reflection and defines the ORC struct schema
        OrcSerde serde = new OrcSerde();
        StructObjectInspector inspector = (StructObjectInspector) ObjectInspectorFactory
                .getReflectionObjectInspector(MyRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        writer.write(NullWritable.get(), serde.serialize(new MyRow(1, "vvvv", 2, "vvv"), inspector));

        // Close the writer before the FileSystem, otherwise the final flush fails
        writer.close(Reporter.NULL);
        fs.close();
    }

    static class MyRow implements Writable {
        long id;
        String name;
        int age;
        String tel;

        MyRow(long id, String name, int age, String tel) {
            this.id = id;
            this.name = name;
            this.age = age;
            this.tel = tel;
        }

        // The Writable methods are never called here: OrcSerde serializes through the
        // reflection ObjectInspector, not through write()/readFields()
        public void readFields(DataInput arg0) throws IOException {
            throw new UnsupportedOperationException("no read");
        }

        public void write(DataOutput arg0) throws IOException {
            throw new UnsupportedOperationException("no write");
        }
    }
}
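
To sanity-check the file the writer produced, you can read it back with the same Hive ORC package. The sketch below is a minimal example, not part of the original program: the class name ReadOrcFromHdfs is made up for illustration, the host and path are the ones used by the writer above, and OrcFile.createReader(fs, path) assumes a Hive 1.x-era client (it was deprecated in later versions in favor of a ReaderOptions variant).

import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;
import org.apache.hadoop.mapred.JobConf;

public class ReadOrcFromHdfs {
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.11:8020"), conf, "hdfs");

        // Open the ORC file written by WriteOrcToHdfs
        Reader reader = OrcFile.createReader(fs,
                new Path("hdfs://192.168.1.11:8020/data/warehouse/rsd/student/wew.txt"));

        // Iterate over the rows; each row is materialized as an OrcStruct
        RecordReader rows = reader.rows();
        Object row = null;
        while (rows.hasNext()) {
            row = rows.next(row);
            System.out.println(row);  // should print the struct written above: {1, vvvv, 2, vvv}
        }
        rows.close();
        fs.close();
    }
}

Alternatively, Hive ships a command-line dump tool, so a quick check from a shell is: hive --orcfiledump /data/warehouse/rsd/student/wew.txt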