/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 18 | |
---|
| 19 | package org.apache.hadoop.record; |
---|
| 20 | |
---|
| 21 | import java.io.IOException; |
---|
| 22 | import junit.framework.*; |
---|
| 23 | import java.io.File; |
---|
| 24 | import java.io.FileInputStream; |
---|
| 25 | import java.io.FileOutputStream; |
---|
| 26 | import java.util.ArrayList; |
---|
| 27 | import java.util.TreeMap; |
---|
| 28 | import org.apache.hadoop.record.meta.RecordTypeInfo; |
---|
| 29 | |
---|
| 30 | /** |
---|
| 31 | */ |
---|
| 32 | public class TestRecordVersioning extends TestCase { |
---|
| 33 | |
---|
| 34 | public TestRecordVersioning(String testName) { |
---|
| 35 | super(testName); |
---|
| 36 | } |
---|
| 37 | |
---|
| 38 | protected void setUp() throws Exception { |
---|
| 39 | } |
---|
| 40 | |
---|
| 41 | protected void tearDown() throws Exception { |
---|
| 42 | } |
---|
| 43 | |
---|
| 44 | /* |
---|
| 45 | * basic versioning |
---|
| 46 | * write out a record and its type info, read it back using its typeinfo |
---|
| 47 | */ |
---|
| 48 | public void testBasic() { |
---|
| 49 | File tmpfile, tmpRTIfile; |
---|
| 50 | try { |
---|
| 51 | tmpfile = File.createTempFile("hadooprec", ".dat"); |
---|
| 52 | tmpRTIfile = File.createTempFile("hadooprti", ".dat"); |
---|
| 53 | FileOutputStream ostream = new FileOutputStream(tmpfile); |
---|
| 54 | BinaryRecordOutput out = new BinaryRecordOutput(ostream); |
---|
| 55 | FileOutputStream oRTIstream = new FileOutputStream(tmpRTIfile); |
---|
| 56 | BinaryRecordOutput outRTI = new BinaryRecordOutput(oRTIstream); |
---|
| 57 | RecRecord1 r1 = new RecRecord1(); |
---|
| 58 | r1.setBoolVal(true); |
---|
| 59 | r1.setByteVal((byte)0x66); |
---|
| 60 | r1.setFloatVal(3.145F); |
---|
| 61 | r1.setDoubleVal(1.5234); |
---|
| 62 | r1.setIntVal(-4567); |
---|
| 63 | r1.setLongVal(-2367L); |
---|
| 64 | r1.setStringVal("random text"); |
---|
| 65 | r1.setBufferVal(new Buffer()); |
---|
| 66 | r1.setVectorVal(new ArrayList<String>()); |
---|
| 67 | r1.setMapVal(new TreeMap<String,String>()); |
---|
| 68 | RecRecord0 r0 = new RecRecord0(); |
---|
| 69 | r0.setStringVal("other random text"); |
---|
| 70 | r1.setRecordVal(r0); |
---|
| 71 | r1.serialize(out, ""); |
---|
| 72 | ostream.close(); |
---|
| 73 | // write out the type info |
---|
| 74 | RecRecord1.getTypeInfo().serialize(outRTI); |
---|
| 75 | oRTIstream.close(); |
---|
| 76 | |
---|
| 77 | // read |
---|
| 78 | FileInputStream istream = new FileInputStream(tmpfile); |
---|
| 79 | BinaryRecordInput in = new BinaryRecordInput(istream); |
---|
| 80 | FileInputStream iRTIstream = new FileInputStream(tmpRTIfile); |
---|
| 81 | BinaryRecordInput inRTI = new BinaryRecordInput(iRTIstream); |
---|
| 82 | RecordTypeInfo rti = new RecordTypeInfo(); |
---|
| 83 | rti.deserialize(inRTI); |
---|
| 84 | iRTIstream.close(); |
---|
| 85 | RecRecord1.setTypeFilter(rti); |
---|
| 86 | RecRecord1 r2 = new RecRecord1(); |
---|
| 87 | r2.deserialize(in, ""); |
---|
| 88 | istream.close(); |
---|
| 89 | tmpfile.delete(); |
---|
| 90 | tmpRTIfile.delete(); |
---|
| 91 | assertTrue("Serialized and deserialized versioned records do not match.", r1.equals(r2)); |
---|
| 92 | } catch (IOException ex) { |
---|
| 93 | ex.printStackTrace(); |
---|
| 94 | } |
---|
| 95 | } |
---|
| 96 | |
---|
| 97 | /* |
---|
| 98 | * versioning |
---|
| 99 | * write out a record and its type info, read back a similar record using the written record's typeinfo |
---|
| 100 | */ |
---|
| 101 | public void testVersioning() { |
---|
| 102 | File tmpfile, tmpRTIfile; |
---|
| 103 | try { |
---|
| 104 | tmpfile = File.createTempFile("hadooprec", ".dat"); |
---|
| 105 | tmpRTIfile = File.createTempFile("hadooprti", ".dat"); |
---|
| 106 | FileOutputStream ostream = new FileOutputStream(tmpfile); |
---|
| 107 | BinaryRecordOutput out = new BinaryRecordOutput(ostream); |
---|
| 108 | FileOutputStream oRTIstream = new FileOutputStream(tmpRTIfile); |
---|
| 109 | BinaryRecordOutput outRTI = new BinaryRecordOutput(oRTIstream); |
---|
| 110 | |
---|
| 111 | // we create an array of records to write |
---|
| 112 | ArrayList<RecRecordOld> recsWrite = new ArrayList<RecRecordOld>(); |
---|
| 113 | int i, j, k, l; |
---|
| 114 | for (i=0; i<5; i++) { |
---|
| 115 | RecRecordOld s1Rec = new RecRecordOld(); |
---|
| 116 | |
---|
| 117 | s1Rec.setName("This is record s1: " + i); |
---|
| 118 | |
---|
| 119 | ArrayList<Long> iA = new ArrayList<Long>(); |
---|
| 120 | for (j=0; j<3; j++) { |
---|
| 121 | iA.add(new Long(i+j)); |
---|
| 122 | } |
---|
| 123 | s1Rec.setIvec(iA); |
---|
| 124 | |
---|
| 125 | ArrayList<ArrayList<RecRecord0>> ssVec = new ArrayList<ArrayList<RecRecord0>>(); |
---|
| 126 | for (j=0; j<2; j++) { |
---|
| 127 | ArrayList<RecRecord0> sVec = new ArrayList<RecRecord0>(); |
---|
| 128 | for (k=0; k<3; k++) { |
---|
| 129 | RecRecord0 sRec = new RecRecord0("This is record s: ("+j+": "+k+")"); |
---|
| 130 | sVec.add(sRec); |
---|
| 131 | } |
---|
| 132 | ssVec.add(sVec); |
---|
| 133 | } |
---|
| 134 | s1Rec.setSvec(ssVec); |
---|
| 135 | |
---|
| 136 | s1Rec.setInner(new RecRecord0("This is record s: " + i)); |
---|
| 137 | |
---|
| 138 | ArrayList<ArrayList<ArrayList<String>>> aaaVec = new ArrayList<ArrayList<ArrayList<String>>>(); |
---|
| 139 | for (l=0; l<2; l++) { |
---|
| 140 | ArrayList<ArrayList<String>> aaVec = new ArrayList<ArrayList<String>>(); |
---|
| 141 | for (j=0; j<2; j++) { |
---|
| 142 | ArrayList<String> aVec = new ArrayList<String>(); |
---|
| 143 | for (k=0; k<3; k++) { |
---|
| 144 | aVec.add(new String("THis is a nested string: (" + l + ": " + j + ": " + k + ")")); |
---|
| 145 | } |
---|
| 146 | aaVec.add(aVec); |
---|
| 147 | } |
---|
| 148 | aaaVec.add(aaVec); |
---|
| 149 | } |
---|
| 150 | s1Rec.setStrvec(aaaVec); |
---|
| 151 | |
---|
| 152 | s1Rec.setI1(100+i); |
---|
| 153 | |
---|
| 154 | java.util.TreeMap<Byte,String> map1 = new java.util.TreeMap<Byte,String>(); |
---|
| 155 | map1.put(new Byte("23"), "23"); |
---|
| 156 | map1.put(new Byte("11"), "11"); |
---|
| 157 | s1Rec.setMap1(map1); |
---|
| 158 | |
---|
| 159 | java.util.TreeMap<Integer,Long> m1 = new java.util.TreeMap<Integer,Long>(); |
---|
| 160 | java.util.TreeMap<Integer,Long> m2 = new java.util.TreeMap<Integer,Long>(); |
---|
| 161 | m1.put(new Integer(5), 5L); |
---|
| 162 | m1.put(new Integer(10), 10L); |
---|
| 163 | m2.put(new Integer(15), 15L); |
---|
| 164 | m2.put(new Integer(20), 20L); |
---|
| 165 | java.util.ArrayList<java.util.TreeMap<Integer,Long>> vm1 = new java.util.ArrayList<java.util.TreeMap<Integer,Long>>(); |
---|
| 166 | vm1.add(m1); |
---|
| 167 | vm1.add(m2); |
---|
| 168 | s1Rec.setMvec1(vm1); |
---|
| 169 | java.util.ArrayList<java.util.TreeMap<Integer,Long>> vm2 = new java.util.ArrayList<java.util.TreeMap<Integer,Long>>(); |
---|
| 170 | vm2.add(m1); |
---|
| 171 | s1Rec.setMvec2(vm2); |
---|
| 172 | |
---|
| 173 | // add to our list |
---|
| 174 | recsWrite.add(s1Rec); |
---|
| 175 | } |
---|
| 176 | |
---|
| 177 | // write out to file |
---|
| 178 | for (RecRecordOld rec: recsWrite) { |
---|
| 179 | rec.serialize(out); |
---|
| 180 | } |
---|
| 181 | ostream.close(); |
---|
| 182 | // write out the type info |
---|
| 183 | RecRecordOld.getTypeInfo().serialize(outRTI); |
---|
| 184 | oRTIstream.close(); |
---|
| 185 | |
---|
| 186 | // read |
---|
| 187 | FileInputStream istream = new FileInputStream(tmpfile); |
---|
| 188 | BinaryRecordInput in = new BinaryRecordInput(istream); |
---|
| 189 | FileInputStream iRTIstream = new FileInputStream(tmpRTIfile); |
---|
| 190 | BinaryRecordInput inRTI = new BinaryRecordInput(iRTIstream); |
---|
| 191 | RecordTypeInfo rti = new RecordTypeInfo(); |
---|
| 192 | |
---|
| 193 | // read type info |
---|
| 194 | rti.deserialize(inRTI); |
---|
| 195 | iRTIstream.close(); |
---|
| 196 | RecRecordNew.setTypeFilter(rti); |
---|
| 197 | |
---|
| 198 | // read records |
---|
| 199 | ArrayList<RecRecordNew> recsRead = new ArrayList<RecRecordNew>(); |
---|
| 200 | for (i=0; i<recsWrite.size(); i++) { |
---|
| 201 | RecRecordNew s2Rec = new RecRecordNew(); |
---|
| 202 | s2Rec.deserialize(in); |
---|
| 203 | recsRead.add(s2Rec); |
---|
| 204 | } |
---|
| 205 | istream.close(); |
---|
| 206 | tmpfile.delete(); |
---|
| 207 | tmpRTIfile.delete(); |
---|
| 208 | |
---|
| 209 | // compare |
---|
| 210 | for (i=0; i<recsRead.size(); i++) { |
---|
| 211 | RecRecordOld s1Out = recsWrite.get(i); |
---|
| 212 | RecRecordNew s2In = recsRead.get(i); |
---|
| 213 | assertTrue("Incorrectly read name2 field", null == s2In.getName2()); |
---|
| 214 | assertTrue("Error comparing inner fields", (0 == s1Out.getInner().compareTo(s2In.getInner()))); |
---|
| 215 | assertTrue("Incorrectly read ivec field", null == s2In.getIvec()); |
---|
| 216 | assertTrue("Incorrectly read svec field", null == s2In.getSvec()); |
---|
| 217 | for (j=0; j<s2In.getStrvec().size(); j++) { |
---|
| 218 | ArrayList<ArrayList<String>> ss2Vec = s2In.getStrvec().get(j); |
---|
| 219 | ArrayList<ArrayList<String>> ss1Vec = s1Out.getStrvec().get(j); |
---|
| 220 | for (k=0; k<ss2Vec.size(); k++) { |
---|
| 221 | ArrayList<String> s2Vec = ss2Vec.get(k); |
---|
| 222 | ArrayList<String> s1Vec = ss1Vec.get(k); |
---|
| 223 | for (l=0; l<s2Vec.size(); l++) { |
---|
| 224 | assertTrue("Error comparing strVec fields", (0 == s2Vec.get(l).compareTo(s1Vec.get(l)))); |
---|
| 225 | } |
---|
| 226 | } |
---|
| 227 | } |
---|
| 228 | assertTrue("Incorrectly read map1 field", null == s2In.getMap1()); |
---|
| 229 | for (j=0; j<s2In.getMvec2().size(); j++) { |
---|
| 230 | assertTrue("Error comparing mvec2 fields", (s2In.getMvec2().get(j).equals(s1Out.getMvec2().get(j)))); |
---|
| 231 | } |
---|
| 232 | } |
---|
| 233 | |
---|
| 234 | } catch (IOException ex) { |
---|
| 235 | ex.printStackTrace(); |
---|
| 236 | } |
---|
| 237 | } |
---|
| 238 | |
---|
| 239 | } |
---|