/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.record.compiler;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JRecord extends JCompType {

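  // Java back end. JavaRecord keeps the record's fields mapped to their Java
  // types and, via genCode(), emits the <name>.java source for the record,
  // including serialization, comparison, and runtime type info (RTI) support.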
  class JavaRecord extends JavaCompType {

    private String fullName;
    private String name;
    private String module;
    private ArrayList<JField<JavaType>> fields =
        new ArrayList<JField<JavaType>>();

    JavaRecord(String name, ArrayList<JField<JType>> flist) {
      super(name, "Record", name, "TypeID.RIOType.STRUCT");
      this.fullName = name;
      int idx = name.lastIndexOf('.');
      this.name = name.substring(idx+1);
      this.module = name.substring(0, idx);
      for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
        JField<JType> f = iter.next();
        fields.add(new JField<JavaType>(f.getName(), f.getType().getJavaType()));
      }
    }

    @Override
    String getTypeIDObjectString() {
      return "new org.apache.hadoop.record.meta.StructTypeID(" +
          fullName + ".getTypeInfo())";
    }

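    // Emits a <record>.setTypeFilter(...) call for this record when it appears
    // as a nested field of another record; nestedStructMap ensures the call is
    // generated only once per record type.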
    @Override
    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
      // ignore if we've already set the type filter for this record
      if (!nestedStructMap.containsKey(fullName)) {
        // we set the RTI filter here
        cb.append(fullName + ".setTypeFilter(rti.getNestedStructTypeInfo(\""+
            name + "\"));\n");
        nestedStructMap.put(fullName, null);
      }
    }

    // For each typeInfo in the filter, we see if there's a matching one in the
    // record. Since we store typeInfos in ArrayLists, this search is O(n^2). We
    // could do it faster if we also stored a map (of TypeInfo to index), but since
    // setupRtiFields() is called only once when deserializing, we stick with the
    // former, as the code is simpler.
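    // genSetupRtiFields() below emits a private static setupRtiFields() method
    // into the generated class. That method fills <RTI_FILTER_FIELDS> so that
    // entry i holds the 1-based index of this record's field whose FieldTypeInfo
    // equals filter field i, or 0 when there is no match; the generated
    // deserialize() then skips unmatched fields. (<...> are placeholders for the
    // identifiers defined in Consts; this is an illustrative sketch, not the
    // literal output.)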
    void genSetupRtiFields(CodeBuffer cb) {
      cb.append("private static void setupRtiFields()\n{\n");
      cb.append("if (null == " + Consts.RTI_FILTER + ") return;\n");
      cb.append("// we may already have done this\n");
      cb.append("if (null != " + Consts.RTI_FILTER_FIELDS + ") return;\n");
      cb.append("int " + Consts.RIO_PREFIX + "i, " + Consts.RIO_PREFIX + "j;\n");
      cb.append(Consts.RTI_FILTER_FIELDS + " = new int [" +
          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().size()];\n");
      cb.append("for (" + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + "i<"+
          Consts.RTI_FILTER_FIELDS + ".length; " + Consts.RIO_PREFIX + "i++) {\n");
      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = 0;\n");
      cb.append("}\n");
      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
          "FieldTypeInfo> " + Consts.RIO_PREFIX + "itFilter = " +
          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().iterator();\n");
      cb.append(Consts.RIO_PREFIX + "i=0;\n");
      cb.append("while (" + Consts.RIO_PREFIX + "itFilter.hasNext()) {\n");
      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " +
          Consts.RIO_PREFIX + "tInfoFilter = " +
          Consts.RIO_PREFIX + "itFilter.next();\n");
      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
          "FieldTypeInfo> " + Consts.RIO_PREFIX + "it = " + Consts.RTI_VAR +
          ".getFieldTypeInfos().iterator();\n");
      cb.append(Consts.RIO_PREFIX + "j=1;\n");
      cb.append("while (" + Consts.RIO_PREFIX + "it.hasNext()) {\n");
      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " +
          Consts.RIO_PREFIX + "tInfo = " + Consts.RIO_PREFIX + "it.next();\n");
      cb.append("if (" + Consts.RIO_PREFIX + "tInfo.equals(" +
          Consts.RIO_PREFIX + "tInfoFilter)) {\n");
      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = " +
          Consts.RIO_PREFIX + "j;\n");
      cb.append("break;\n");
      cb.append("}\n");
      cb.append(Consts.RIO_PREFIX + "j++;\n");
      cb.append("}\n");
      /*int ct = 0;
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        ct++;
        JField<JavaType> jf = i.next();
        JavaType type = jf.getType();
        String name = jf.getName();
        if (ct != 1) {
          cb.append("else ");
        }
        type.genRtiFieldCondition(cb, name, ct);
      }
      if (ct != 0) {
        cb.append("else {\n");
        cb.append("rtiFilterFields[i] = 0;\n");
        cb.append("}\n");
      }*/
      cb.append(Consts.RIO_PREFIX + "i++;\n");
      cb.append("}\n");
      cb.append("}\n");
    }

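    // genReadMethod/genWriteMethod emit the statements that deserialize or
    // serialize a field of this record type inside the enclosing record's
    // generated deserialize()/serialize() methods.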
    @Override
    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
      if (decl) {
        cb.append(fullName+" "+fname+";\n");
      }
      cb.append(fname+"= new "+fullName+"();\n");
      cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n");
    }

    @Override
    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
      cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n");
    }

    @Override
    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
      cb.append("{\n");
      cb.append("int r = "+fullName+
          ".Comparator.slurpRaw("+b+","+s+","+l+");\n");
      cb.append(s+"+=r; "+l+"-=r;\n");
      cb.append("}\n");
    }

    @Override
    void genCompareBytes(CodeBuffer cb) {
      cb.append("{\n");
      cb.append("int r1 = "+fullName+
          ".Comparator.compareRaw(b1,s1,l1,b2,s2,l2);\n");
      cb.append("if (r1 <= 0) { return r1; }\n");
      cb.append("s1+=r1; s2+=r1; l1-=r1; l2-=r1;\n");
      cb.append("}\n");
    }

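    // Emits <name>.java under destDir/<package path>. The generated class
    // extends org.apache.hadoop.record.Record and contains the record type info
    // declarations and static initializer, field declarations with
    // getters/setters, constructors, serialize()/deserialize(), compareTo(),
    // equals(), hashCode(), clone(), signature(), and a raw-bytes Comparator
    // registered in a static block. As an illustrative sketch only (exact
    // identifiers come from Consts), the output has roughly this shape:
    //
    //   public class <Name> extends org.apache.hadoop.record.Record {
    //     private static final org.apache.hadoop.record.meta.RecordTypeInfo <RTI_VAR>;
    //     ... field declarations, getters/setters ...
    //     public void serialize(final org.apache.hadoop.record.RecordOutput <out>,
    //                           final String <tag>) throws java.io.IOException { ... }
    //     public void deserialize(final org.apache.hadoop.record.RecordInput <in>,
    //                             final String <tag>) throws java.io.IOException { ... }
    //     public int compareTo(final Object <peer>) throws ClassCastException { ... }
    //     public static class Comparator
    //         extends org.apache.hadoop.record.RecordComparator { ... }
    //   }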
    void genCode(String destDir, ArrayList<String> options) throws IOException {
      String pkg = module;
      String pkgpath = pkg.replaceAll("\\.", "/");
      File pkgdir = new File(destDir, pkgpath);

      final File jfile = new File(pkgdir, name+".java");
      if (!pkgdir.exists()) {
        // create the pkg directory
        boolean ret = pkgdir.mkdirs();
        if (!ret) {
          throw new IOException("Cannot create directory: "+pkgpath);
        }
      } else if (!pkgdir.isDirectory()) {
        // not a directory
        throw new IOException(pkgpath+" is not a directory.");
      }

      CodeBuffer cb = new CodeBuffer();
      cb.append("// File generated by hadoop record compiler. Do not edit.\n");
      cb.append("package "+module+";\n\n");
      cb.append("public class "+name+
          " extends org.apache.hadoop.record.Record {\n");

      // type information declarations
      cb.append("private static final " +
          "org.apache.hadoop.record.meta.RecordTypeInfo " +
          Consts.RTI_VAR + ";\n");
      cb.append("private static " +
          "org.apache.hadoop.record.meta.RecordTypeInfo " +
          Consts.RTI_FILTER + ";\n");
      cb.append("private static int[] " + Consts.RTI_FILTER_FIELDS + ";\n");

      // static init for type information
      cb.append("static {\n");
      cb.append(Consts.RTI_VAR + " = " +
          "new org.apache.hadoop.record.meta.RecordTypeInfo(\"" +
          name + "\");\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genStaticTypeInfo(cb, name);
      }
      cb.append("}\n\n");

      // field definitions
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genDecl(cb, name);
      }

      // default constructor
      cb.append("public "+name+"() { }\n");

      // constructor
      cb.append("public "+name+"(\n");
      int fIdx = 0;
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genConstructorParam(cb, name);
        cb.append((!i.hasNext())?"":",\n");
      }
      cb.append(") {\n");
      fIdx = 0;
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genConstructorSet(cb, name);
      }
      cb.append("}\n");

      // getter/setter for type info
      cb.append("public static org.apache.hadoop.record.meta.RecordTypeInfo"
          + " getTypeInfo() {\n");
      cb.append("return " + Consts.RTI_VAR + ";\n");
      cb.append("}\n");
      cb.append("public static void setTypeFilter("
          + "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n");
      cb.append("if (null == rti) return;\n");
      cb.append(Consts.RTI_FILTER + " = rti;\n");
      cb.append(Consts.RTI_FILTER_FIELDS + " = null;\n");
      // set RTIFilter for nested structs.
      // To prevent setting up the type filter for the same struct more than once,
      // we use a hash map to keep track of what we've set.
      Map<String, Integer> nestedStructMap = new HashMap<String, Integer>();
      for (JField<JavaType> jf : fields) {
        JavaType type = jf.getType();
        type.genSetRTIFilter(cb, nestedStructMap);
      }
      cb.append("}\n");

      // setupRtiFields()
      genSetupRtiFields(cb);

      // getters/setters for member variables
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genGetSet(cb, name);
      }

      // serialize()
      cb.append("public void serialize("+
          "final org.apache.hadoop.record.RecordOutput " +
          Consts.RECORD_OUTPUT + ", final String " + Consts.TAG + ")\n"+
          "throws java.io.IOException {\n");
      cb.append(Consts.RECORD_OUTPUT + ".startRecord(this," + Consts.TAG + ");\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genWriteMethod(cb, name, name);
      }
      cb.append(Consts.RECORD_OUTPUT + ".endRecord(this," + Consts.TAG+");\n");
      cb.append("}\n");

      // deserializeWithoutFilter()
      cb.append("private void deserializeWithoutFilter("+
          "final org.apache.hadoop.record.RecordInput " +
          Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
          "throws java.io.IOException {\n");
      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genReadMethod(cb, name, name, false);
      }
      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
      cb.append("}\n");

      // deserialize()
      cb.append("public void deserialize(final " +
          "org.apache.hadoop.record.RecordInput " +
          Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
          "throws java.io.IOException {\n");
      cb.append("if (null == " + Consts.RTI_FILTER + ") {\n");
      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " +
          Consts.TAG + ");\n");
      cb.append("return;\n");
      cb.append("}\n");
      cb.append("// if we're here, we need to read based on version info\n");
      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
      cb.append("setupRtiFields();\n");
      cb.append("for (int " + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX +
          "i<" + Consts.RTI_FILTER + ".getFieldTypeInfos().size(); " +
          Consts.RIO_PREFIX + "i++) {\n");
      int ct = 0;
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        ct++;
        if (1 != ct) {
          cb.append("else ");
        }
        cb.append("if (" + ct + " == " + Consts.RTI_FILTER_FIELDS + "[" +
            Consts.RIO_PREFIX + "i]) {\n");
        type.genReadMethod(cb, name, name, false);
        cb.append("}\n");
      }
      if (0 != ct) {
        cb.append("else {\n");
        cb.append("java.util.ArrayList<"
            + "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
            + "(java.util.ArrayList<"
            + "org.apache.hadoop.record.meta.FieldTypeInfo>)"
            + "(" + Consts.RTI_FILTER + ".getFieldTypeInfos());\n");
        cb.append("org.apache.hadoop.record.meta.Utils.skip(" +
            Consts.RECORD_INPUT + ", " + "typeInfos.get(" + Consts.RIO_PREFIX +
            "i).getFieldID(), typeInfos.get(" +
            Consts.RIO_PREFIX + "i).getTypeID());\n");
        cb.append("}\n");
      }
      cb.append("}\n");
      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
      cb.append("}\n");

      // compareTo()
      cb.append("public int compareTo (final Object " + Consts.RIO_PREFIX +
          "peer_) throws ClassCastException {\n");
      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
      cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
      cb.append("}\n");
      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " +
          Consts.RIO_PREFIX + "peer_;\n");
      cb.append("int " + Consts.RIO_PREFIX + "ret = 0;\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genCompareTo(cb, name, Consts.RIO_PREFIX + "peer."+name);
        cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) return " +
            Consts.RIO_PREFIX + "ret;\n");
      }
      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
      cb.append("}\n");

      // equals()
      cb.append("public boolean equals(final Object " + Consts.RIO_PREFIX +
          "peer_) {\n");
      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
      cb.append("return false;\n");
      cb.append("}\n");
      cb.append("if (" + Consts.RIO_PREFIX + "peer_ == this) {\n");
      cb.append("return true;\n");
      cb.append("}\n");
      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " +
          Consts.RIO_PREFIX + "peer_;\n");
      cb.append("boolean " + Consts.RIO_PREFIX + "ret = false;\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genEquals(cb, name, Consts.RIO_PREFIX + "peer."+name);
        cb.append("if (!" + Consts.RIO_PREFIX + "ret) return " +
            Consts.RIO_PREFIX + "ret;\n");
      }
      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
      cb.append("}\n");

      // clone()
      cb.append("public Object clone() throws CloneNotSupportedException {\n");
      cb.append(name+" " + Consts.RIO_PREFIX + "other = new "+name+"();\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genClone(cb, name);
      }
      cb.append("return " + Consts.RIO_PREFIX + "other;\n");
      cb.append("}\n");

      cb.append("public int hashCode() {\n");
      cb.append("int " + Consts.RIO_PREFIX + "result = 17;\n");
      cb.append("int " + Consts.RIO_PREFIX + "ret;\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genHashCode(cb, name);
        cb.append(Consts.RIO_PREFIX + "result = 37*" + Consts.RIO_PREFIX +
            "result + " + Consts.RIO_PREFIX + "ret;\n");
      }
      cb.append("return " + Consts.RIO_PREFIX + "result;\n");
      cb.append("}\n");

      cb.append("public static String signature() {\n");
      cb.append("return \""+getSignature()+"\";\n");
      cb.append("}\n");

      cb.append("public static class Comparator extends"+
          " org.apache.hadoop.record.RecordComparator {\n");
      cb.append("public Comparator() {\n");
      cb.append("super("+name+".class);\n");
      cb.append("}\n");

      cb.append("static public int slurpRaw(byte[] b, int s, int l) {\n");
      cb.append("try {\n");
      cb.append("int os = s;\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genSlurpBytes(cb, "b","s","l");
      }
      cb.append("return (os - s);\n");
      cb.append("} catch(java.io.IOException e) {\n");
      cb.append("throw new RuntimeException(e);\n");
      cb.append("}\n");
      cb.append("}\n");

      cb.append("static public int compareRaw(byte[] b1, int s1, int l1,\n");
      cb.append(" byte[] b2, int s2, int l2) {\n");
      cb.append("try {\n");
      cb.append("int os1 = s1;\n");
      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
        JField<JavaType> jf = i.next();
        String name = jf.getName();
        JavaType type = jf.getType();
        type.genCompareBytes(cb);
      }
      cb.append("return (os1 - s1);\n");
      cb.append("} catch(java.io.IOException e) {\n");
      cb.append("throw new RuntimeException(e);\n");
      cb.append("}\n");
      cb.append("}\n");
      cb.append("public int compare(byte[] b1, int s1, int l1,\n");
      cb.append(" byte[] b2, int s2, int l2) {\n");
      cb.append("int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
      cb.append("return (ret == -1)? -1 : ((ret==0)? 1 : 0);");
      cb.append("}\n");
      cb.append("}\n\n");
      cb.append("static {\n");
      cb.append("org.apache.hadoop.record.RecordComparator.define("
          +name+".class, new Comparator());\n");
      cb.append("}\n");
      cb.append("}\n");

      FileWriter jj = new FileWriter(jfile);
      try {
        jj.write(cb.toString());
      } finally {
        jj.close();
      }
    }
  }

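  // C++ back end. CppRecord mirrors JavaRecord: it keeps the fields mapped to
  // their C++ types and emits the record's C++ declaration and definition via
  // genCode().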
  class CppRecord extends CppCompType {

    private String fullName;
    private String name;
    private String module;
    private ArrayList<JField<CppType>> fields =
        new ArrayList<JField<CppType>>();

    CppRecord(String name, ArrayList<JField<JType>> flist) {
      super(name.replaceAll("\\.","::"));
      this.fullName = name.replaceAll("\\.", "::");
      int idx = name.lastIndexOf('.');
      this.name = name.substring(idx+1);
      this.module = name.substring(0, idx).replaceAll("\\.", "::");
      for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
        JField<JType> f = iter.next();
        fields.add(new JField<CppType>(f.getName(), f.getType().getCppType()));
      }
    }

    @Override
    String getTypeIDObjectString() {
      return "new ::hadoop::StructTypeID(" +
          fullName + "::getTypeInfo().getFieldTypeInfos())";
    }

    String genDecl(String fname) {
      return " "+name+" "+fname+";\n";
    }

    @Override
    void genSetRTIFilter(CodeBuffer cb) {
      // we set the RTI filter here
      cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+
          name + "\"));\n");
    }

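    // Emits the definition of <record>::setupRtiFields() into the generated
    // .cc file. The emitted C++ fills p<RTI_FILTER_FIELDS> so that entry i
    // holds the 1-based index of the field in p<RTI_VAR> whose FieldTypeInfo
    // equals filter field i, or 0 if this record has no matching field; the
    // generated deserialize() then skips unmatched fields. Identifier names
    // come from Consts; the <...> above are placeholders, not literal output.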
    void genSetupRTIFields(CodeBuffer cb) {
      cb.append("void " + fullName + "::setupRtiFields() {\n");
      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") return;\n");
      cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") return;\n");
      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = new int[p" +
          Consts.RTI_FILTER + "->getFieldTypeInfos().size()];\n");
      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
          "i] = 0;\n");
      cb.append("}\n");
      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "j=0; " +
          Consts.RIO_PREFIX + "j<p" + Consts.RTI_VAR +
          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "j++) {\n");
      cb.append("if (*(p" + Consts.RTI_FILTER + "->getFieldTypeInfos()[" +
          Consts.RIO_PREFIX + "i]) == *(p" + Consts.RTI_VAR +
          "->getFieldTypeInfos()[" + Consts.RIO_PREFIX + "j])) {\n");
      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX +
          "i] = " + Consts.RIO_PREFIX + "j+1;\n");
      cb.append("break;\n");
      cb.append("}\n");
      cb.append("}\n");
      cb.append("}\n");
      cb.append("}\n");
    }

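    // Writes the C++ class declaration, wrapped in the record's namespaces,
    // to the header stream hh, and writes the definitions (static type info
    // initialization, setupTypeInfo(), setTypeFilter(), setupRtiFields(),
    // serialize(), deserialize(), deserializeWithoutFilter(), operator<,
    // operator==, type() and signature()) to the implementation stream cc.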
    void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
        throws IOException {
      CodeBuffer hb = new CodeBuffer();

      String[] ns = module.split("::");
      for (int i = 0; i < ns.length; i++) {
        hb.append("namespace "+ns[i]+" {\n");
      }

      hb.append("class "+name+" : public ::hadoop::Record {\n");
      hb.append("private:\n");

      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        type.genDecl(hb, name);
      }

      // type info vars
      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_VAR + ";\n");
      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_FILTER + ";\n");
      hb.append("static int* p" + Consts.RTI_FILTER_FIELDS + ";\n");
      hb.append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
      hb.append("static void setupRtiFields();\n");
      hb.append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " +
          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
      hb.append("public:\n");
      hb.append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " +
          "{return *p" + Consts.RTI_VAR + ";}\n");
      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
      hb.append("virtual void serialize(::hadoop::OArchive& " +
          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const;\n");
      hb.append("virtual void deserialize(::hadoop::IArchive& " +
          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
      hb.append("virtual const ::std::string& type() const;\n");
      hb.append("virtual const ::std::string& signature() const;\n");
      hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
      hb.append("virtual bool operator==(const "+name+"& peer_) const;\n");
      hb.append("virtual ~"+name+"() {};\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        type.genGetSet(hb, name);
      }
      hb.append("}; // end record "+name+"\n");
      for (int i=ns.length-1; i>=0; i--) {
        hb.append("} // end namespace "+ns[i]+"\n");
      }

      hh.write(hb.toString());

      CodeBuffer cb = new CodeBuffer();

      // initialize type info vars
      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
          Consts.RTI_VAR + " = " + fullName + "::setupTypeInfo();\n");
      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" +
          Consts.RTI_FILTER + " = NULL;\n");
      cb.append("int* " + fullName + "::p" +
          Consts.RTI_FILTER_FIELDS + " = NULL;\n\n");

      // setupTypeInfo()
      cb.append("::hadoop::RecordTypeInfo* "+fullName+"::setupTypeInfo() {\n");
      cb.append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" +
          name + "\");\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        type.genStaticTypeInfo(cb, name);
      }
      cb.append("return p;\n");
      cb.append("}\n");

      // setTypeFilter()
      cb.append("void "+fullName+"::setTypeFilter(const " +
          "::hadoop::RecordTypeInfo& rti) {\n");
      cb.append("if (NULL != p" + Consts.RTI_FILTER + ") {\n");
      cb.append("delete p" + Consts.RTI_FILTER + ";\n");
      cb.append("}\n");
      cb.append("p" + Consts.RTI_FILTER + " = new ::hadoop::RecordTypeInfo(rti);\n");
632 cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") {\n");
633 cb.append("delete p" + Consts.RTI_FILTER_FIELDS + ";\n");
634 cb.append("}\n");
      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = NULL;\n");
      // set RTIFilter for nested structs. We may end up with multiple lines that
      // do the same thing, if the same struct is nested in more than one field,
      // but that's OK.
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        CppType type = jf.getType();
        type.genSetRTIFilter(cb);
      }
      cb.append("}\n");

      // setTypeFilter()
      cb.append("void "+fullName+"::setTypeFilter(const " +
          "::hadoop::RecordTypeInfo* prti) {\n");
      cb.append("if (NULL != prti) {\n");
      cb.append("setTypeFilter(*prti);\n");
      cb.append("}\n");
      cb.append("}\n");

      // setupRtiFields()
      genSetupRTIFields(cb);

      // serialize()
      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& " +
          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const {\n");
      cb.append(Consts.RECORD_OUTPUT + ".startRecord(*this," +
          Consts.TAG + ");\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        if (type instanceof JBuffer.CppBuffer) {
          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+","+name+
              ".length(),\""+name+"\");\n");
        } else {
          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+",\""+
              name+"\");\n");
        }
      }
      cb.append(Consts.RECORD_OUTPUT + ".endRecord(*this," + Consts.TAG + ");\n");
      cb.append("return;\n");
      cb.append("}\n");

      // deserializeWithoutFilter()
      cb.append("void "+fullName+"::deserializeWithoutFilter(::hadoop::IArchive& " +
          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
          Consts.TAG + ");\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        if (type instanceof JBuffer.CppBuffer) {
          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
              name+",len,\""+name+"\");\n}\n");
        } else {
          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
              name+"\");\n");
        }
      }
      cb.append(Consts.RECORD_INPUT + ".endRecord(*this," + Consts.TAG + ");\n");
      cb.append("return;\n");
      cb.append("}\n");

      // deserialize()
      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& " +
          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") {\n");
      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " +
          Consts.TAG + ");\n");
      cb.append("return;\n");
      cb.append("}\n");
      cb.append("// if we're here, we need to read based on version info\n");
      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," +
          Consts.TAG + ");\n");
      cb.append("setupRtiFields();\n");
      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " +
          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER +
          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
      int ct = 0;
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        CppType type = jf.getType();
        ct++;
        if (1 != ct) {
          cb.append("else ");
        }
        cb.append("if (" + ct + " == p" + Consts.RTI_FILTER_FIELDS + "[" +
            Consts.RIO_PREFIX + "i]) {\n");
        if (type instanceof JBuffer.CppBuffer) {
          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
              name+",len,\""+name+"\");\n}\n");
        } else {
          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
              name+"\");\n");
        }
        cb.append("}\n");
      }
      if (0 != ct) {
        cb.append("else {\n");
        cb.append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" +
            Consts.RTI_FILTER + "->getFieldTypeInfos();\n");
        cb.append("::hadoop::Utils::skip(" + Consts.RECORD_INPUT +
            ", typeInfos[" + Consts.RIO_PREFIX + "i]->getFieldID()->c_str()" +
            ", *(typeInfos[" + Consts.RIO_PREFIX + "i]->getTypeID()));\n");
        cb.append("}\n");
      }
      cb.append("}\n");
      cb.append(Consts.RECORD_INPUT + ".endRecord(*this, " + Consts.TAG+");\n");
      cb.append("}\n");

      // operator <
      cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
      cb.append("return (1\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        cb.append("&& ("+name+" < peer_."+name+")\n");
      }
      cb.append(");\n");
      cb.append("}\n");

      cb.append("bool "+fullName+"::operator== (const "+fullName+"& peer_) const {\n");
      cb.append("return (1\n");
      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
        JField<CppType> jf = i.next();
        String name = jf.getName();
        cb.append("&& ("+name+" == peer_."+name+")\n");
      }
      cb.append(");\n");
      cb.append("}\n");

      cb.append("const ::std::string&"+fullName+"::type() const {\n");
      cb.append("static const ::std::string type_(\""+name+"\");\n");
      cb.append("return type_;\n");
      cb.append("}\n");

      cb.append("const ::std::string&"+fullName+"::signature() const {\n");
      cb.append("static const ::std::string sig_(\""+getSignature()+"\");\n");
      cb.append("return sig_;\n");
      cb.append("}\n");

      cc.write(cb.toString());
    }
  }

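  // C back end: code generation for the C target is not implemented in this
  // class, so CRecord is an empty placeholder.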
  class CRecord extends CCompType {

  }

  private String signature;

  /**
   * Creates a new instance of JRecord.
   *
   * @param name  fully qualified record name (module name + "." + record name)
   * @param flist the record's fields, in declaration order
   */
  public JRecord(String name, ArrayList<JField<JType>> flist) {
    setJavaType(new JavaRecord(name, flist));
    setCppType(new CppRecord(name, flist));
    setCType(new CRecord());
    // precompute signature
    int idx = name.lastIndexOf('.');
    String recName = name.substring(idx+1);
    StringBuilder sb = new StringBuilder();
    sb.append("L").append(recName).append("(");
    for (Iterator<JField<JType>> i = flist.iterator(); i.hasNext();) {
      String s = i.next().getType().getSignature();
      sb.append(s);
    }
    sb.append(")");
    signature = sb.toString();
  }

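  /**
   * Returns the precomputed record signature: "L", the simple record name,
   * and the concatenated signatures of the fields (in declaration order)
   * enclosed in parentheses. For example (illustrative only), a record Foo
   * whose two fields have signatures "i" and "s" yields "LFoo(is)".
   */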
  @Override
  String getSignature() {
    return signature;
  }

  void genCppCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
      throws IOException {
    ((CppRecord)getCppType()).genCode(hh, cc, options);
  }

  void genJavaCode(String destDir, ArrayList<String> options)
      throws IOException {
    ((JavaRecord)getJavaType()).genCode(destDir, options);
  }
}