Posted to commits@bigtop.apache.org by yw...@apache.org on 2015/09/06 08:20:39 UTC

[1/3] bigtop git commit: BIGTOP-2009: added new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz

Repository: bigtop
Updated Branches:
  refs/heads/master 49705dad1 -> 96ecf29a1
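
The subject line above lists the HDFS FsShell commands these new tests exercise (chgrp, cp, ls, mv, du, put, get, mkdir, stat, touchz); the file added in the diff below is a binary PDF fixture used as test input. As a rough illustration only — not the Bigtop test code added by BIGTOP-2009 — a minimal sketch of driving a few of those commands from Java and checking exit codes might look like this. It assumes a working `hadoop` client on the PATH and a reachable HDFS; `runFsShell` and the target paths are hypothetical names introduced here.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Arrays;

    /**
     * Illustrative sketch only: runs a few "hadoop fs" commands the way a
     * smoke test might and checks their exit codes. Assumes the "hadoop"
     * client is on the PATH and HDFS is reachable; this is NOT the actual
     * test code from this commit.
     */
    public class FsShellSketch {

        // Hypothetical helper: run "hadoop fs <args>" and return its exit code.
        static int runFsShell(String... args) throws IOException, InterruptedException {
            String[] cmd = new String[args.length + 2];
            cmd[0] = "hadoop";
            cmd[1] = "fs";
            System.arraycopy(args, 0, cmd, 2, args.length);
            Process p = new ProcessBuilder(cmd).inheritIO().start();
            return p.waitFor();
        }

        public static void main(String[] args) throws Exception {
            String dir = "/tmp/fsshell-sketch";              // hypothetical HDFS test dir
            Path local = Files.createTempFile("fsshell", ".txt");
            Files.write(local, Arrays.asList("hello"));

            // mkdir, put, ls, stat, touchz, du: each should exit 0 on success.
            assertZero(runFsShell("-mkdir", "-p", dir));
            assertZero(runFsShell("-put", local.toString(), dir + "/data.txt"));
            assertZero(runFsShell("-ls", dir));
            assertZero(runFsShell("-stat", "%n", dir + "/data.txt"));
            assertZero(runFsShell("-touchz", dir + "/empty"));
            assertZero(runFsShell("-du", dir));

            // Clean up the test directory.
            assertZero(runFsShell("-rm", "-r", "-skipTrash", dir));
        }

        private static void assertZero(int exitCode) {
            if (exitCode != 0) {
                throw new AssertionError("hadoop fs command failed, exit=" + exitCode);
            }
        }
    }

The real tests live under bigtop-tests/test-artifacts/hadoop and use binary fixtures such as the test_3 PDF below as payloads for put/get/cp round-trips.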


http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_3
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_3 b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_3
new file mode 100644
index 0000000..c59d21a
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_3
@@ -0,0 +1,3321 @@
+%PDF-1.3
+%����
+4 0 obj
+<< /Type /Info
+/Producer (FOP 0.20.5) >>
+endobj
+5 0 obj
+<< /Length 1127 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gb!$G:K-9r(rl%C'V'@35m.fkZFAB3;.gkS)ooka*b(VI):MDR;Vc9+OJG/8:i`hGNaj5.^`S&tpP&3`Ed$OFi^3k.jTMPIo*GK!#mNs"?kB:ujs0)p&B#OWCO2O&38^<X;\FcSg%6oMSirs/HCgS:\?<?_P5GI\!V*;Y,657>UXq1/.)[0B)ig,K]P3"u7oR,YhrhYNd\846HPBl;D\"(e)Pe5H6/*Hhe`8r;o=D&QW5Y'hZN$dh3:9;";[P\\@:2^_fU`=>#=@4uQMNnCV]pPiZSJT0E`@?Bh-g]Y%t_e8U(5(^6X/jQ>q1##qYgXF>=+*o$U'0rNu/9e$K&sAX)El4XZ"OT9-qQo'^k:eI<th]OT,GqCd.umHN^.^Y`=RO8&/#0IO(D"SpiWNcWi"RG[4:GC53rndP.Up#oQ48>h*IX]9TbJ6gVaD[&9Pd>0JR>^#4e\X\R,d\*/U0qa;A=Ka/'i-+EMBA[;.FY-+Fs#<R9BDJ0cM2=s1eCS<?YF-=;0<boP;eTrciam0T\_ZKSP7T>ilF)Pkfh9S@?"b(4#5*`*$V&PgoLh/>+9!D6(T\"JMS8p7kZs0d945ACO,_'m,/4O"2C70h_%VKC9Ft#=WDqk+mraK)8Z-67V8?.Ub7imCsDeglDDFa;/k".D;HZRs<3%82Dp>ne*Y?U(9eMlf.Z-*8'2Q;RRT/7gAA&H#"?KRkN_]MQ^>'N]tS+9_\ZH:7p#N*^?mWaknP=VtT9_sS1R<RDs-R57l3I!61C#LXl&&(X!`_P1R<?F9TM9JJL^E#0,L>CGTf\:dre-(MN<%iO+bGfQfEOikZDX$Vj[2m%n3*XtE.:8oC<Jr)nh.5+<2MY5lN9iqZU3lJmLu,d^X,!P<j@C_H5@GpWUE_Mpg)F*I+J.?aYEZ<doCX9b&gu2em\"I`B?cY[nAi*WlWuo'44>_9_es@[PqpVO%dUS&3a8_-q)ckVg^rKgdV%#,b[&5P\)pku\[B='J:
 Th%2"MqN.g7+#bK8)P\[kN]H>F-$8`3P52A&jp,=XsEiB-B6^ATPr[^eR!9dhnGJl#k2/?Z*j`ZNrGDO5!`=hQ3"Wk"9nZ"PmZBYB_pVBEGnfB#sQ(O#$E)YANQAj<0~>
+endstream
+endobj
+6 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 5 0 R
+/Annots 7 0 R
+>>
+endobj
+7 0 obj
+[
+8 0 R
+10 0 R
+12 0 R
+14 0 R
+16 0 R
+18 0 R
+20 0 R
+22 0 R
+24 0 R
+26 0 R
+28 0 R
+30 0 R
+32 0 R
+34 0 R
+36 0 R
+38 0 R
+40 0 R
+42 0 R
+44 0 R
+46 0 R
+]
+endobj
+8 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 559.666 148.664 547.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 9 0 R
+/H /I
+>>
+endobj
+10 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 541.466 175.988 529.466 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 11 0 R
+/H /I
+>>
+endobj
+12 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 523.266 157.316 511.266 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 13 0 R
+/H /I
+>>
+endobj
+14 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 505.066 201.332 493.066 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 15 0 R
+/H /I
+>>
+endobj
+16 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 486.866 239.66 474.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 17 0 R
+/H /I
+>>
+endobj
+18 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 468.666 185.156 456.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 19 0 R
+/H /I
+>>
+endobj
+20 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 450.466 153.488 438.466 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 21 0 R
+/H /I
+>>
+endobj
+22 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 432.266 190.82 420.266 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 23 0 R
+/H /I
+>>
+endobj
+24 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 414.066 254.276 402.066 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 25 0 R
+/H /I
+>>
+endobj
+26 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 395.866 162.164 383.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 27 0 R
+/H /I
+>>
+endobj
+28 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 377.666 210.5 365.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 29 0 R
+/H /I
+>>
+endobj
+30 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 359.466 276.488 347.466 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 31 0 R
+/H /I
+>>
+endobj
+32 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 341.266 277.184 329.266 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 33 0 R
+/H /I
+>>
+endobj
+34 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 323.066 168.5 311.066 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 35 0 R
+/H /I
+>>
+endobj
+36 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 304.866 176.504 292.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 37 0 R
+/H /I
+>>
+endobj
+38 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 286.666 229.472 274.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 39 0 R
+/H /I
+>>
+endobj
+40 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 102.0 268.466 239.66 256.466 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 41 0 R
+/H /I
+>>
+endobj
+42 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 250.266 185.156 238.266 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 43 0 R
+/H /I
+>>
+endobj
+44 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 232.066 187.172 220.066 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 45 0 R
+/H /I
+>>
+endobj
+46 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 213.866 174.176 201.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 47 0 R
+/H /I
+>>
+endobj
+48 0 obj
+<< /Length 2536 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gat=-99\*i%_hgAkVc"4>:i]Jmk(k-4^4mXbp0qsk!2SHc"XkGUq4&8rBN"DPXr3gQ)[g)(^0U7p]+,T*d<Z@M;sAok8_f0caiCia+HUjF7<Qj%Kk;n@cpiSIsgbAO3X=bh>W1:>LCr`I<>I1Nh:M[*I!!c?mkmDD@d4C2iV;pm!la)<Tj_NS9D[7IdD/GN_]kM\j#,c@5=cC?FcR@k&o7@XA\Xb<gSeR=&qol_MU;E;^O<::CKhDU6lb!A7C%c)6Y[a-ENV%'(pPL\%8kjnR%a:FYg59d,skBQFGNML41:!Fiem%Z_/UkXi3*%X30T/R.[0kNW5!>BiQ`E/_j2FG+b]o8eO)VaIo&2k6\R5m-g65Xu_7P+%Ge:%0A<\0FHV1*ZmjKj;t1cq5KOZ?l2M9lr[_Po1CKO)p9>-bH!G6^k'$"6@=;sWsW*jmes!%VW:EJ?H@WP@.Z&F#uW_Oa8:6/,e=;`!V!q]K1%I<0\p^QH2+B1qp3R5%o`4A+<ZfV&P/.HIt4(H]^6DnkGD,9]K0C)gdc'uEFu>^;iokn>+RkZKL1[0EH2`=+135JF"L3PE$<b<nWN-n,MdS6,*D2@+'p'%I32c@K[B)U8g,bGgW"HRL"617BVf1TVH;BtWJrjeXq-?]",J%L&K<+QXIUW!Z!Wo6)j=M)GWZ]==EA7D`J("Eb.1KeQ*%nPPmpj:G8V_i&7G<UckE'GqrJ!kN3cS9[tJJZ7Yc)g&'97\&gk=c-;N<i@M9t/&s`,m]i%N/#'SRZ@oRJRAR5OA=p:To;t,_q?<9\\l]l,7H.e!7@oAaf72S<C9NQS7'(I5!laF*!Vjo5YZ#:`,[SV>^rhHAM`Ur'T,o!n.+#1t,)_*n?70U!"('1`O%BNB3lW+&r0C1LM0SH<2\m\T+S]7Pb4$d^:l^TJYpQ^V@pDC)mXqe%roMj`s5?#@.YNc0S_(A&HX,758BIaDOiq[;`gcn!ACKcXkSmJflri/au9F(sPeQ
 ONOaqd*l0TZiS3"316KNCmKf'^OB:c2S@XC:Ubel,l/lId5l4o>McI*W6LUW3Y\HuaO;Qu!![+`>=-pqW&e'N\6tb7,:[LL1n`-WgGHCEO"^[,,en%(@(A2NHC+er[;DqP\9H36@Qe_',57c.[RI)"M+i5=hr_7L+s,D3ke^I=[aMp5&sr^?:e)"M_tQ)&Uf2#+/<so...@->BRa[9OOUnr7X'+W/IDfbI1I,D&f$2S)'fqZ`68bSRcJj;2&)0,MGW'XPdcdd*1]\>7JFeC'*>f]F+JMkoU]UIQ7*D"n=l/Kr^sm5uFfi;/..r6^>spiKW)fbdW*-b>/,ldJ_XlkrBt$Zl[-\!#6#WNL4<(\EDu(bu)V7#:%MdbZpXli2bnfJ'T$bJ&m)>=G)Z_XWGVWp.kp@9BolcWF*^9nE5Y(`+op=f/6(J/I^Vn'/6]>?kb=hcam3K<85pnJpmMhA@a<NPrj9^nPggH2P%;Erf7J3A9U!2.n4$e+4A^nZqI1&;oW?>W+.nB:HWLdsR\>aD&,^=3'On\1OY*gM!QR')-<.Ra(euA'`J7Fidsf>mGIu!D.DGTb";l2\tDJ1&]m.g#B:j*4(&P6Ziq291Um7CLAW."P5UN`_e(oB6PB:8">S2"?d(9e35B
 2b`n#&Q+0PXiQs7<q(nAQ9S&TtlgMI-Y@QJDLA'P1K%Y/^eb3E#JrG4G8(4K;=k*<8M('VE8[]>`hFrpVi56uhgp4!=4^2bki*h%e5%Db/<3G++eirVtaFm#5^4#d7MT:9?W(,3/AM/E`dPB+r5hUls6fB;!?iDn*mOFaCiNSVQN9*)m&BJ_!R\L9>,nV3.a25e'm8Zi,#b2fX47Q+mSr`UiAh?X+g%4$YoV#%*XCnML8/"h?$dZ4f877q<7_^s]Q`?@/B:*J-aHuX-(.*%+jFa_ep`UQO58E#GkJd-qZ$`9(["I(&YDGtjNshTE$U&r)ka6ZkF/c2WRgTF?+C+RDci`E&S\u%DT,a.-lq%7p'U,B-g\,Xb.&g"&Qf54,pC]EA`Yo%rU@+lX4MCCa92d#g+\2\?#/`88/bsN/j_kJ4\k4nboUi>CPU'[j-:#?C()]V;)T&?KnOl[6]EoDlqMLl?4me?PT3&u&P'JntmBHB8cj[+mU<a+_c;WP"Gn7TH`.lUa_kPa7d@0_MIWK_QmN70YQl,~>
+endstream
+endobj
+49 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 48 0 R
+/Annots 50 0 R
+>>
+endobj
+50 0 obj
+[
+51 0 R
+52 0 R
+53 0 R
+]
+endobj
+51 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 148.992 546.932 198.984 534.932 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (quickstart.html)
+/S /URI >>
+/H /I
+>>
+endobj
+52 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 148.992 533.732 213.996 521.732 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (cluster_setup.html)
+/S /URI >>
+/H /I
+>>
+endobj
+53 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 183.636 344.598 294.648 332.598 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (hdfs_design.html)
+/S /URI >>
+/H /I
+>>
+endobj
+54 0 obj
+<< /Length 2767 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU6=``?+(4Ol=i+(3T#RM&Ig.l:D9Ub:m[*>^1PD?^[Ziit%.8cFi'_U)8"0MkC5`p%JF[[3]s3:iErGqFJ\bP(!Z*s/ZVaY84g[[H;I_&d5^'j#J9NY)Ks"<.](N4f^Q:'W]EjMu#+rgN(XI>5Lin:KBgfo9A3i'`1l],"N?9O/Eo/V_e66,h<1&Cf`FGe.TlYgS7/')`l=fNJ(Ot^S14As<@K$'>SSFu%mCZ3o3F..\]HcpCh#t*F[8M$>6gpbS[K?h&c:U[Yli@R8hgt'XJ7-cZmRcG"?8I^8k%B`h:6Pa)q(j4(S3c-rVV!g[5M.dtQ[Z/EhL=CAbcA3Ugqqsm@a0LMb\@A31mc)b,*DM^hN(D,sAE^9hqp-e>W'L-6%dX*;.hOc?8Y6QTF'N+(LJ).*+Bk"1DQ8X-?cB#rU8LC%5*)lMfol=phh`Tr=rN=snMQJaD:p)opX/LZ1<#WiBkB>pM06?AfknBS-%%c1D4^q\hO?I]Lj(p+p,ou4%>&R/N-OeoplOj9B4c.;GHC8rpaBX#KltH1)m8Do!:+qM6Q_I:5RLBrXFe6hT['ka(F9"=bP`eo>'BhkffPE_l2DM70n?jQO`uL()FA='"6Y(,rqi;3]?g3l4hm_(Y"]mci%LV>nc"o3]f;[Zs''Smqe$(PjOW?GM-$?fHH<Mib!TK\Z,ShH&_n@T^L-tpRS&g&p#G@%I=/h\-@NDELr6loV2I,-rbsEHge`:q.#[r24UdQ-agk%GcYNER"9/`;NDe<HFboA8at.Nid(*qU7=0#LZfh<f$M=Sf,uRrp>pX&'T1":rZ1M8\Q;A5hT271;FSY0(>i#IoB34`'-n3-],D^<,8%\k`@h`SO)dMbSKSN\.Po.HSp'qN;D7F;=5K2Vu._W*GVS:"tkFdas$TVs?\Nl\5c'U#\9g[0+'VFbba!PW<UN313R`?tE0R9Y_[DO"[lu1D.nO):c*/7Xd@[1!9V<Vb%(';`14fh&!Jd
 -\Oe#q`U7V#++.PB'KL^n3<a@Lh;dHE%"EJR;26iaA.o*clL<=,Z2RO)n`>'uu><;msQ!i9p.ZV$rSa2ln%-_X<C4Hm[)(.UsoDcCkAm_m'U'hr9i!8WJ6Y$qVZeuB4K@)/Mq2+D8*8RF*Q)R:aHi^tWqZ5_Q"lB]\Q8^4d-J/%b[.n\;M<-:b*CAq>m6Qra0ZkB=H]N_RLg#ZTQ^1b]][[=:\0ST0ZbSYH1:cU`brj07B`s<GJl;p%Q89E\]`=s[Y%/d80Zp+I3a-Y$.D$Q_>$gJ$[DsPZ"hoL@S=Xinu/#o.Q3jSDn_tCSBIBU'uc0R-_62HIm.^s9qJGT"m<YO1'h8d,"ao0._?f8R%/I']&o^(LNmu3S/5Sc2ArpFD]Gmt!RaV?.-9E<8`L^5J1KTY%4in>^dg#@;[P:/>GNg[iH+euP_MjOTXQDuQfg&!3^N"p'h_;NOM=N,[/C&6(T\]c@i::JB"Xbd7^<ETDjZ]G/&4X1fHD0!OUq">8Im#;e6ctg6'a@7lje%V`MIr#;sPR</9J-?FV2Qb(\WnSW17^=ZGd(fC>B'TtAM<j6E+guAF?p5k[U[;MU@8HAQS$Eqd*YR<^.dt>'c1&+YXpX:iL^.;IXLf6:'IS.n:'BEuc;F.k8[GeZ.[MW0bGZC:H+h[.kH17LYCk,nYd2i#<"_,]F@o2JJ$QnW.D,5RfT!)0:^K)+^'rrP/VY[tB#J[knPq?^rE(,AcI+dD=cZT]\7o>7PkuhUYCp+N\ltAeS@AtZ7R[#`%Qq);fX\`e:jiK2't2,"@EDJ$DNugZRtnFf;Y]Lc/=&Ba0c;>0Q(D^LK@*G#T$X-*(Duuk/tq`VD^J2,/JkKoP-1P?/h`le7nJ$R%Y1Z49_ZQ".[:)S;JZk(c3T&#3(MMZ$Pn$37Cc1^eo>#,nP`SH[74:'CIKNY;p#g&C<J_"IeL@aY)f0PB\IF@dP`CF^c0c-V/ehu>VrI"o)69p`WlcMP#R:7>Qcc
 U^/M7+:6SE"G>QtBJ1?0ZfnXZ$N"oY/*hFHs=$P1X*7af;XX(.0+!>>`<Q)No#4019j&X)ATd.Jo''MLLr'l$>`lb#N!$am6SHRb`rqc3p]&P>n>+t*V"GM0fRDW[,YG2sM2qoD!Ap]Kon3D,WDn#L/aV[(Wj$67GKabjbgEHSbVoSkKTtG*<R3&.22=cYOM<`3]R6aXue=P8@UjSM2Kb3roheenE&=m<QOdHEQl)pFkC(7+EcK$pS;[kE$T\=6.*.$idXe);ikVn2PgQML=If=$!Pq$qiY/W<KilVdf4/X"A+K-#icJQo9KJ0m_\nr).&U_jpd-Y5TeDpg@F.CBk3(X2[jn(B3g_);o"V7W-4rkEf$#50_o/_S).*,Vo2Mp'''P;W%U>K<cAcm`INoW,=@kKWrQXIEsZ_FW[Z?MEj-22n:r43M23K9$J*(Bi`E/gft"W-dn9gCK?C5&,Jo4h%nrHQtE&SVC%/.^Qsl:XihJ2YHIr2,[:5f6pV!_rYc#5N->@#E`R!#nd8$S]Vn3@mlPW"j'mcN>$GrA_U5!&i"f\"iasP8f%sE@S`f,\fX0]ikJc=:H8hWa"VV>Uj1$VSd2Ome*k:;Y_?$\iOcqL![)Qiq$b(ZrP(VY]P1V^]M8Jj)[Alp!QsOb0hoj?p+*fNkZUnO=;6[!Z*D.R_S)XO-_ESm](UO?iEL5X%1='re]@;h`iuJJP\SDK*`/T5GXe.*+e?Y0<hFWgL[uAX8cG)I]-f?kZQsj4,Gfrj8T.?JCG\~>
+endstream
+endobj
+55 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 54 0 R
+/Annots 56 0 R
+>>
+endobj
+56 0 obj
+[
+57 0 R
+58 0 R
+59 0 R
+60 0 R
+61 0 R
+62 0 R
+63 0 R
+64 0 R
+]
+endobj
+57 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 643.6 198.324 631.6 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/streaming/package-summary.html)
+/S /URI >>
+/H /I
+>>
+endobj
+58 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 617.2 174.996 605.2 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/pipes/package-summary.html)
+/S /URI >>
+/H /I
+>>
+endobj
+59 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 197.328 617.2 227.988 605.2 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://www.swig.org/)
+/S /URI >>
+/H /I
+>>
+endobj
+60 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 162.0 488.866 203.988 476.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/io/Writable.html)
+/S /URI >>
+/H /I
+>>
+endobj
+61 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 475.666 190.644 463.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/io/WritableComparable.html)
+/S /URI >>
+/H /I
+>>
+endobj
+62 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 179.328 298.932 257.976 286.932 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (quickstart.html#Standalone+Operation)
+/S /URI >>
+/H /I
+>>
+endobj
+63 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 263.976 298.932 353.304 286.932 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (quickstart.html#SingleNodeSetup)
+/S /URI >>
+/H /I
+>>
+endobj
+64 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 369.3 298.932 447.3 286.932 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (quickstart.html#Fully-Distributed+Operation)
+/S /URI >>
+/H /I
+>>
+endobj
+65 0 obj
+<< /Length 2301 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatV"?#SIU'Sc)T.s.0fM]u&n5-Sq?>-qLh(%#0,]pO#;g.EpGOZZcAg(!rJUI87\CjqmQ[7CihYNU%65!1k=j6+V)bN7aP"nD$Vf.W%5n,k>(A,\W#h9S#29`0Qh:#UM(r3:K1?dH6RnaD]3SXs@U]oWJJ)Xr7SUl,lls5N<*XZF]fFi:j74!>ofi-KZ>,*ZC:,NQFLDI4!:F=o-5(."R4_#G'$ledGNIRq06KD4P/lsG*hH->*Bk(JC2h75YcC(E40(1R5Q/q^IWTT_AZaj/Zj+80j+?@+VNctT]Kk2+GYq0H9?gO)rOHiNNl^WYC!%ltL[-OIe[3GL-VPRTYj@o0^lV,t87S'i%+IZOj8&fY*nA"<-N0@p4X\<G.qN=de^*&h-g]"7LJZ`p8o!(4pHZBhR7l3\pZ/[/Oa,NLho#kcYTEMbY(&C.dtF(VQW>6lmMb8_a4ME>^eb1b"()?:0uF3$DR=PEQVL'oR$@hEGi=tHF4)jY)J6ORC_F#)q?.bI'IaW00)aAU9&PP(%+Rpom&98rrTQT8_2>B,=2]\n)JN0*)h9'Fo.E.OE1N&@aESXJhp(W&l.ElSHQ@ZMJ4@pAB&Gc;k%pbE)V"Y;_!@!k9%,*lPpUPmO)Y9:bQ(WI?UI*PB.^^G$+Z^RZ*5gdFsYRc0m!=_p)&R6u@&TV/6e%$"2Gh(sdJ7\-\G(@g9p&p/M7i9>6N`[ch@aG&KBihHKAQeKm_a,>mXIIImqH,2Sk?i:i;1jGIAI`(*<tM)E,cFtdd]7@FL)6.S%mWH#B,G9O1HR8C!Aq<Ob\E.B"%@RS6D:hR?m#9)JLJZq^_q4*!`euG#dL_I6lD,894XN7\.KjM^GS#+j)F%GeI-5"To#C2'5,SubG!0b9;-Wpn)Zk*V(J^l@;pBn]V75"YNu+^pVl:)/tV6]6CuK7V-WUf<39S^3$$0a+(8M5c[N'uqi=J)k+?h9pMuoQ&*3:X2^D&96L_R$:IG'`WLBj92d
 uH<ICY_X7_;nr3p8oq4"DXW/HY1UW.Gn@M-N*cZ?-YT\9OR0kcf0E-8L?0R\^[PIP`ckhis1IX71L<<r(%ROqcjHE7/u90m0=OqkO(o.\=!LY-,k2g3ctqV@'omS%[4c)eKDVSV6>+?j8WOi./r#d^E/+Z,Xc-b[^mq(c.T'*@&U^4Vo)^ohunpn^c.5@X$,_b/W?b:(W)Ebd8'2$)["kb=*tq)MMRd1>-g(p(L9J3%',\Z3,_u/"+p-%0h\*dWIUj5%fD;g1<h,#uumKW2]$Nc?d^qV:5.2+(5;nj_mk2QuT?Mbj'nmjJ^(g2k?"PWRR;jI)!6j><Ve_cc&t6B,@je]Qd3;f42NVWm0PXA%Ot60C%Ar=LWGEbhOe)Okc2JGDgT.Q8874U3clB2#?`/GoPHG_UZ%V\_T1:PL=6]If&]Z]%FK'>sTB.F])W?eMrK@V8[A!0MeS)H4c*tj!lTWmdP$S*uJcuK[3>8=DeC#REHW,MG*n!T(qoND9D-MEjR>+/S;b(r3d@g33ML\=HK!r9--.VBL5J-&EL8*/FR'de0s<)^N2T()/@n6_)UJFid&WQiXf-GQT7On<-na4bPH9WQ2bq@E@c0t0:d<PRc6o&Cb3S=6a(g'D4FA,JT,?q?d!-tS<bse;lEs>,1JSI=Z0T$W,5T>j,B1\H-$Pd_Q@?$F3L.(p,m1b_[SVcd]n2&,e!\15J3tbU?D!l'hJ!ADSA;163TYgc:n[TOJ7UY<%_VlC)hm`Q,V1[]n:=E3UQF$*?^#>a^!tqG'>?ka+DmuA,D%U<@Otn?eQ_O#dY4(p"_onT-[ea=<pTq2qDA^+,`sj\0>',MjnQF#N^nc.d&*.^d;/F*9LqEMKhO2@ZS#bn^lc<B@VX3gof0#bGP.0+L')[C:T4#e/\]t07jdV**PorQ@*H%1W!8SR`h,@%V-a,A1i(b(J`MQs308sA&H):8#%@X3^)\^JHM0q=<dBd$"<DP4gj[DTU^!se@];
 sDW"PNLH24Z5H6d\BHlLYKfKi!1).2f!S<6;."75/K^p9K[I$h!XDpt`b[^rl&8!\a42u,PeXmL6\$)`sEi'On`#D17V-ZJTT?p9qJ*m9kNaM$aP5&/mdsgWZ<bt-!?="]-\gb7N4)+^KXaKc6%pXK7-^\N"c0JL\N/%tA@5kl61.:[c^Rr5KUnfbpdY0';7*LjeG9Mq8K$1e`i^j_:e;JYQ[41RRD*M>c_cJA2LchCtON=3Y[ZC_YpO@oGO$D-<m8MjiYIplreL'aV2YtR1]:D(Jhts&d(Pe`1/2djHrr^]*&mG~>
+endstream
+endobj
+66 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 65 0 R
+>>
+endobj
+67 0 obj
+<< /Length 2222 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU7gMYb8&:O:S&:(S/Rc.h5'[\0k@jGnh,\\3kH&2#V9=-9M(0-6L',:E>0"EM#db`0+R38e:3*ab!3AZ$,bPB8\q8kiKEH'2&rGke(@2*6T`]k>_^RkVoIPb&DH1K"tbK4_]E8[?[_E)pHKF-Fh8)Z#YJ+uAA*f*8J=%;h`kg0aL/j!GRbKeB%1hJ#G0LiiPei\@$#BAr*r9>Qm4c]<p3%4XT5T6Lk0Y\&\T-ei.(46GNDo_4?0bESppk4hQWl'or#mE7T"u5?"F3`/8mGRe!W<%nOBKUg;)`54:7A<MbN+8K#^N(4hR4It62K31kg-H<t!e&uRjQYu>8<+oT.8PU7)ptuR>]/-9Zf!s(3q2CJ/R,p%gm%]LCAqA<%a.JPX)Dj_?0rrU_gCBLa'?;6R=XhLSTsd<\^qm0hsNDQD7+444`CWk)CceHT$@']IoJSDfs[NAF^&0i.d^FH13<gk0pDk/fb`r,Inas/.)5A4q(&Eh5/uYT15Y$+hHrk%E+)D#p=7Pf*^O,24=V=r6t4.dh.b0W7)Dk/*c_[7`"RrdfX[VD?gIAW[@1F6MdD_7gV9fX(6)r%O>q14W[Z]6H"IiXf5br2p6KS=)m\YEU"paCMgD_/Q<eG&/:(amMjM+eg=1OX5O([G=#\o0h-oiui_4dPhDFnr3R8j["-[Xe]_](<G3ultU2j->*,9M(O0gVd,7&A-j9CY]`X"OU.H>MH7iJq$P!G![$RRnbF-8d8pt.<4jX_sg&O5T`Q>cZ<X_Kt"^ICH@eYQK#PF(*;PJ5\NEtg2Z1q^:dTLn-P768qW1j4_6_MgFDe!q0<,#AReEie!Cl++E/DmZLf6It/cVVJrLeTGlC37H'C]nGX9TJU7>h'fOe[Q_UhKeXH[I5Y,/Y1@3!i_UMH)IHSo3\*Wg+;9.h(hCXr;9=Wd;:YO%q>I!Oge+]&f2_k"?HbmIi.P^OhP`78&r.,=(+sFfd578B>3GAmRmUd%B"W\ck,
 d&MSkAL6@kVF44oa28UNM!VG2IO$@l;4R)*V/\FQ.GYY8ANg(t+%NK&PMq;_-uC%%\/?J0m9+`I]EK4M9=SU+mnTs5?j,-5LVBF"Bu=lHVB0B3`NuTi505/@ko7NKfI7V&>4B>?28/&kr+g:)P/DptchoVs,mmebBD&*>5=@7=V&<6AcI4_8hRQiN:R=03k9T;7^`a@h_\0;MI7i>hIK"8ZG?pYo<$cLMBB(GiLX*=Xb:PQ/Eh+]'H?Ej%t`d2,5u/>E##%KX@A%`sZA[S0j^V;M1S,TS,Xk02<1OQ)Z!h$#Xnp6I<Q9M3"k7qasD6SUSf4#7VKX,e/JiPc5,ce9+SS@09$nUE,T(a3t926<kg(UW7RT1>_Q7/_Aarl,U58Dm)BT0i!PQ7%H`1c1Tr#=*e\H[R<X"U6KF@a/%LDJ*)";U>`d;&l#E7OkPUe?,mWnB"Y^Ck,_B6JP,EoA",(7m(BGR0C%qiG.]bK4nhb4P<tg7aPI]1TB^$2;'35o;:$="\gI"n(Jk48_eGl,ZQ^Y'Y\:PNV+6`fRH40?6^VZ$T#?Rh&(@&6lGQU-*cc)VeCj)n)"+GjbNaM$b?'Eom^TEcSZ\PD-)J9kV^SU>7R=;2Zp)FgKeWX(dhLl\d(ru!oFu%f6(I/?PSA[BE3j$cJe#d/I(nJ+.j<20"!8:2C_:@LPV(j8oOBmoY.\l`)`Y,Fm@]D$Sj%*,@]UqdGY2?\SrWJ3S:1E<ID=tNVu@AO0i7,<7u>?*Y=VFX/>LGhdI?!.M5D#Q#h2-$g+B3Visk*,B8*Pq<.^/NS$6AI;/-fr?em`$Pa8b_kuZNY%b/;UYhUY3m!&a>2/o8J_Np..SLDe62%Sj'?7.Jf%I,c_V8Vk&Js.1,$o6fO(/O,f7DNRq=R^QD%'q#?O(/^c<X*V^BiF4\Oo9M0A`sF'Jt!AL4BSNE=C0jL]3Wt.b/,*RMSc=D<?0!s`KZIeX.\FgQ@sWk,u3!=%-H:7-8FP?j"tD
 FXRK2@LR]WWE=(mbFBmpI)TG>laSE5Keb+qUn%/a)O_784.I-/:SQC5&5I>R>/iPhX1.8e@W#s+"Im&V-Ap6NrftdbJbG?)kQe19,JUC2T`,M!6B0sld:`&t<J?[*;Qne#+/@rEkGMen_ohF382s^/g((%pZPseKP>4\4U21FDI9>1;/^2!,Z7f2=*?ZiNoDT=?Y*ntL1UOW#ECKLD5q-'mW&cN:tIqF9~>
+endstream
+endobj
+68 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 67 0 R
+>>
+endobj
+69 0 obj
+<< /Length 2008 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU6gN)%,&:N/3n=^B`9EYqd?J6bsM<e\...@1u6BI1>"BI\;TEKoEJJ:Wdf'`j[[<!?Kf&ds1YVpYU&k?eIVF[&#(H1X4IW9l^q%!$o/Ks0IKg\42GV:u0<:3RW#Q'mQpM;3^Q(M,8I01i?D2ZmrI6R77JR>N;X_-E`PPhTYm!lQ+dhO/H]Tj2OUK&e+fnD%V['gcYKXk#Y,*[7>$#>4Y6B+PC-O*f@dXb),@V8Z.KXC2bV9E8,CV-69Qq!OG0fSqMP9r+X;hf5@0Jrk]`T\&&UReg'^r!50PR14a5#\1SV@,1L/Y!Sm/PURAWKnNX$m]%=($M2!7"?NAZu*+Xk?r%GA%TZ6lD
 bKIjkbOu/$`M'TLca(JQ0sOZ798k8<9A:fseqDPO;&)+*MHClB&03oTll4O9VLY6pY<qV]f8cJV@;i<>$:GDY'^:I<8r<R(q+%XlX(h,rLdZ&/,C:7!eM'(fu^Xg.jK"=Ho6tm6.M@BWUS)(ug!YNG3Sg96GH:0%?9ZFCMJ^C)k<j-?dD;7P[+FfauaSBakoUE0/P.['dH[Jt-bT1%&qTWU=ZPM&Bfp:(+LrNBNQV5YgN/4<UOG$i:LRY^*-3FV(,fr\<a2O!;,-7qBO<`>HN=Cd+&:62raoq$"Zq'FUXkV3+*Li-A0>*]L^CH5/("_'I)s2F3/qo.JAH>[;dUH?o2BD*.E84iHQ?L/U$"(L_T]G(Vp#b&3I"NgO[,7@@u$8\)rdh:#oDf#3(`"On%]q=`;1];sgG5u]4Oh[i-CJ0S0.-U$7fQ)q<.hp$F=o:h#SN[\8KlCP,`a#uLjBqdqqEsEEPfD7#@f8K"22Eq_o;kf*V#8!!UCeeJu+A\2!@Qs%jI7Wq$295n(,QP&gr)7gVHi/6(ah-Lf"2(W6J)qKBBq_>d*>`4>Y:@Ol2k4k(HZk'A3aiaLCn:JOC&#aHSu68Q5-8ttA%'PNPYik1Sun:'rLag[,,St\1-[otOM0:K)640TolSrJk(b.Q5@j#k?$H?pU#C@Tr=!B3,-\>3&NhZeh91nESM&iV%sE&k\Fsop&ChL;5H8A'fftL>^8Z"fFEK!@Ff=b?-GOqUFu*[g9NPekJ!Vaumk<R'>T4nu,nmmZaKLZE,L#qZMCuga%c'Z!2Uc"G,P!%%^6/jJp&U$q04b"iU3Kf"_@XXoJ@t-%A3Um&de=e9%rC3!ZBZA5]t7\/rNL&"U8RhcNa'moJ3k+V*8tY%&VW**O1]1!$O0KWmI.?bI9K1?oG]BD*NJ.MHL1,&;9e#CgOIO.GP@W<Yf[X&-AC7]q0ptUQD.*1_>-8talD^R[X:hjl,a+Pm$bC<+ALU.])c9NON)!sk"k5
 Fg-Y`K:>caf~>
+endstream
+endobj
+70 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 69 0 R
+>>
+endobj
+71 0 obj
+<< /Length 1935 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm<>A...@4P>X4fjOcn6CisLL`U8e9Zn97?\oR.o_A%QBfZ+=#I-`MoU3`LuZ(og0an@FJn]sY`U=cf6*e]G1DSl:lFUV=>cA[a>d=eXe-sJ`8`QY4D&>T*535alG\]M@k'6i?(P;1PM%+5GUtXN.qh%D;L&D!1o<&[1]tPLTdg2@p]JMLeh!m1F<X#mctd5TB"32YMQ.S+fu5@OU;RE#8=Yu;Z^0/54W8QQC1Nbq(OEbnpQL0p*<dI=?ZHjZSCoJI_,dB38XKsm$,(12-ien)W:9oLR3qs:b#0RE-3qS,>hk(':`i5.N_8:seYo]nO\%>MQ=Q\&7MoP8%2PV^"]a/XRbd/.1Rq5>k8"%tO&P%ZJJCae/Q@U5^+,RtUl330(Cd:F\)p.1c-MhMFhnaN*or4o#4
 gb5cF^,E0@dW4i63$3E=\kN\48+gjBt[@0KAHqN_:Q\F<6sAr]5%lem%<J^laO`_n4c[l@S*%*0:RFSOpq9/4KcX,)?YKc<-V5k(2%/(t<CjHI2EQVSke(li8Qu?-6F="OuJMj!N:_Y=pbSjt]7nFRjEn1$d$JU21UVKYg\BQoq.IH_*mI0jU-$QcGC&%LbgiSt9+^j!19G_'9=!Yan21rQG#QCR#4$;%-4`np&,:XOgppPO0Ej+0?BL_;+)6IQc+lHe:*MN_oHU@Y^$s2N*tnI*-ai:gp<aHkGW!]\_nIg$Wg1).*Oaonh#4j*-'Qor@tsJ17qO5bIaZVoKgSPQZP/0ts42jA:Xp+425SJ=8H4Eo.d8UDg(G/_"rFcSpQ,&3:uC8C>L)nIO;^:j`JDUn0?[LHEA/8@;UPcIe72[p9e?##RQs_<odr@HUr'gRtbdSei%YR?@Tl9N<*@PAj2=lWoWHknq7#Tjt&05XZ>N<92Vm7cCcf*[R3fbWm/hA<f_3$3M(UH`n&Ei)ff<YKU4/P#Hde`BK-2aOG2Y+6u^>V7nU)-DhEC]>6N'mDqg>hd3"FrFCK[j-l5_'9UuH$.ihImSB)e:q4$!(`lfIB%VBJrMh0:AI.Ttjs&%?J46>e2mnAH\SP8Ch^=><Hf6tF:'6H.A7i>D=<.P]IIG8@:@@<>,&Qg$;CHCuU"6@XEW?HjAGL!(jKI9_o(j[/0(=1%\-Kf\b`/+uQkL'\erN08%[=r-Ns[EH>6Mj6[Ri'R]6fF?]kd\bWY:saJ>[6K)Q;EQm9qc[K_b"CfGk+u[H[c[3slL5r$f(OZk+*@h;2n^?7b/$!`INdX3Po5%MmG::+Z.ND<)_PW9ZdoVo+D&DE+_`IAQ[r2e>(NL\4BHW?2UX\DMQBP:5Z60gT>a3\3>F'jAdQl?dfF-fDLa!Mc*NfD~>
+endstream
+endobj
+72 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 71 0 R
+/Annots 73 0 R
+>>
+endobj
+73 0 obj
+[
+74 0 R
+]
+endobj
+74 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 275.304 309.2 368.304 297.2 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (commands_manual.html)
+/S /URI >>
+/H /I
+>>
+endobj
+75 0 obj
+<< /Length 1705 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU4gMZ%0&:O:SkcG/Q0Hqt2;A9;'TY&q^$FF">qp=td[V;488Q9h(2LjVHlpO2m.6CZ88PIl+Nm>k=3Q'V5in*;O^BG'inG@ZsKd7)d3/521k[.[)T#/6G"foV7Mq_<_r...@mqSgRmgE9aWkNLWX>GMZQRrCI+(?DHG-.aObfQi3NfnA*8>rr@Sa":?-Mg[e,dkm-hA@4N[FSQa)XD+M?80Yr'MWaURplO,eMDZY/0a'&mf"C8Zgls%^:trRnq/FDlH02HUnu&R3;JeDmle%Od;r?oC&5%p&d;)>a$cn$\(c9-aN[oN3;];@G^eiTic20,5M--Q2RqD.aW]LpE0_-&`O;P6e>?Hr7Y.J%`)AI>C8H;`&k^QOZ)ul*[;<6"mk6&2mD(O;,Ekh5PIk,&G/I(bU:++aXHY>]#W>Vj\00&'Y`9"-N]_u4_eb,-<DR\'d0"Mp-4OrY-=`WJ2OCNn'ZJ08(PQ`5ED@J9R9MqHf,o;'((aCV^eXSN*n(_V[isp2$R_Y.&^h_=kg5iHjMeJn1UfC9kR/L/If)Mn7UeAN*L$U1`4!X!k+Eim_MOQGKlWm0Fsp\@GWi/R<ak5M#AQcjrLg"!@NADa`It/_an=T\>g[/BLl#!&g:c1`\X?`EJ_BB_f<h]jYr"aGckU5TY4ACOK?
 QF-+:'<"6"8AUGImIYX8>qRq%pV+3n9BlG4ursD1#kg'^Do`THMT>[Q_NU8pq_WU7j_3n:2)[nQn.kA$<%HX4c^c17nmPAdW'GkUaSl3InBQ8V1Sq!2J=$ng>F8Z%`cp"SR-GV?uN3B/,3R;0V+AHZ3XS'8pQb3eZ^LXL5mt[$>E\o6r<C:oQ=C<GB2>*%(WRBp)N&h)Disr3DI)W1/6/EC_;ieh#;AO@>4,Zo8(qTH0URMa#qA!YH+Z,Ga$NZ)l6Ih2%sS9:5ZO+3j'\Uu+kAUGO3CC5*Sg2g^oCSM1Br.mPWIB?Oc,foVVHKAPcr6^K!YUE=hlnII'f7QN8rc7$Qp&:!\\\lGGPZa(>7_N_XNPc\rHWnMtIDVPkgE[6n+0Za+r]9'k%BPi5[g-+m[gZWD?%$=9!rXtd4B$R.8PL#f:q):Pjc-(fko71.qg(m`%%#!%Ij'ak1WMo@*GFeIB@8GD(rP6eo(8N4]M[k&1Q3IXP"j4-'.u-UBmuSekOtaRSr%>%>:IbkIhHh8,&WR))GH(PCkTP#aKI#Q%OUB-brYfqVI#UarXpAtIoR-^\H^Yp;q*XT:"&nh?1d$9q-,bs;TZ^Fe"Nb*bU_,^'4`#7RWi'%hp-ZM=l+jAocuB5[/K%X1Ol3J=V#l@'epj*)qfpUdLNhU+52$FFPT[;"XW/+ofaV,c7b&#a6UpGA#8j*XK`~>
+endstream
+endobj
+76 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 75 0 R
+>>
+endobj
+77 0 obj
+<< /Length 2552 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GauHM=`<(R&:XAWd%^]]N(E5%T+\VN;m])b\B%%n#C;`KAhAk>Na900^8$%rQ*d$X."W_=X>r;]2rf,V=5Vh@rH1Q"4oG9X.X$[7"@3%fH@GI56US_\69:0/^L"VNd]_Q`q$q!*KDXJigCh'dLH6HL+-=;2SGUh3rAkOU/7(=qXQ=N*ZK\oGP3>_D\kjRoYFIhIq]A*jkYSIH?ufmDM0[Hf&'2]JB9Bt).(AjREP7b]Ik.cFmk[dU-T89sjiuEg2M2ME%sVMN)"SQ6@&iF:KS>tOpJ7fZ`0PFB.?/_:(]'f5&q(t$a$PY]BAtX3(5Z#B=Z)cCDL10^8uMmk$ji(%=hIGK)SC%lp8`"jGaCGS2p]+LXO^=)WI@NtgRC6J!P:[o7pS6PHa5d)V!DPQSY'GURqe[Y<LP9jQ!*d+HFM;T:@7?Kq^C4Y^riH]L-f><Vg^2[K2]ut)hoX'XkfsMhWtUNrul*Ua>-m@COa^O`q;l20L:poV+asFJtfQ`4t8&m@]MH/\Jgk[`T@JP,nX_a7L`4T@k%YIb?=-4Ns*.=L>Z$t'm=hj(\1.0V)k915dS&4)IWh0kcBpoKYP?(i<[6F3_N;2VGc>KYOqJb.#k92WZn<g*[b94hbUPp;20Iu$>aN)Su$QMI<g]GEh>fb$%-@+JXjfN]$h=`((A23M(GAZIM]sWT+MtZ"C9T\M;bU5I$84PNY7'3gH89]VS,@XPj0QMQO1Bq0bn=ac/#W)K$Vm@47$b?jmIi2&fA8[Ofu<];Sl+Z1S0<4n>"'t./JP(/-TcTBG%)HHICPg4pp+jXI,WBnOJiiL+?(5D,n-G9:RC!o)bg8/q46$[cuTpWOH`P?SnC%Pf-9E+q9%OO1.qnf-2_WKNhEZf;-@4anLdZ6qtKS-cZTSTjJq?Fg`+`A&8.5'U<D7HSnZ6&-L@sFDF3VkZ_DDW2mS6DKV+-/(L\g+7V"G`$/EhO##<U[@[%t5ba0*^o^^G^X']]4'eXkok
 $1i>eMK'j,s"WkH_F'\dOMMfV=B`he'fS`d%S*Mr(#IRQ?2WnSp8gO+kAG"?eu`SE-OJUe71Mgg9'E7XTDX<.J%h3nln-_^F-q-cEX:I9r/)QI!hk?8NFsDcKpTlERMQp(jmMC5m;h0Ls'^U"\"%j6QH9:>2<1Z-LqLO#(Cqb%bG1"FSKG$8j,gL7*RdLA$OM8[K>7r[H"aW(T7q#F*P$Qc.k2W+-+pECa?gC\_oFk%4;,6:s4MVRWt:K#@*;gJmLpO)k<1JOsh^Y__$\CJ`85TW`&,5$S\-;cC\^lgHG3$>agPm1_s-(7?F\><dg-f=.`VhEMH3''"C*mHHsKRsf[h._sPCUr9A)^.1MV:+8KR;0`M@e"#>;381-;::Mt`gT,p$Pk4ElVl,(mi-E&#JiY&h,-QKY3%b3dcoF.Zk&pt]QX]Dcg2g!j+?g6X]PX=sm3JBd+YF2U".a!a50K9O#EdNWQaor36C=LXU6H$qE?"2Bga[l1\0X/Am>9<;;1kuq(JraU1<J?%aIudFbUCVHUE(28A0J8,$pDcoKnSsM)U?7WR('FM18B9C8eqMX)6=gC_5$V%!'Fm4UGFr%b\_&r>YM.qi1:CC)c)iEQX+eYSu61K!L&I(pgOb4FLK8q#ogN_^sb#--eR4hAV!V1T[+/Oh;YS16BhWpM$A\&YlfFd>HO#/90478`8opF)0<Pt1(8b\FlT_81hdVCSuI*qBKned5)V7c:T(4uQN154/l@.-%!ZB'%:MSD,Eq/!d$d68pMEgG#tp8oZipfu=COH6^m$*I*G]$oNa\eBkg!WI)FfD6<$U8)Z&qo<E`&,lp9$H@2flPZk(5TNnjmR0XZCJ`*0ci>Cpjd.g8/s"@0^)%#NM;;5.K,>=L2ePltR<+bn?"pgWkT57h;e83b&8%45kusLJ6&()"SD/7:C^%[BOsU),/F%;1G(K.4o+OOI(%jf:/et9PlIb*t<TM(HbH&%h]7*ggt>WVsX6j2#5S
 7\]!_G(ShksTSMjZKrQ&phs*$7XoD`]BO]/%2"B0T3G%1(/Dq0>3*9cI%8?pRZ=$LX;.YZ4,EK%O4(Z5[(1'qC/lHs?74MNRP9nIj+cY.Db;?[7;TF!,ZQME3pdBrQ+'(,&\`gXk`u0[2\/@&"K>PB5X"kLf$]l"!ji%lZ<$ZA(\u729)"fF@X+D7?',pVu`UZK/98S.%ONs(s)@"(^!Y%)K`YTFm?5ePsQb?+8.D)&'4?Zrq?>d(LCdfL>]?V=i0+>i)#WE;t+oeQBJK*PpOWpjtfrj1jT:VH4Mf[/CLRN4@?_51?XGLoenG%-VGfEH!8!CuCLm*Vc):ofmB@@SSm.7!Pn1X4@IYTSP:IsV9`&iOL+m"B_\?g'>it.8k(bcnjO0A$L?^-rb[I5mX#6*!<a+'Ft^\$@ldlX,N".Em;`l"[%Db>qM&_-3J$KNH?iEB&1q2e7#mf61ZXP[:<lMRJTqkr#Y@C%u.IK2&aSXA_te#As6]#3:8>AF`3:O6(_qqL>kh!$lS[P`maZ>Kp&*Eq,B3m1D:#Iu.Ej8K)6D9BT~>
+endstream
+endobj
+78 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 77 0 R
+/Annots 79 0 R
+>>
+endobj
+79 0 obj
+[
+80 0 R
+81 0 R
+82 0 R
+83 0 R
+]
+endobj
+80 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 341.841 127.32 329.841 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Mapper.html)
+/S /URI >>
+/H /I
+>>
+endobj
+81 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 225.441 267.636 213.441 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConfigurable.html#configure(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+82 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 194.304 212.241 503.58 200.241 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Mapper.html#map(K1, V1, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter))
+/S /URI >>
+/H /I
+>>
+endobj
+83 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 185.841 172.98 173.841 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/io/Closeable.html#close())
+/S /URI >>
+/H /I
+>>
+endobj
+84 0 obj
+<< /Length 2901 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm>=``W5&UsJXi*/<D2MM3V5,EF-RGD[oEm-S0LH#g/"A.Al#qc`McEC%8mY3u!P=eQ//O8M%NUF!^!IoF0qnr$]Dg0ts_>bVG@[FW#&-VLOo=N>0^'IjQ8>fA2md>";TDnin*Z$A`\Ok^?Y/;:Cl]9=^j87&A4XYHL+0^8#o1\;V/Fl0kc1tqDs7k8kJ>qis+"&tsaT:qL?t9L9JG;-Krd"O1[OKtn8S=IF2nEJqiR?!K5H;(^G+![)H'qXfVh]!Ojhh>d?dA8h+Q5/`?;s;Ifsm-dQ\4l-MBBtr9D;#AlD":94GquLp\Uag#&UK.6$$'Oc]=^YH)hU._-Ih$&Jah9Q+C^02:MAfF<P;6UhfTH[d&,hj0I+29%EUP10#jsIBm\T0iP8*6gL2:C!g7.@$@*[D)IRi9MG:.`;pjQ4=&1;.CeOs.4Bs!h%jCSHHQr(@'N"P2Q<b=o<3"jG+9>'DStP6P(jt340hY9(+*2\WpHnJEotZMP%@`8Ca<);P_a]]Z;r.'-?ZHbh%F8/\$S/j(!FW2BS"r6pQ;_>^THd.\%>iBeuGIdd#q>(gCanN^"_>0m+7:b(/=Zh<_9INFS-*rTD1YXgVX)s_d805.9RX8<8L]!B_U7i&p[0>A2!\"#7*46%]VUdZnUNa)B@3Ma\7W!A`E0$n#TPhf^#92FCqpikUd?6"SQJk(+9!Qe>:lsa*7*&Z=uao8UroIDoBggD])Har[$ZA3XN-)!C(Q#OG#1#+j]pmT"2oDCWPrLI"Ac!p$PBf7g?I\,_81ZpI4nU%%QYFIe[KnqcO6Ddi-0=k:Tm-]]kcQF3(#=iX9/YVH7d[][7i?>cD$;fIe6c<h]p@VSFoe2/*<UGWjX@EYC3ibQA8"$=+/pO%U7VA1/:dYp23=_3%['q+`)Hejo$l5^bD_l+A`]UpOG7e/FDSU[#uWRi4p(m0nLNPPQ6XF2XK#0`>#?^)%DoW'@2,K_9lReh$-2Q=&R/GQTo-f"
 +b+>oa4/SM9?o$fUF8iIq[?PtJgZ-IRS\a!APNQKDbUE_&YHNm20f]WLc7brj+9lp;YLq!/n*>MAmLJGCOk'bqiX(?jL6<F'cGP:X=6l<@[8O@f^Via1f[FF,Ybp6dW!:bH(pCWRe:"!&K,Ft]"lHja1T*0=]af2&14=$_-FCu7M@Xd4SGis!d1ko_bQKdP8q$p;W<b2?3*kM!hFKX/:))mT6J2/^_i.r0Knm;[i'cu6m\j<X1dheN5V6$"qO0@\T%BUUD"lD(@/S-P(>c4,=DXU/XF+&EhVF.;YD`_EK%W:M2@q$e/c,D]f]bT=aMA[j"pgWg^]8Oi"$:=Ml/QFrtG!P$2OU]l="H!-2ZbP7odId"ZVU^53&=RI)_EJ7tj!/NG"oh09+kQ3$GAa9dgm7"$IO)%bp=hd,$GkGZ2XdBeacd(6tG^qpo-d/[?h("al:ln&2&9o8c$V)0N&X7L_`%=6a0SsJ&h'iL3IaSJ/[if&0WWWNo\b3M"-G/SPrO23Y+m_]e<6C$G>Ep6df<.]bAqCT:?qScfMhI]H(fps$p\u"'J'r8Yi(Ht\j&&tZ*Zl$*Ocg;ur,+XQ8T>?K!8U3!2DEJ]0nV<$$2pt]0MfiVK!VR`RcFtoD9+C,kETMd:B.*U_N!J.Z4=RO8F*[^>H,:h"0&mFPEU^)*]qH;\G4n2&bjJ:Wi\63V9c[E0WObe7P'MA-nMppr3B"-BoS8oWQXPbeOnpRBc0EB!^/IH%.6<"f3Mknl0ts.R)Wg,2.ta::dL1$<Z;=cY`fOA\4P@&lpp-2XtB$&OR"A/PJ!n&\`LPA+#@YNTJ`/Zgsa*,-q;]X^6-#YW/<@!8uY6u:g'm[7P((2ZW#rMDU&.D^,pb)3lJ!1bR`ZFSec1LP'tip'pNY1Ma%6HO2.U2_2-Wif.CS?D#r(;]?d,9Y&;/m/bNfN9jX*-B1!)75g=VXkdlV,DN'M;bGVI1!Hj/ho0>564+&EUkQN]@Vu%u?Mj6Y
 J!N8OO\>I_"@;Q%t;m3$D?mhcNL9cRX8G-u@d)8YXIV+2=Jsc=SRmRDG63VZU.!b12;(cK.m>KXgkQ9q6ZI5r@JV4k$pCt(E(DZ\m!K;60\0/-`TEt;odI$5</O...@Gm>EYVg+M@t!ZMr+b)GV-D4(X"\AM.*g-ArF-ZjdL<GYF?8%tB8Nd64#%C.t%>d\9HMQOG(\_"6U@e=?l-[`4qfX=8"B!1AE5/u=R#!9_^`Lg.6X.?W31?ZO@6q=1e1,3B=\"M4;m/qX5TJUWA?*AS3+PT7'uMrHF$E>XN2@cZS$q-@a,IVutIBp)>gS43L!6PfN.9a"r'Xs?&',n6KrR<-I%G(JC<@6?]LS>\^@UA]%TXGL$^eZ=hT)uuHfn=9gb:[mTuXF-XEHoPdFiYUSFAL(M>>Xi4;0FWCYP<BueU6!RRQo9\ecX#kn-PhHXL7VlloWi6h<flQQeU]<lq_&2$X2Y~>
+endstream
+endobj
+85 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 84 0 R
+/Annots 86 0 R
+>>
+endobj
+86 0 obj
+[
+87 0 R
+88 0 R
+89 0 R
+90 0 R
+91 0 R
+92 0 R
+93 0 R
+94 0 R
+95 0 R
+]
+endobj
+87 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 664.8 356.616 652.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/OutputCollector.html#collect(K, V))
+/S /URI >>
+/H /I
+>>
+endobj
+88 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 569.6 317.652 557.6 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setOutputKeyComparatorClass(java.lang.Class))
+/S /URI >>
+/H /I
+>>
+endobj
+89 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 318.9 500.8 483.9 488.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setCombinerClass(java.lang.Class))
+/S /URI >>
+/H /I
+>>
+endobj
+90 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 187.644 426.8 281.64 414.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/io/compress/CompressionCodec.html)
+/S /URI >>
+/H /I
+>>
+endobj
+91 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 190.332 282.509 298.32 270.509 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setNumMapTasks(int))
+/S /URI >>
+/H /I
+>>
+endobj
+92 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 219.137 129.984 207.137 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reducer.html)
+/S /URI >>
+/H /I
+>>
+endobj
+93 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 357.612 197.937 523.26 185.937 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setNumReduceTasks(int))
+/S /URI >>
+/H /I
+>>
+endobj
+94 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 163.537 267.636 151.537 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConfigurable.html#configure(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+95 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 194.304 150.337 508.224 138.337 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reducer.html#reduce(K2, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter))
+/S /URI >>
+/H /I
+>>
+endobj
+96 0 obj
+<< /Length 2506 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GauHM=c_<,&...@RoFnX>H_4,g:E^:W#Vg5-!9rc=dHs5F1ms6:!&?m;B`?EZT*7XfV\^2XU75c1)Vs!<<eA!q6M;>g=FaRp2Z4K'fG?JQsX2$*rCUUfZ#R+u8[>Bo`22UEcj"ea9&h-u!S%AI:FAO!IrBAc8cq.QpfYedgdHELA'6oa&m#c67U$>WVKG"K=':4JL,Rt24Ve)f_oV3S1\KI5:g6NXr67MB!IP_Z)^;VNFS[g:<_E"s5]mM3OV^.Xn-.R^&1bQe!OuKeFD@6q,_CR;^#]]!UZuQaOCp3[:T7%SiqC+'^]96GlaG[lh]rnh7IE@RKVtr,3+k;L\X:p42='2e>G+>GQ5!T_WR)IIkLXk\1QQq_n5Z<[fPi2mb;rCPu1-l`dL[_)h,)T\ATG6=CYtjNJ_KAtJ>qH1/o2uHK(=t&<($Z$rPBHP+'#$Yf!G)[fZbEQ)dh\ps9D3^<C)d<R"lTR%mhe8M6Jss572-:j-tm6(36TarTY%fAid@]ipb60D/ILd'E*c3q5a;umD5?Nk'q)Md2Q/Z<8_0\[I+!:=V9`LE?T]?me.1s!SQ?B+\3t7OFE0F8i+XuhB4]8mdnI^e)pGZKB"C+gSWu(H,Cfra7*2ZCn=[8dBkJ0I`O
 $4.KZ7RsRm=dh=!Wcu'!eW@qBH%I;f/4JdT/2j9o?-iC?!s+Z92bg<H>*^RH[uifD8D;>>@8`pUJ@^Q?-SDjr%2FYI*oD+NaSuqA66<<S_>5h9m(#_;Ol7bC0lK-S#PO]iKjD-@9hG0AmI%9cR,!9D/0EK>J2Oq%>Lj8^AhiCj5T6Xb1j(-irdK*Z"fm&7<>s29fXW+sc+ime_BQT:bhQde+,f')pMTk3mY0:M-]%a,I(AfiLq;+NCq&F9oHc'I8ItN1s^=AE1f)7Kp*jK%m5RlfZBScB'?!R,9EFGZmH7%ecd2PS/K_Fd]#1Ia"FebJ3Yei/4oP3QJFSfWi=62aUS\NXH/,r`aHtg2.9WXTeH"nNOP7H!VmtcYUS$Et#Hl$C9:<M+dXHOjAWIL_j8=%fNdW0\d"T'WbS1E=p_];^A$PfF5#8RrqIdiJ\8kqNpiYmU7lD^eXr;Qo0Dt8KAYj`H6.N0KF/>g3#d/qY')-WQe,iB?m@pR[hOE7;"3i7:jbIn2Uj_4fZ*;@>qOT&pkI9bN/LgNG[fIRTu)#c9U(l"BX3S3tNTQ4-7&"do:6%Pb*^c18?L#HM0NV#XOlTCDp,gTL;t5dY/S)8gf?#W!.!d/+[%6ieBMODjBQq(S(%df2U,s.B'Ec_nURn"bALsnYau<k>#GO8>:\8lIqqP2S4.R2LZNFTC'6-^NhFEj*1^U:B1U,^/,7k_\>&YQS1W&J-90!!EO0>`fk1GC>si8n.)=OLZb\gR4J2/fX4Tg`=&d_;hE(&8kn4P-WbL89Zbj6=&tN%pZY>%'e@]5l_8,H@%FQaAq3sL=^Ye-8<Ho)o'Xe;A;r"mnLQ`[m2PaWf%IMhm]u%]'H!a)ck\?de^C;pMBSJ<i@loh*iN2-q4Z:QhTS7*M5H$LXOjG#e>O%6ft;4l`W4Z,[-kk"`p!W"ei_e'45>J!Q@"d^$cF%Qo>YmD98ur`C712-,;B?$el4L2YamN$\uN9K/e1ZW\;cK
 sjO(ZC0_\0=)"]EXJg!]r9Z4TI==1;/n2XWL/Teg&!8L#cVE![.l!ZMs=X]M4ZYHmM]\)ApOHL`^\BPbec2E/_OoJO1rQYl&5;2`Ge-p!:%=I-KROZ[J40Gh^%S,TA;j2'?l+Y]NL>aGFelh+;8t/82!2h=1I'HSr`6+oS%qD<#OeDO,Yo2I>#d!dZ:)67uDDI]u/C>*G&_nblFliJnm6^RS@G7p3icrV&gUX50[lA:tT+[tO?JeH+?bQ*4%i#*O#HQhcY*,.fUb:]pB3g]"[ha$aOY$JR9UOLrWrf5'(=2[0PSD=L+XlLl"AmGnMn7M)`4J,bN;h1\6ii7HO9`ao:Lf_J*j6J1has?l7Jb@YK+EBhluTuj9U\r5&#hgbH:3@<6*J:h!;i'=8s'k+>12c?"!LO@V(bD-8p[1u5iFm[T#YAncGtSCPB;+[^)p$EI1g]6h\OR4UZ[D[?U+2hh4;32_QdmW)*qfhcPG>(/%\-o]$f^#ZtKWhr?%,S/8>~>
+endstream
+endobj
+97 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 96 0 R
+/Annots 98 0 R
+>>
+endobj
+98 0 obj
+[
+99 0 R
+100 0 R
+101 0 R
+102 0 R
+103 0 R
+104 0 R
+]
+endobj
+99 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 193.968 651.6 276.948 639.6 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/io/Closeable.html#close())
+/S /URI >>
+/H /I
+>>
+endobj
+100 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 397.008 346.308 385.008 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setOutputValueGroupingComparator(java.lang.Class))
+/S /URI >>
+/H /I
+>>
+endobj
+101 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 383.808 317.652 371.808 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setOutputKeyComparatorClass(java.lang.Class))
+/S /URI >>
+/H /I
+>>
+endobj
+102 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 171.324 321.517 485.244 309.517 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reducer.html#reduce(K2, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter))
+/S /URI >>
+/H /I
+>>
+endobj
+103 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 356.304 287.117 410.316 275.117 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/fs/FileSystem.html)
+/S /URI >>
+/H /I
+>>
+endobj
+104 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 273.917 359.616 261.917 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/OutputCollector.html#collect(K, V))
+/S /URI >>
+/H /I
+>>
+endobj
+105 0 obj
+<< /Length 2528 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm=997iK&\cSpi6_aPg?du!kRH+T$'<u&4[9Ks%P=\,FbXE_I#KcVlVf&tZANHr_BLZr(iiU+p&+*UerMI[5J92XpPCtEq#Bqai\K*l_[q:n6217ti42O+JQ"#\nFVd>+92?BJpY^9pu;FGLWi71s7@UV\:D?,S=\MBb(Y?#)'f=#6J$EfEp$dOri)C18*AdTbMPQ5R=n$1cD.4hbE?7r#hBW;b[N%FZg8nreXQEOSEA^uMjb_;F_juVcS&1!bR^+XMtAbhlTA6=/K9tgcS&jLS0.#c<Xh*jP!sdsPqLARb?U@SZF24:"rSQLS"DNb?GcWf2<"XM6dd>fkk`0Y(\^7eKZs#je^%C*(Mk7(`g8I!VN\)#<R&jD3T:l_29sHS1<GT[n=Qa"7C4g%N]0`".[PQq/?om4';OC,'MFZ`Po>&$<PGF;:Hf\[EMPF0nS&6YO?icb*:,%`7NtJ816Y-67kgunDSY&T^V:OuV.<<E+h9)Vr.%Mooh,hLn10t&9'5DRW`Q"b9iVuUO$=cYgLaOKi9]1&+Q4jm*LgNBd>=:%WbT`&eDQe4/t\rbg.KoHa9Ibg"3_-R=Jmo![[^m^(js%[(gK81_U=i9?Q'>&RAu,2(mV6?PH>IR5Ab?DCK6L<ObrA79Jn`M1fbk/JnXRu>s)F!-\JG#/lgV;ZI*UU\WQRNZ>4G@'RK13b9h1c?rg*C)Jg)oF2OK,+lc8#!+Fm=hH+74YJ:&4dnT"OjokhUcqVNZ(Nu"^54kc_DTk62h7=.64K`dKQC0mW[;%U/HbX'W+mnpY+r7>_ocSno"H+,pLT8SD#k5KWKd*BL)VIuTKh[ObKfoVo)Dli[g2MK\0g5c''UpF:]2fle>0YYn.KG*C)7MEmiWr#NUdoF5H;0l-edE.W7"W#A"Y0E(3t08Z-d]TRf9-D3Fg86(H>\^WjKW5p<m'R1;0@Y<g_>ABkNgI>'DHp@,U"QK/-88N,CI<i.#@Cfk-urP,3S6=)u
 AVB?(E!(g.![8*&-<[hXKF=Y4!J/T-%u`?rB!,LkJWd^YD,MUVRVA0Z`Y8<*QKVe]=bb%LSj<IHXl!VOn$XaG4XTSrkei,[Gns[!3DfJIAI>LIt3ad9;\uWt!X-O0]scAn^j6d!l;kX1M-qi+W9gI/sZGU9,_N=_`SJRkW1O:hSR=#n13!^0/u(>s_RL2lPah$UD5r@;ReQ1[]V-H!$j^7CMF!X$SL;QSMQAFS"M.M7"?[:FVaTA6\4jL(NXTM/ZkR,<uO%U8ltB@kIY]2C!*hs'R=Wdh,R(kVQo+m=Bf6SeiCkfLAG:'<Zl5)g8HpXVijI&HQ\=?>AD?)".u[l.[)S^'gDhi#;.X,4U\7Ju)tELtg#.2)a!i(&,^)*K./?3YX+_V>"YUXePJhe^XYs-k]`+]2)?@Zu1LG&uDfTTd]i5$DO@E"dUk;$&ceWXBXt9d<I$C(#I7,m'AAW<<"T3\iue;6JtTi*b`eeh[op<=b_`;I\`i%DDb@pMN*I7g?<?+S2#%.I;FO'$mOKqb<$g.R.."4Nt@,:MI6Y9?_GU1!'9WDi>a,O=mAM"P&8e:e:@$N@)*r9[l2&pU>\PF'YQSY73"'+Jp@@nA@!_9]^/=!&]H:,K5(/`Mi-st?P1/n%[/M:0`7+oT&0CEkpmn**/&YD(']srH2d1boQcrU]ZSArB=`BGJ<7Oho$c2ZS&qt[!9'8SSEqgR@=i'%cEL*r5N!SsX;[]eoqngP/-=[`DBQQNCg(q,%bg#iFbM]4\+82GrH9o=ga]16r5H#`B[hC5'Eb7;Lp<L\#"LeKNT6o[^u]I9F2.uGA`h`-/%,W^-G]),aL&JG+=q>49/3!(RE>QUQJ!UO^$TQigtq2o^P0EXNg-hlVLs$];XT[16dlA!(:LVn^Cr_]-6Jd3Na5Kph^%#r0X&<NN6Hf.+`tO.Gb(j$<n`WqWiE0uK6dVq\lWJ3k?=JcI0'F3Jn3`'1*+j[k@)AV;f$TZm*-!%9hAa
 )35+QdMsk.`Y^Un^=\H9!XtTq(M[4>P;73CCg1n;aMCfc^hVAKISlN81Z?aY-4p0jq\Wg2=@R,_g`9I%j<`"B1YN3]PE<`@17Ih=>kmQX%=1N<`,2\>C4qc!9CGAIIU1a`di-Qi"DQ?4n!TSlij*%iGW>i1j'3$(?6/?BXmr]F)dl)P/hN+)><'c+d1'.c3<eY86LlF%0mlLOM(c''"B_6ngD/Ljk`'SqXUcWdMS.@,)OWia#?iG0kl3aTkrmA:ZG74+t<M-BLR@pq2C8Ch-6j1L24Gt9Pa@LuU8sXM"j_L+J)<&Qj:aHGi(_L'US8KK.cXPiu3K]0HIEN!C5D#\MOc1jdBMAFH@omeM.$b9THMu8MVUEraGb]cOJbO.C@aR^]iL'VVjO6ZlpsPDp0"AM`Ju'lUP*$$Bl%*(^?atGmMC]O*+(4LQ"OJcP'>lRJpmAJ[]c&U;BPc905[!4siO[G7FPDn4LX2ejnae\\C9G$K2n\t7IBP&;$d0eRT\f1bT3\5TiN&4g>&aHGS3_0a~>
+endstream
+endobj
+106 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 105 0 R
+/Annots 107 0 R
+>>
+endobj
+107 0 obj
+[
+108 0 R
+109 0 R
+110 0 R
+111 0 R
+]
+endobj
+108 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 144.996 482.109 242.328 470.109 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileOutputFormat.html#setOutputPath(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path))
+/S /URI >>
+/H /I
+>>
+endobj
+109 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 418.737 140.664 406.737 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Partitioner.html)
+/S /URI >>
+/H /I
+>>
+endobj
+110 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 323.537 165.324 311.537 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/lib/HashPartitioner.html)
+/S /URI >>
+/H /I
+>>
+endobj
+111 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 273.365 131.988 261.365 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reporter.html)
+/S /URI >>
+/H /I
+>>
+endobj
+112 0 obj
+<< /Length 3231 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU6969,O'#+6En2RTA^]aGh>;F;GS#U::@or&+]a%*&,\Hhu;PZJ$YMTOT!Y#F2W,,1Rb(SGD1qnDMh`p#spjDhn46djEkGPr9-b^D;1&N+_iqJKGM[<mUfB1rPm[n^\?R;Ap-X":t_;j^sUC@.q]YF%aSIbb!2pd:&M:9t$Y.T76L^>@>\bA9Cam6i_I<Xtg:JG!SgTG8Z^&N?'^UOs`'&p6>;B\_)Cb%W(oR?h^)bCkk@DKOuUN)0_9@"u"P^%-'e"hDF[$OJ?K4c(<!YPU?1r?@O2Ol,D-7[<pC9E%LjE9>MZM9W-78rOFB!6^QNk'Vko5.>?h7HuR[EWKlbBJ8Do0,P>.mH2rT'uM-B%ZZ(Ir!cALo)&,1490"*_tj6#O=7lNd&RG7_38!,K]rTF/D&aM./:e+N+fqC7k&t&Oi$SYtlR%88QLBOVoH%25pg6/]eDDXs8kij[3@)/6h];:t/e-<r&oV?#KNHKnilR@kXQ-+\Z$R3I<Z-rb+2CpVb0,JHJ++qjHoAk-Q*cgSW,Ub+$-%PrJp(^^ioMl[t0,C>]=B!9#(mj[fo5Lp2i;Q&E!631gipA3a:6>2HtT%V?9JUB'pIp=FO19,+T.3T'S-I]d3<SfdSs3s:IDEk"#>dM=9hf>pH0hKFT4p-;5LU]H\aAu#lP!_0J76;`IkoTs0b+$MRT^,YcP#U_EI)R0hr4IY1b8AX]Fn2,_^&!BWEC#G17:=[Ko6I^fX-3.Lh&==RSG9LZ3N3^grD166kF'a'.9Qe>?+4s.hlm/le$rYP.)pED;lG<]C"sc$^0_QFjo$eG?$rd<P!9,^s@lWC9=CRe6&/=<jD-LiI^ZHN[a\[/4L!(#<PD%<)=@YP09ZY!1gO=8B[.\m[P*"B!UCC7DcN.b<I,Yf$Lra5q0WaC!9>=VtoLa5D!?@pTU;IepJVar"*1OG82/iMl(sj.o@8)8dUjms\D+UIoNYI?(+t`<kE.$eU(dJ7/!g/>0<H
 7*h,C`-e2L/Wh(Yc.GV8D9\A<95...@c2>M.\>Kd,3U,*si9RmkOl#2R-I$]W+M.5p)jqU55ug_:Mo"(V9.>h$)>)mEE.,pkCVqH._l2Ans^$>?]_$:WCtHu'H](WY`#+"Pd)5$!X4qr1>F92kL@eECruJSB`$3&/d3WoD>s($B)UQZuV_352dhfASW45-O__XC0dr4;GFk'dN3^@Fa3b-AbU
 `%`s3N]NG53ne,#pCUf".W)<Q]...@K.>+R,BEooBXP.#CQ(+`Fs&Mr$31cBZZ8/HSNA!fY#<I7unEErS[`AVO(TsmPH@ZK*=!irHS-d:Q-_uh3t('Ar3Su=;4J[/t/h7YH5n'MG%DhDeKl8&;E>>I9r5D>)Q+XkJJDJjbHd:FdN6[6Uc*Tes_Mrp!DX!<Er0lgdu*hUkWQ=@*[NG'Z-4M\ta-_C@BqJPcFm%?U?1um*ZJJ#6C!9$%:Xqdb,<JK%Qg!G[8W4)5+-Z2C:k4/g:>=JRQdGP)Q&f21%FT^]K:AI+!3:3.5Lcq64S%1?Z'W<#`dqb25G/-gV7.)02i;+;?=M7W,FqENr1SU@WOYPRaQ.7[uM_N2!bminoWnY;Ai,<u[YhEE_F"hGa[bSW*#7&CjO,5&O9$Knqt$:5hZJ^6r#>M%SY3["%2p+ABKgeXmr`BosqB<^sD=#Pu"ZO%A3dVQo9(U%/a1f<s1=X_,o\=0Q+;1#"_LmObl/C!V^Ca8,8m=GR%BEg(3l$M%=blQDRgk#lhH73\Tr:kD?k/($bKWPB:?;AP%&jb@G(,cYMjPN:&BHd5t&Y:]sDP)`Yh#$)pc2e^a\QU;PGV"qsg6?2s$A@:ht][84%,h6b&I05IJhgf0Pg[skn7mSRrT"/qKJ"(;L13p'U7anh6(8V)_VhK7feANUj+F0stAg\7h-kTg.;>+T$:FucR,cdbE%Q]nF8u2"*<'p+Dn'?_pQHHL!Wtg;UZ*H".9jEe$_f_m2Z36o-Aa,O[:1gh'!Rp*$\mI>Z3ld-*`P<>(DghA+4Q)&QD`:,:)OE\XG=0C_$;l.LkP=J(5CPYY@PU,Bm&/s;L
 V!^#eo=mnV$Q?O6\elL4:6PGaGih9-#[c=i`4IZG!eH6B3n3IMfq.TOChX-?XT'W"c?O9?I\Oe%m>-1mKSU^F;SmX;*Q5hP*Aas40nbJMA(-tObG(2'AJ-gN<[H2l46&kSk6d)hg#i/:)nOn#U[r56?%6h`V4Pt0U5?SMl;)6^7XVenNtLinD.g2T'?'Mflq@EV`Uu-#']OG\s,"6Q)/qkZHq_p#Fp7n2\s2Mrr_3u-Dp~>
+endstream
+endobj
+113 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 112 0 R
+/Annots 114 0 R
+>>
+endobj
+114 0 obj
+[
+115 0 R
+116 0 R
+117 0 R
+118 0 R
+119 0 R
+120 0 R
+121 0 R
+122 0 R
+123 0 R
+124 0 R
+125 0 R
+126 0 R
+127 0 R
+128 0 R
+129 0 R
+130 0 R
+131 0 R
+132 0 R
+133 0 R
+134 0 R
+135 0 R
+]
+endobj
+115 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 631.828 168.0 619.828 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/OutputCollector.html)
+/S /URI >>
+/H /I
+>>
+endobj
+116 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 303.3 584.228 335.292 572.228 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/lib/package-summary.html)
+/S /URI >>
+/H /I
+>>
+endobj
+117 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 519.775 130.668 507.775 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html)
+/S /URI >>
+/H /I
+>>
+endobj
+118 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 395.592 454.975 417.588 442.975 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/conf/Configuration.html#FinalParams)
+/S /URI >>
+/H /I
+>>
+endobj
+119 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 394.26 428.575 516.24 416.575 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setNumReduceTasks(int))
+/S /URI >>
+/H /I
+>>
+endobj
+120 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 270.96 402.175 378.948 390.175 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setNumMapTasks(int))
+/S /URI >>
+/H /I
+>>
+endobj
+121 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 230.988 352.575 387.648 340.575 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileInputFormat.html#setInputPaths(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path[]))
+/S /URI >>
+/H /I
+>>
+endobj
+122 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 387.648 352.575 533.976 340.575 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileInputFormat.html#addInputPath(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path))
+/S /URI >>
+/H /I
+>>
+endobj
+123 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 107.328 339.375 262.992 327.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileInputFormat.html#setInputPaths(org.apache.hadoop.mapred.JobConf,%20java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+124 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 262.992 339.375 421.992 327.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileInputFormat.html#addInputPath(org.apache.hadoop.mapred.JobConf,%20java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+125 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 203.988 326.175 301.32 314.175 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileOutputFormat.html#setOutputPath(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path))
+/S /URI >>
+/H /I
+>>
+endobj
+126 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 197.976 265.375 331.296 253.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMapDebugScript(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+127 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 334.632 265.375 481.944 253.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setReduceDebugScript(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+128 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 335.604 252.175 521.568 240.175 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMapSpeculativeExecution(boolean))
+/S /URI >>
+/H /I
+>>
+endobj
+129 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 291.288 238.975 491.244 226.975 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setReduceSpeculativeExecution(boolean))
+/S /URI >>
+/H /I
+>>
+endobj
+130 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 282.648 225.775 404.64 213.775 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMaxMapAttempts(int))
+/S /URI >>
+/H /I
+>>
+endobj
+131 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 219.324 212.575 355.308 200.575 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMaxReduceAttempts(int))
+/S /URI >>
+/H /I
+>>
+endobj
+132 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 223.968 199.375 399.936 187.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMaxMapTaskFailuresPercent(int))
+/S /URI >>
+/H /I
+>>
+endobj
+133 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 273.3 186.175 463.26 174.175 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMaxReduceTaskFailuresPercent(int))
+/S /URI >>
+/H /I
+>>
+endobj
+134 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 209.292 164.975 295.296 152.975 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/conf/Configuration.html#set(java.lang.String, java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+135 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 258.96 164.975 385.968 152.975 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/conf/Configuration.html#get(java.lang.String, java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+136 0 obj
+<< /Length 2430 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU5D3*I/&H7-n+s.A2V62c&H`Cib/[S>r#g-$p;1uT=GLq%a,BiKm62YpRamNc2F`I_cl=lf6`\ZYCB3t.DFn,.iif=I^D;kNLMqBAb'a+R8p&+OX_KS-Uq6!_=COPa5`N'4MBLj,MHhk[jVun\RRgZhQZJcn5H9sh=JaHU@h"p]cV^F?7l(T`7rMQPhXbG6oH]g/'aA_C`C\81)KbefXAi_.a+<blR7KIMN5ege!AlsJ0c]^hTZ5=X2NntP\U+V@o([>t66!5+gcjPp"H;R4b$%6q.D4e=%m6t:Ipdq=WCd4b\"Molqe6(6P\`*4<o)'aO]-J:lnkNaK#;t&ul&P4raO>E>Z)X6UfSO*IO,0)XLUc`kE$X[pSi-m9-9*CE_>%Lqe_T_jBPp3E[3M5s+`jmHB<i1R3\7>C9XmJp7:@I*]OE`s8#pD!m`,g2a();kF(3o=g*G7>6^)2W1aa'XNn6UWDtLQtfSbe$bK[tCOF2"kj&L@gM1^qHO7GcdnHg45ilK&I1pU\8d[D=HbDIZVZT$U_%nV#WdkheVBX8,H?+!J4V90*+oS/).)BR]Ap?80+4lX&(/l9inR\utA9Wm&#,9W#:1DkdO39HBg`C=Y_n?3>1$qB_KiZEBRNgOmANJX3L?45KD'SMgmnQi\S%3j""^Z0IQXHCUOfqDe>=&>.#67Q$3XN%Nbe.-?LPkDs`eP3;F,"GEa;;>Q5nJt9Jl;eIKfj$SrJ_WO.UmU8TB!oFQfm1KBkp!>4TT$m5nPsf!4gR:c<IAf^EXo,mf"s-XXqDF+hi7ea;P[Vl86>FJ4/Cn8iorS@*>K7,'_oat^+Q`p#e.i*^M)S#UgM>7L@#T<766u)pa.h^#61mL$kO:^l((Yl+`CN7R.2(i3h]QaRCdLo+`WF!R%dYn/k(+ATjt)oFG:`=em2HI)rM!3(@`SmJ;nJlc->CefZNYc?=eN,8pQULTtP+3[V7OU`Xc3T2J]:Fl=qmG$pEL5GG
 <_"%lC)1q\k/p\_Y-JB;5P;l2k,uE2-MuOmWd:=%pcKZ;*2-X2"ONKbd`3&a<=hU/FW*%(dVXXX,V+Q\$T"b.uWu?C%%2Yr+CGaM\N0AMSm8+Ea39j,PPs_P?BCkBU5!TAe8Q$@M,N[p4p%U+3ju.u@+1m:D^8!-tkhak=AK)rqbrcDlR4IrbKs%9c-]3PiRX>+b:FS0r^L>eQTQ*"%U\n1g1J/]uDh)q/q1kd4@)VTN'<R"NI>,5-il$s*k7F&@('.Y>`/H,[,M!Ga<]:kns7m9blLBH';Trgr-AZ@MN,V)],n0"[H>*WU%)\%_4EHb>Dk6k3>H*n4PEbjX;E"X=.nB*;.&?LGt+0CsTijqTE?96b/+PRGb$a\[n"?E[#DT5PC]r*tXjan)@OX5u(8O`lLO:_/-Mi[[!YaJL(?.faZpKI,7GI17@"3+38$?D0ag)=>BIilQG/Y)Df=!KL&iAk&>r.Dh1iV:"#qM)Oq4bN7@a-@9/KEX#JA%$eS4Z8C0j$4TBn#]f8:*X<n9%&jF9BTIM$BT4du'Zgj:&>j?HO^2:_%i6UL-i/EA'=grGeTF,$S\\8=FXV"3)LA7aLI05Vrf:`aB+nq%!]#1V+a;2O=!tL4T0#6R$/DoCL^[ShHERc65!A`0eYf=d,%#*3S]RjcDe,r^Oif6I(/g^S:dOA:dc!ZLs.O_lA@'qjP?sM^0/I6aa$UF5%T)N0(!>@FMe1pW\@Tf$G)+Qu8mL2Z-'Y[=F<!-Tp`Aqbq<[`'L-4V_7/t!mLaZf?&Jp0pg.&DO4#WVm*fgc;<dm4Zd.NQ+)D,Gh"-RNd1=p6eWh5S"5CNL'=D'lV*bK)#V@9on\%(QrbG6ZCZ&[g':$@>%]i*GY[oP`VUiG;9k8"LOik!G"8S6X$njD`gqPg*4*_>V!V&P304rRpA&7E0M*U:D:,!2&3S&DY40FcDH6,PXV8Q!$SV[Q^ri)!*9%:sckmnQYu!pIb!bL@g4m&u'5FrUf
 BqKo2rq:G1gP0=s<_k<*5csh#H)P@C%G;>E!fKiZGS*Cn#gTSfn7!5XYg6?MDTn245s-k^CcllOY+]W394`E1?lCk(='bG-T"2MDi_VH`[mEJ+2]Ib8N&HEXu[`'(q%6^HH\QI@#.LtSiDaHql72&B'A?`dFD\nB53qUB3o)m:p]8h/qD0>o3pqrkqB2".dDDmQ9Q4p\\VYN$P=h\mWLj3*j5iM"HOVLc\:a\HN03DDD<QcKngS(EQ[9k)Cmn^i_e)OHW3ZuUf1ts'Zb\fqMLT`RSN8K)bfc7cWd'4u_i&QU%!RsQs6'<=_I+Ae'*n#brIEVd?3hb^]0Va(Y3;9N\MIbmhbHK*(?QX>Ci+a2_N_bFqA)#tH3hTippp)s#I)#Z[go.hP9'i1_.@VOKk/7CbA_F<OcB`.gU&)J,E#Xc1%qu?e;#~>
+endstream
+endobj
+137 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 136 0 R
+/Annots 138 0 R
+>>
+endobj
+138 0 obj
+[
+139 0 R
+]
+endobj
+139 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 203.947 178.332 191.947 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (cluster_setup.html#Configuring+the+Environment+of+the+Hadoop+Daemons)
+/S /URI >>
+/H /I
+>>
+endobj
+140 0 obj
+<< /Length 2411 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU6gQ(#H&:O:S#XNuWJ6c<Pm...@V>Xb<GfiNlNXcG>bruYdnS),79ONOkDO1_b)p.2iUS%K3)8oKQOd,]PI+fs8NnUS?DgfJ_'Bn.pSJ,F/Bc\;k*oF*XhUsVBAo>,oM,#%MsVeXrOP?E3#X%i#-JWK(5_E#OEgcn%MtY:m#7c#Zc15iRlB1U3hPQ%Vd'!_k/UVWP?GRE):&'cU.?C\!XH<</$]6Oj)dp.ghGFp2qU(sD0/lHKCP7=.heSXHh8Dim_m@8&mR8O*)\IZ\])5WW6RWZZ^l_uGfTB71rEHS_F6.CcG;]YZ"83@5_1mYmihfj(@b?*M(Q)Jbb'epF>ZN5^Bmb[Fq^"R6"kcW@M%j7=`%^7llo5Gp`'tr**QSS`LcVs0g#NhC%-XG%.R/bkdD0d1.oJO2e>9Q\u+^@#>\1$%<)h;QJu-Zl4SPbX)Pbf.<lND[o/p/l.5c88E4m.H>!1IL<Q)u"@dt]5pBuc-RppHf^NbK=KiD^<\Zru$8f8e6(HJpXl7/ik0a(hGqp3Dc3hFhV_M5,M(LKRC*uOp.657('RDYU)Oi.K5tmsU:/b&jQjp=9"mpN_-MhTnf%5u6XEWOg;Td9O!O=j)*M6l.A&glGmB,+qp%PnB@grH,Ng,.AD#%*sDJr&l?N/A<nrE:QX1O_s5%)o[>SGB_d.gT)\UknWc1!JE&HDI'XN4.sje5;G0,[mTQ<g-09?._o!nu2S)[,UX$ij2%G2oRq\C*:O(jUg_haU29Y0sl/T_YF.ipQ*"6FLMsA@6_:Pl>4A-F,Jrq2$b)XlG]jW1]Ml1u=:!W>LgU?Eq">j1f#/T%A3uM$Y*hLCjf1b]QM^=L6m_.(*8q_G84<@U
 ;lmd`eN466*r]7_5fDb(^L<+%8'p#9F0][B#dr'+<Fs<cmPj%_0<@_2O9rUM26m0?Qk8='KKuqRU,t0i\lW:[g1teKIU-"N-GVM%S"S"I5+Blb)jq),N.k>T6Ib9<r0"(t^RA!lQIad&=$"Y=r*[Gg`h#la!3"jYlV-,-E9/5d;"3AU!(bV^?/"VF"-&&h[p5m'O=G$N`Brj5Z=P\Ot-K2@Zu-2J"<jM#5_Q,t+43d5)Li\MshB<HtVNfF=WgSaNm[$_hI\X)@+SXL!T!OGFmMX*W0:>T)_R6lT<7/D>]@O1T4%[S\_`jHUK,092meS5?t+"A6qHiH-t'-QJHA8r+p=jL#n"KBqf9d-AIA"?A-I'e'obK)`K/PHN2_Y:O,_5=@7T:+e@*VK503'qA6c<c)h&C7kV8h#!40CEs^70U-6lE^'2MoY10Y$qF%$aafHh9Fi[O!tHlro5#cW=MG\>13u2ko_O.n;Xi:+`2#q-k[k^.O<)0JEct0ZTOQb<ja^*/?iEuKES]UZE,\H:^`k)(L.`MC"HWHPd]Zt[@9+sJJu37Q_*"g9[\ptd2t4*dNffL4oi`(+1)>"=T+e9Q+G?o$8=>@($Vn&S,'$H.Xc3;pHdhEcSL;eDHR"'k<Dch1!s"&OAf(dRNMIu4dC^ZN7og=*!2RO/0PX3iHUNNU,ui%>%MoJn3/1WKW9@pqg[G9R)E*"E\AElgLR$#-q./-oRc>E&qIq&RC<.3iWpSY4JH2bbQY&C!r!Rh)Sa67coEl,Al$PV6)@s1T9eFjoU^*Inc^Uo!L4P(^Ccuet0EC3(QNe;6p^s1]eZLkBQK+P)2es7Z0(*iN*/8Z=J!BF1,GcgVUt`i[.8@V/0/io[WuaWJ]?F=L3%CjURT+'i3X,lrm-Fac2).qhEqkkAD>Ob\2/skf3?"nhHX/LQI*!;Y=Tp+2NF7X4Y7u@k?-ti'YdSp>0p4&91.BXl9"=>[_3>HLY02u>2:VC&='JM=cp90
 JLM,hg,Y\W?2KkNM5Qqu_:<Wb@I/Zi"b4i7g,<Jik@6XpRg]&U=YDMEL%Q=AN2%A=6'RlmUgdF/"dgf[LpUXh0b[5l*jH5a\?:SnEd^?=cW[Rdq<G5gu"4fElOec8M8a!Jd3b%AoYl]<tT]N[.*]UDca3D5.B/X=\C^:l-HI7!M3^_Kk:E%g+;6'csCW-iS%^KQu5P\bDE]r/G8[j\OnVXSP&O\/r;2D<S?Z@h#HAq[[j$bQFRGu-GU2S\#]&+%.:1Qj\rrW=b,_P7B"7*kN#@-X['&S_EIN)Y`7U=7JJ/>B;!UA;lMYmI]C#$-G'NhUn>]RHehQl-te'5>_:DC9`!<oVG+ifH#W6[uii,s&7L:J8\Z?<SD=jfJ/_!6:nJ'u]5Fd2>!mFij''9SJ"A.(jLpHOf34]M~>
+endstream
+endobj
+141 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 140 0 R
+/Annots 142 0 R
+>>
+endobj
+142 0 obj
+[
+143 0 R
+144 0 R
+]
+endobj
+143 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 126.0 528.8 252.312 516.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#getJobLocalDir())
+/S /URI >>
+/H /I
+>>
+endobj
+144 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 275.652 436.4 355.968 424.4 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#getJar())
+/S /URI >>
+/H /I
+>>
+endobj
+145 0 obj
+<< /Length 3266 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU7?'!aO&UiPDTn`\@J@f&X,UKud+-LI3EKN*B`\0bZ"LKc88n:S[jkXg4rqDSM5)'h!P')?Z9C7h23BW'a9&uK'c(C=]s1AZN*WPm5B&OulMkZ%-:T4'aQJrb#55)#+>M]5Mk)HVI8KpPNYePOon:Hc%C:V%JRqb.e)gDB,U+VF%j:cHWnG`E-P>s8BH2mB"^[kX9YLV6\6^4N,cn"QCCc[%@qH+5]1+Ui&:h7Vk:cR!*aq!*E#C^$Vi:gqK'V;UV5EsUV@oa>b6[Ak^O0$LV.SkQ[?!iOrQba;n8E]j?o@Nl#%tAn&mmZ,r0-+2dMYp_3-REOJBM6<fpfXRu+3L9b^ldOY$<:`Q^B-qYpDLc,2jUW4U3[k%M-i3D?\F2_'Qu(23QQ?Sq$kj-c2k9pBG!aL)[BDEkm"9a=h[0rKTe.L!2JJXrgn[#,[C;8jl^a**?6FT#ns^,F>rYU,6k`PaR@.dp:2dG]Qd>!=&qbX<ct9Y'll#^eh#/HSHAY"a,-14(9=.5/rn\&GHP&='h@IT"k_&MJD2qCrH\MB8H4sXc_C+TgQ3B5q^D"1+0OYlVe-R7*p'to3>!$9&dUJ5kiJ<h<#nsEpciZ0eQO=.@3%5SMD,Hk[W0!+Lu%Eo1Te4-S%Em7SUNNG@/cl$MBZ@a-A[Cs]`5hYW<o\g\AC,+0oMAK!8&#o@R."2OcNA&W!FuI9-ZdUR8Do^ZO"*29O/5fE=1"0MhR&8I<o'u=XeJT!!P.G7FQEpRs%7O2\PWp@BVRnF6SUif!ke5#<X/K;bBk'/(oGKF"#f,T[=>";HhZ03/3>3_'JXAFO5^%&t<f^eKgG8-lO^AWlZ.na!do3L'GrqZd":[D>*(jI)q>RT]aY",<KI2HDVEX8C>:#O'141B3OmES0Udq0A&Tk=0.i&PliK_(1.q,G/]SG!n4.V+CeB9""GW9?Pt/47F@DRGf1CQ!B4QRe0Gg/h(l=ef5X-%?o'O65NpNI(68n#Ru
 Tc^5f2PqZT&GW;1I0ZNA$j1;1<o%...@-Z1U83ed>4MiX#bu'4]NMLCHbuB@A2WV?<`KhqXZc(=V),6a;-6\?\b6^Cbq$s<)&&:=N"rs68M%\lV$54#9$75M':5n]P.IB$9,TkOb`&u4?)NI`[M'VHgJ2V7hJ2/,Z$7:#XYsC)l&2o,C5iAp\j=`t$1[#6hH[iN1An_.Yp5^U%\Hd>!2*=hj=/QlFjn^f;K13c3/1!F6*t/3*ms-1$)k!)1=W=@pou!+WlZ^C!>k*%2sIYLFF9eKnt!SQ?96Q()g01?URE0(ZDu3I?L<c@CT0F#KNpmaY)j8hN*>TLO>?>pF]*DnOI1iQAVehN6<i1%&T<mrQ[qk9o+AK6jCC"[T<@.2R!Q,XS<>;mf&d^k9R&UXi[!_g#r\tpXN_\k3btKWr;^a(:j+D92;2ZQG2''+[adGVHXn++`K!J%95IdC!cX*V:^9gUC:GlglA2,c@IT(-MDbWPNm3pJ6#*Km&>#%67gCJR:VsAS[9KJDe".FjH`N%AUi_l0Cns-+]iM_oGBnOMD(^6NX\I6_d/kq8>Nq_oSm/,#l[k'm/_:/a*MFaEi(36%FRim9\Lrs\[;oQ3e.FuN^>KT3']g+bD0t![OBGe.:"ni"9G=o=KHE]D?/\H24O`kE*2AdlVa8];3_5c]Z)tS/V6r2g)CLtYbU:qH]q80/"#NIV>o,)9`0+5sdjc9O@o0N7,Xr5K(F+&jd$?A1`#(qja7E`)<dLUFT'Q"OF_DrX0S]$Gs'HAUYcL5TNIt!E@1p1_AS^nWQd4;5@R%GOeaug?^JJpQKplX=DRogY'[Ln
 V@n/qD,t>#$!D')A[0nBPq-4^^Fi;Xm3-`B3dEC>^nF]_+.#Wu)UWBRO)g5r')_PfrJ1o4h9HN(#`CW1l42/n)!65PL^e*=an#lSZgMHZ&n#n'5Su5QlcegYSr#)l.LXX'o>WI@LNr+!hIEea"Z"FnGX65BV'Wbip;1NU-2/d_BZ1Y<-RYW1AFuspo\LqKS;MQE&1Iae"it)1cnG<.-Q`iV;I<kkQro:nGn'7i<ipE=r"2k?Z-j'S^P;[%0_gCi!#fPt40>p0h`&i2EB1#(Mr^lNt^=(Y\o"I[j[[!na2]V@))hD;OB7,N@e[\F6DqV_jhtOq.i'ej%K([\'*5cCaW;m#'L5s/j!UOb8^Zu`d/E#oKT<+Kmo5J5_p'B!0B.L,0jkRCe1n;1OSiG>@-B&icS*1Pu[Y_;6isrNAo>5d$60"/Lq#L>u3Ve1%oC2gSHXb=:D<6_+<Y,+b1l(pUmW#%K^-5V`Eh,QC/`i).c.kV'O9_K51tuiR.T'Ug-T`"ZVsd1O0rH6nX8HLh%*@IU7e3k+UmE7EaM2PaiQ8HQ.7SOoW`;7=s7l6Vc,8k&^]!s(HMM";>)!UeATd+_%KL17*Yi989FtC^bS#0FoK;gF@7H#Mb6t2+cXXtl-54Nl2'H5WMe3;XE*._S\k;_8C,Td1;&oH+I,\9<$"#m<6.ARV+dJC(PaY#uiGBFNgc>Sp@_kZ,DnB&I][aOFFe_n:KDi?28SXI?/B_J!!u'b0GgaoiZKj\Y[`=lWUE8pD8"XWTA?#<'ESg)Vb56J_Joq_uG#PKgiZ]5HBD'31?m>:pHj#V28I5W_&<8CbT%K"DcjC%R:@1:B(t$&]jLlgKO"5-jpJ+%Cq>)XW(J#,^@&/7,=,FY^=,6-a;XP,GalRS`bkV)qZ'^f)nO4R'`8)_*n^1mSQlXP:<A5f6+,JF+,NS_ns'u6IS9Zrq[EW]]H0$AV*oG^79D`X&9(qC_?.g0qOA0"+/$&$PPe1q*&_32`M
 $u#PT(X&7F<ssN\&u[lVES59TL8A^id?b/Tig0)UN$)6h;hO0I=GK8obBeH5t&CTkbE*ZGd4c((G;N^#IZ&g\P3jg#A;Z3c`Fd+k`^JqG^1'h@C[kT>s%aBe&A\*Mm%S;'DJaa>bQUu\/Sm!R`)<U?eJ3A0ShtFh1[IV,ei99E;1mDb>mgRKLaRen?sa;X),6M)uf:g!t6J>j2FAWfu(HT]/`9F^D5N"WLZT@PBY!6D_/gs@0EUlq+tc$#5b&Cjg9<o)sdU<rrHrrhcB~>
+endstream
+endobj
+146 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 145 0 R
+/Annots 147 0 R
+>>
+endobj
+147 0 obj
+[
+148 0 R
+150 0 R
+151 0 R
+152 0 R
+153 0 R
+]
+endobj
+148 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 111.66 248.35 195.648 236.35 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 149 0 R
+/H /I
+>>
+endobj
+150 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 156.984 208.75 251.976 196.75 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://java.sun.com/j2se/1.5.0/docs/api/java/lang/System.html#loadLibrary(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+151 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 267.972 208.75 326.976 196.75 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://java.sun.com/j2se/1.5.0/docs/api/java/lang/System.html#load(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+152 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 348.276 195.55 447.936 183.55 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (native_libraries.html#Loading+native+libraries+through+DistributedCache)
+/S /URI >>
+/H /I
+>>
+endobj
+153 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 144.297 136.008 132.297 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobClient.html)
+/S /URI >>
+/H /I
+>>
+endobj
+154 0 obj
+<< /Length 2558 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gau0E=c_T$&q9:V+Tr0H#+0!q[*H=$Ap88<kHst4'.dgb[[EFLN=L1/hktR!<K\al1C+oTBMp=ccE@_+]tO=3p?pt3@iaP&S%HaR[cRBMr,o^8s*Kf^bR:p\88EGDleU0b5k+7qZ5aA#[YHrVV6f.$YrZ'L1Z*AXD/0%CjnF)q2Gl61?<b0Kg[B^A]2/u:NYep]P3%B]X1^&k>PGdAilct1<MCsspF8?*?e2hg<TEod]dr!5<soU)U#ekG@U-cma$Q)Z5f>BnN9aTD\^!I%qb!)Kjf;]/.etE#mN=h>F'Vc/"Hi)l7A>k<[\Ko0F5u7MO[$LFq`72nEDi-g/7,$fb=Y_k>-%"_Mc<YJB*Ji<Ucb3Q7<tDbnNi`ujpJIW;12T1X$EFZIk!PGj=JKX/rnl6*21!o[[@s*QTAs-DsX]VhdQ5ojP,lWT2]tj-rX]&>oE:.l\[eUEuD$2MH_)F61i!439Z$$'@$sf[MabYf7e"s%kkeN$eEj5])IY%IRXSd(&7Ecmi!3/k)!7L8Icn(UHFI1-6]@HcV[-h11sG&jp"i$q\OX)PSI1dSJtZ[jMtGhWfeX7Fe)D+`H[dYBN4XOE*1U<%!24d<E=h-eWnq)'euGTbNOOQS14E*X1ao(Cp,,A)%fC_nk:t#.#OAWG&[U"%!HW/Y`"E)iiUL5bi&eCA'Fd:'S]_KF`pU%\sV+Pj8Du>/7/B%drE'&Ko@Nf]O9C(3KcF#2r4ftmo1JQq`E"O$LK:uFTZSq`!Gf9"2Of#(k$5LhuZG#,54d)7Kr:@br-W@QU,ZL2L/:<.njhsDb`ld2fHnkOp%1"eX6JKd29"J)/h6JH+$?qOHJ`0&P66c!*7#u]"g:fi#E%@q^D3^\uSJ\>e/0#pgrN67MWD6]+GD.PLsZAZNa-N``[?r96ArZL0<hi_d%NAo1?nK`0*6=3iNPL'QK=NC:m*T`kCcYIAf7/6AIF02r\8*.287Yr4a\I*dShhI'NSbEUu%,+K
 iJE=Y*Dj[X4DkLg5HO5H?%qKT\*uTEu]k5Upf9Q?A'r^tAf%&TRS1i``V.Lrsc'T-R[G/4Xh@6UI9o<uf]^!0B/T+G3jB/Y.S7k"J>"CjO(t4bOE>iVhS@!*L*P=Ae*]a_-6<AoQ-7#oC<i.e$s7NpA)I,)=i(#G!.:R666W,]0gXMB8<rN;J)*GuI[$/toS+:9EP_([:nXI[%A"l!3k0YLYDA]+e'XFUP3S@!anYEkltS]&>'=KM$3eFFZ7".QR81%ONU+o%@0(3F`_r2W@5dTrBD,2std)lIRAR&Op"k>c6sB%lI6PgK5BTC7_+h'pK_!`pQgmM<?!oQNAs/%r)2T5oB\Qq_>H8Z%CO'ca`G`8,<&RgfLk3p:#Hqj)2j:>/:[KV#I<9L=2W#^&Q*`F$,e+ljuqOD,d<-6?*d#)i8.4cjQpfl`hD/oYe_r6BdPj&B)l?XV="`,?1>dV=_W7(_64N1nhe.rQl3%<&7QCLI.mC!uoDbF>DJN4CC2/3$7\MVl'>8'd1&E(0GQ"77@DHpg'"LF5*MsKGo2?`mBWPWj=+<Y3Cm:<]@J`3$uRIndKp.KiH]:-jC^;MA(<kD_6dEVB'$A6ou;qM^$/4;Fd;CbM;Fe?Baj"dC7?i\D&?Y6FSkI1TQ`Gd^K:f%mZitUV!d,$C4&J#8%?]Y[bjh/osjS7@F@`9i^B[#<!H[nc5c._dP[mCg7a=iMZcV\/2F^'RKiqK&=ZB1MLS\ifh/8`_t^K$pL#7S%2#rYu.6;d-et>NCdg31][;^)OYQR=S-[dA,[V7^9&)@YZr#3^&]XE>j:IaWRBF<i!Mi60NJ?H&h1s>EYCWY("$n$JEeEc]P52t5o5(Jm\#K52N=p7j/@c#nU3@:^m""SPY]t&YEJUX/he?01q'/UMi0)!b"bF*k#l(=s/ZtNj$\h:MEq>3nraZ?bT:*e<^4d6/FZW2p]0^5;^jK=]J)X4c>XE36Qqk_24[%[kTV$`(J8r!pgGT
 B*h&P*0'%f>3&NV.YTRuMN[RbRXPo1MD.[q$^4k__7EB\_)l[Ng6TTI%3,"Tn60A&h^r-U&q.))fHsWB24+=uTe'%H9F3dfJ[=^'OpY4%d5pm7T,knT#X.:s,0%EhtH,0bZ7@aJCIi*3GA!pI?j[?br((l!QMVJZq9G(aZBSC8SOn]nSYrIj1b^AVGP]QTR?QCgY<Mb,U'td6[Vbk1_I[euFSPQTogjT:BXsCMl#i:m^KGGHbB;303RTZTPj0)K%\##;W#`[(_1nML[3+XZP;[l?Am2"=%etm43nV<[)g4K;4B*?TUFd<&)[<9^KO8Z#l"&WHL5LpHKmF.,QZlB`TGg7(s+A4'b,PNroc.>3[PAr*`m`Z7FV^WIX*o2UkN^NCU"[8Oo!LN1jh+WPl68a/WMDN9_=Q&k[RE`7n<+^p%5Fk5,,!aqVBB'eJUpi>bP/=JNmq*^R>S$/QK:^9\/3n-/46+qS66/j=iX*I[-#h0J=5:6:ERfPIm2#`I].ZXkqGA?7k<@dF47@9^mFsOS+gTCE]1%'jM:N;scLZi'TXhf;BWe<7~>
+endstream
+endobj
+155 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 154 0 R
+/Annots 156 0 R
+>>
+endobj
+156 0 obj
+[
+157 0 R
+158 0 R
+159 0 R
+160 0 R
+]
+endobj
+157 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 154.308 351.6 232.98 339.6 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/OutputLogFilter.html)
+/S /URI >>
+/H /I
+>>
+endobj
+158 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 175.828 189.324 163.828 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobClient.html#runJob(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+159 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 162.628 206.004 150.628 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobClient.html#submitJob(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+160 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 149.428 166.008 137.428 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/RunningJob.html)
+/S /URI >>
+/H /I
+>>
+endobj
+161 0 obj
+<< /Length 2638 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+GatU6>B?RZ&q9SYi6bb?cS?;SGtYU*A5eam@f)[^3"JOu8RrFs!0ZC@ok=U5h(Tu&Vm^cl_pU>(IJNa*`]lTCr:k[2^DlV-i9sH#Oiq"MpO2g<H1fMXT&_F:Sl9<8Fn<Yu%p&q1@-ZQdIsg/a([rNmgDe`M\aYA9NM,Gj*\-@Ki;@lt,)#^,q9+'_V)b8qc[#H1\`%F>hY*=WQ#gmipK82emRc]R/G'#,eTn`rlCh/$5/kh3%_qAn1Z*%Fs!;$#b-NZMFTk#/Qu.;qX(7^$j!r0Ws#baZ\UCK8,hp"[BQ_7[OZ[CO5s'Pi_7:B%-FZd^.f7c\f`)rc5A'+Er%_UWSZhJ5q;I_+qQ7A1Q,0km1e'2l<;R=i3BlWJ"tn:f3Tb#_%p6Hma+qIhZEYAA'^*^#U=n%/*p^98F7kI;g$r$M0@E+*-1b)T#o2Pl9EnrC>KH>(apUQPGoZb-4hUbpY;_re^DqpO7?"YcH#\WjMc[G:LB8E#Y)?heO\MmimMAI.iI-"$CPs^PH!tjnYOV/CUY_NlRQ-`F]\BCR2g7474QE2R]53E#TO7<M9E1s;BnFmU$C\$&E=d8Y]UUW!a[AZW(`O%`TM(=GdW?V"D^S6R2\)`8s/t+CO7mEAic-;V].7MMC,G0P^E6^fphuZLS%-o8ETc*dfN#%baS82>Ys<)Lhn*S#FA.n`*lH'$CD6nCWaBYe)S#W&Wk;]8q$o$DcFH,<bQUN\=r?-9<T[QT7:XFb/#tX6lgh2[6HAE*h6sNa"TMOiF"ak/TI2iK0?;%CRHu]B_-r$10uJ1f,j/]ZRQ#tCRI;YP#%ZUg(GgdWJRm_9l?\ftlire1OUK#/*d;#bH]c3WL9Wme0CF0``-f/:UL*-&k#'0gq:e1YQ:'?$Y#>Rr<Psu#:AJ@cC?c9AaG&$<`:0'7Cq$/t;%\DNl`na/I+I^3@Z+]$n8KpM5lXsI&G7?K^oOr@%['.4?Oc_D)L)nM)@iUF[7,cT)+e\uk\
 Y]>OXjIY+.j3o24Up/inU_Hj6#S_!j2[96;Llh:B4)<:m=EgL6h,l=Sk3LL+Z9;UJ_'r;@9lHoWtf_+.0tM('8X&#MO3D1?'HUgdClC"<YEM=85:0%MHCC5XL](AJ:t7C-to*>!r0&g]n92Xbd86;b-u0\T4A<U(gM.4-o6U"cC'^@*"/BoOb'`9rTX*"N+gA<F`XQ3'/WgVC%58V<F5$,laCLlE`,2(?[`&=lb?;cT5U.hTAG4k)pt]`fdAZ?[o&Yq9GJVTX1"a+l'7.%?q41-cR_Dl"piNFgfRk)3Q2b3DdkO34A;MR*N@iloH"6K<1cd>%=sA=@sg+EtZg/Zn[e1."$i3L#0%$Oth#=CCZmf0T%YP\$,tF)t^Z8$#44;^>33"[0]O/\L>'n7SJbnE<pUXZj!V&!"_r9ndoF>M4o<n/[[49oeYCD1mQsA<>hbW.#b4eK(M!W;d%dZ=6Q+*6_L/-qM_R\ShTP:K2'KsqH>M&jS/I!7:tln'PRX3_aQnGPMaI#Gi1AH-'@&fqqUVMg)T%'A<lEda0c*rm%s6r^!6+&:-/aR'!>Q$mnW<Z\DQ79]6@T2_1Vp]SESQ.5dm>hJaUGmo[IGhNDq,g>_332g34:f$ZYurc-[!"4HmZJ2ZdlAq;o_-\fm?"Ad/%'l3c>Q<-o)f9;X$V*@76?P9Lcr+l.&[_;c-f6W#A`ePr;VO4u[$&(I>Fd1H\s<UAe<O2XW?R5Tf"S&U#jbA73\59*3h9p0\A=jq*D'Ok2l6Q9;sG$E/%"JlYMX%U6Y(;)!EhJS6+X5os`&kF(sQ0l<2=l8=.2%7,k1KOVH-c3\;l.>i*Goc,Q!MTT08Tc5u(8%TH4?2[]2S,L/!pZ&'@3Oo<:UCcT2mC<s,)X_uis7g/Yt3*_C8QaqTMl8ukbhH'\BCcie=3dt%l$#-T4P?r3_bsr/13D`m:5ctB.qI0D,Fn";<T5E-C&kAT@)*L:#L2f(KG]@Y(5_.=4%])CP.1
 aZ4^%Q&jpd92.[&?h/6j4pgr6r&7Ple1b!O#_A#cfX;+k@U5.Yf?EkOV1OL;e%r<dd.*4g?ocF](Tj34r48s%=,T&0'/tWTsZRaoAlMUC.W%O\tA1e\SPBVF4&-0QJ#hp3uWM0A:c#H:Kct!&BX(bf^(ro;OViSgI`.-Z*acmD0'2\4UY#bj#$Fl-88m.'nNS<7NNTY`t1Q>VsX."VO_1f3S3L7%8:>FO;T3Ia.G_^-so<5Htl3+m)dp]NmQ'R.PE8`aUagBlU\oRUgVkp.mhdsW#(.VaOFn<Q.C>r@t5KeN#/@nR5\rRHZPJumTqZ24:KI/'F)%5uiS:CbA&)PLqU8\j_DcmY'DB)8XW5S96h,-%FC:j0*d&1(K>Vd&'X=5OG.1Qm<:jTF^3QBUfV]4AqaV+WF1p1\oD)0dog7(/Z`99jVE'ic?$#I-PG+fQL;K1$Tp2dmgF7UD(#XAW.km$$B]>r5_o72"\1hq5O\=.tA>G'gjo?.m'rYH[]Wd*GA:rA,($SV8R4l4QgH&1n*h6oMP:^"oc+0"RGTB?3Pk+Z@3a1!8Gm7k(nof%ue(ZFn5-!WCm=7)s$=aY]%/qVYMq6kU/$hWMro_S6^K-@KVm2+pO'!20YMR+2->`AD1mFE0l~>
+endstream
+endobj
+162 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 161 0 R
+/Annots 163 0 R
+>>
+endobj
+163 0 obj
+[
+164 0 R
+165 0 R
+166 0 R
+167 0 R
+168 0 R
+169 0 R
+]
+endobj
+164 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 664.8 316.992 652.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setJobEndNotificationURI(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+165 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 598.347 150.0 586.347 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/InputFormat.html)
+/S /URI >>
+/H /I
+>>
+endobj
+166 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 476.747 168.672 464.747 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileInputFormat.html)
+/S /URI >>
+/H /I
+>>
+endobj
+167 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 355.147 171.996 343.147 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/TextInputFormat.html)
+/S /URI >>
+/H /I
+>>
+endobj
+168 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 230.975 138.012 218.975 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/InputSplit.html)
+/S /URI >>
+/H /I
+>>
+endobj
+169 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 175.375 131.352 163.375 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileSplit.html)
+/S /URI >>
+/H /I
+>>
+endobj
+170 0 obj
+<< /Length 2558 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm=D3*G]&cTMZ66?\N!lS?+H>e]C^#J`lJE!_7U\CH2(:6K5RFPO6@`LI`J+$3sV4gRsI>]GnAgqMM1W,VnStWpThL2lHqf53/BDD6"?"=W'npMU/\t8bHj#J7hU;3d^[Cc=fh7'Qp:hl1B?qB5T+g<AIcaN^4]9BS[0=?+/f-Pk_>2.9tcT@bDKTZ#[L3:)aI2$Zu,p*:d7.oF5YI4>N[(?t_Rt[4-6abCeg3u9Pj7h+I_AI85An;bN6K5W;K@I1ukQTPZHG%nu+ko(1e[%,SJN:XRiQ_UZak17kdsaM,;SgQLnF$6B2"!<FKFs&g?08=DiqrfYjkF(6N]_0thaSXCPn"Qt`0oj<\ZG'Ff__\iVA7EY/95!f3:>"M+7ldT*m7M@.sAEZ]TsgqJ0%_&5EbGomCAH9:XeoG?ullp1%TePqmmEXd254ScIk#$\B'`*C?jLGQc8CJLeU`<OZ+JP*Zq4<9rha<+>$UO5"t:J.ZQ:#MKUrUh1a.,KOKAo=K'R.EZ]6Q,hP.TK:aP+I/:896>Qor;r$Yhj*Dtk6f",Zo!2L`1acaccD>&PVBQ+8GU&+dVg;4.;U,[.jN$PaMHJB9bb*!\T"+lAR&c/%RV7^WK"5]=BQ7_O]$8/kg;-@Fi0W4j38Lo$dj@$9Vdt_dj?\#FAQQ6a_Vlp/Ro;U/YMo-^qWoJs_mBLC>#O++=Zo1o\qae<EecrT>T/BYg;Zq-DHctn2GaC3h8<_TZn(Un4e*oCJ_%Mn%E*lGk602Z5]#0qT=h.UGa&gOKKEOeK-,t#j39mqkr2!cM5]H,mM7UTk]C_-;T*Sd8?1r`<Xsk!L8qlc.$?JXRQ_NL,@%iXmsbl"G]/e3IDn!^.]O;TPc9s_fYDXJ@$*fUs2PllJnTfH?jls-F:kE8j0WQY5EC,i)2?stOh_pC;R-MAG1aLs&:1V@h`R/1DoA+TFNoNS/=a?Iaq\/*fT-OeI_gn@-s*L<eqjNcBo;1<bETh=&,
 =A9\V7!R59X1$.R%soL<7"MY6J3.P"W%4%DdenK8U>VoH![<EoBBD!_0'OkX#7#8iH".ms2j>bNb(%18Ct<HK[rfF5q!$AZ?Kr8oi-KH\dsIbi*8540Oac)KGh)+H^SiXeHiTb$BGU#MA81>W3j2.qjIl/'51'hBrj,&U9/9c&M5/+?W>aQ:hdZLco\*B$G*ks3XNtX[!(JjiqAE(2EEp#m8%lBBl0)FjVeJmqMG#os+=1^PBCl*^94/^X-0#%cZ6gV6r&h#dlQ+?G"V._)TR'i'LdJ3Y`M>-r@d&5e\/3<fmD3Jm)>',.UgUWYhB6QPIl1H]4*YW^EMgMGI:]aHk:@`ea43'&mP7SrO$akMid2LhTH!j/1q;<jEZS\Jon&j%p@,"5@9@@p[K;"Kq"!BN%$^i'@S"W`WeZc)JKL:cCLtDo/?>B5;2Ig+(?6h7H_n"rD5<S4aUIg-0rkDn<MJ:MeT?l6?gLYCD\6g-"!^GV(^:#+gaIXmq5El*V#'g4,SY#or-8qpToaTK6lCiNt1/OhW@&V[f[fC4!n438,Goc:dM.adWAjo+)Br>?ruZqIV[o#6pFZ8ePRh%jI($g+'t/c"Uk_;l6Ik.M)He&@GDF7b-Yl95=q\K5l'f^mo0b@#6gCLV;XUG[33e\?.sbl&PD>b62Gj5JPbpeJ6Mg;1jlV3/YM3(o&6c:UY,hVbI$oJ"H#7G3-T.Y&!t-*gr3.aICLmh5n,)KKA6!;ittF$1MFJ'.kp2$A(-R(h>BSJ]PsgHpc=:Fp6j."C#/i?L7^S[uA5+SWFoX$./"qn"8)1'G[gspJ9]EV?.p)hcF_e+Q[aZFh?BNW]i;.m`&XibOm:8pf_Ski1bR0ZAK+"HaPi(\JcLg3:9],beG/d<834:p=&`I#t3Rq]p:eF?#freTh_JrSi?)V#&&/p1X2M),9Zkdn*]I+pFUtF`KF,=j4&nZs02"<C^cE9d=$Q$Z/AuWZ0[#9^8m1i?OFVL[n1Y
 cT-/4'1N#_=+>YG9%:fI'\2,EW&@\CGp0?D.;\*JLW^3_Q(6&me0EknXQI27=^j36+3RLm8=?PMWUe>5=o.!n[b6<,fBm-$c1dDJKbtU3Ve$-iEaoRjGE8bN6P9K=Ip-h<eo=BV8ejbBSeM49.A)j90GXbN^hT?pLNcBul@'Se6Tq=H*V*`0#m9tHX0b3/\%_CmoUBNbM+&<QooW/[4>$20l>qq%611Jp=n)dec`,U01i_:F;oWAoTD5B6=^&f`i7Q.CsPc;V>YC%dW\dHTq$C<_'*1pJAq(>tE;MS"IY?DAVm`DeKIYCVjc[JaPpmo9sWsdj*H`V)KgR?_d==:6RJ8+!\cB(_s]Y_Xo7jA\R6=:XOlj<`1+]?"A5ObHs8gPM]MMST!Wu<'u><n1)-k/];4IcR3L[i[H-HoE?$N&#S!]A[D\Ki0`qhB,0GG>c@K%bY(_5X`Up$gd!%#pD<=`=G,d9bEhMXk#ok<fIq6.jS'k0[n>..X_`+JE0?]`^r[erRea3=Ro\rKE$)4.$%c7^;s:qF.l;NRg@.X(+X(/s6#Qla388~>
+endstream
+endobj
+171 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 170 0 R
+/Annots 172 0 R
+>>
+endobj
+172 0 obj
+[
+173 0 R
+174 0 R
+175 0 R
+]
+endobj
+173 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 631.828 158.64 619.828 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/RecordReader.html)
+/S /URI >>
+/H /I
+>>
+endobj
+174 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 519.775 158.004 507.775 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/OutputFormat.html)
+/S /URI >>
+/H /I
+>>
+endobj
+175 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 139.203 284.328 127.203 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileOutputFormat.html#getWorkOutputPath(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+176 0 obj
+<< /Length 2675 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gat=-D/\2f')q<+Tb%+[!&7QLW41dSJ2WD9(:^0"IHZIXU#8Pu?,g&>V'N[2ir4:[H6$`HGo[#jYI'bWben^OLALoKlM^\3+'4K1L]8I!,+I5Nn,)Ur4T55uF);QcOem`=^&%Eeq\KF+a<shM?QhZtihOOQ\";\XpuQsKAN*LFr,\r?ddr<61[G$RrV*n)j&hPX3dkMocpl]2`QQ:@?<,ue<97>+s%o'BPt:"6B"X;[*^:jgMbJp0rKNB#$Q20h=*5kiW0(?$WrlP(&8L&[o#CNuFoV'O3S,LWGOAsIM/PoCHYhLO:[t1TY1'hiN<\TmbtdI8f:f)4DuMfg6A^@XkN\2jF]Hp+8Yl4_TZB2njMIdb.8">dJ11C2EF$[K1TfuXF,Z?`A6c$;bsF]FU&?/P\5@fF3:jY[H((=*DuaanV9IFOi.)s&r2pjY'j%<Xmsfb\(RN5^0V7Zq@&b*`*m^!P([[%2K8U!$c=DFp:Sg;fCekGtqeoeW5)Z4sdCl+18($\tGjd7TG(-D`UcVV/<s!`"(+Wr?DK[cMa>0/8(o).]%KJ0Q8D"$;6rg8r-f)5gc),rsEn9Caes4,S&qpbT=]5]gQHNqTRf/f,Z:X6`f!BMNes&p5mB&)r"tdoR>B&h*'+24Z3qA0p7J.Tbf&$9-*-o&CGgk?"Mj)>sc2seYVt/m3%n,,@:%gs[f6j!cp`fu-H/_uGpL4oYOH)11B:Bc<44qk_M:p&3q9GAV7]?u&,o+!dA/X!1Bk6u^:D:*3V/YqlEb/X/*lPL9ag^K#B;n7q^M6VEs8;ZVj88B7_mL`k`NhWA..p>p&Y$c2kco*Ng9qF6>A1r33:_5AoJt2@?4->_50"NCeOIJWHCBnF++Lf6,=C,jcH8mt]`U\K$sW%k2%kcXq(1N1-,1m3$&*HBkf'+q>>N/N)absWC%"O%f8)+onY&T!nJol-B($7Sc4\H>k%@9<W=+Ol_Mo'^2WHo@]WbTOaZSmebCuMP[r
 f/8JBL,42?)W5SFj8cp/mCU[FQ:&\1uB.E2h"UXl\6HI7(_6$(B+9(:4q)^bj!gYt2hG;O6VaN:TV*\,[(NhUIttZQ$/<*8...@B->RM
 DdS1i[e<DkO4HXVGQDpsI\V81uA9(P?>tDsirJFh&-eOr1+R863f^&?%-2t"8Z)ENjnk%c%iX7].$hJo^Xe06n2X$q4,u5o[_2O52*]gtlRaq\e@\0$>JRQFE5Q_lZZ6:n(f23tpnSinIG3-3&%<h=5U<@A?m&91$ch[9?T']R1NHkQ0-W4P"HniiR:A2FdbcruXbPGlnkhn8)2>k)Q?A,nBFpWMjT(>nt![F`m;mJRi4c_#s5)4Hk^0-KaO?PHh/ikh/K'6H[2T0a=)_/r-ClcC;YGj8L(JAb?M--!R1S7p6$AN5^ngCLf:BXF]?#FH[IdNehbOLEI5$D8VdomSJ*FR>`F_]f<iNUE?cZHS2-oZUTAf=$q?^AK&@]PKt!bO.X\6<'Y/aA>"2&d?JTetSo56c<K*AcStD/!I9CU.@]hbJLE9#p-g5Dm(*N"6:+e_*YEYdcjFkUOWU9J%+7ND^#!&WK+-!:K:%W:fpUT&b0j[04b47]25c\:%^c\WZf*%+G*%bD:7(%nDDZN2J,SY,=P1e.d<p!bPOH,@Sr%nk8S?#;&J`jGQ2HCcE+8BtujkADADlmo^e9YFPSCTK"C25;Zd_KM?Xk68;eD(rXIqXuNd)h\H*][F]WuY'TO0V7gpXF?"!G,sc!WrFdjQ[7bTNoi&[bOqG@A!QW!BCB~>
+endstream
+endobj
+177 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 176 0 R
+/Annots 178 0 R
+>>
+endobj
+178 0 obj
+[
+179 0 R
+180 0 R
+181 0 R
+182 0 R
+183 0 R
+]
+endobj
+179 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 147.984 604.0 342.312 592.0 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/FileOutputFormat.html#getWorkOutputPath(org.apache.hadoop.mapred.JobConf))
+/S /URI >>
+/H /I
+>>
+endobj
+180 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 506.228 155.976 494.228 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/RecordWriter.html)
+/S /URI >>
+/H /I
+>>
+endobj
+181 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 344.003 256.308 332.003 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reporter.html#incrCounter(java.lang.Enum, long))
+/S /URI >>
+/H /I
+>>
+endobj
+182 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 272.304 344.003 474.624 332.003 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/Reporter.html#incrCounter(java.lang.String, java.lang.String, long amount))
+/S /URI >>
+/H /I
+>>
+endobj
+183 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 280.631 173.988 268.631 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html)
+/S /URI >>
+/H /I
+>>
+endobj
+184 0 obj
+<< /Length 2900 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm>968mu&\dR4Y\F]2X"4ZFJhQ#o:@'r?WHG;aBse2%jNQ!5.j.>cD3Fpj#/Nf^,WMRcQ:n4Y1u7A>Igu1nrQS^C2ugI_ee*?ph*a0+/`6<]2tQ+n43n@,&3,tHXiHo\h#-rf'/7HM+o;''@q8FArj"cDfH$4ds/;bB4brejZhB+GoBi0VVA7eE\P87u4?fT[XC&(-hgg8:1)Pff4!]h]h$P3QDr+T?:++)!.iAbRU.V^Y(@sGb9(\CM8=Pn]2'T(2"r=HX.@JbB/5I3?XZtkq(\u75]$ioGfo67VKT;=h)UE-NS>pXjN\%5S>!+Fe_pU]n"W8)gY4I>j.PZ?HIr/+]nf4_$Hih+m%)5f'dirQ]UGP0D=a;9f,GTr;Pt<BKoDajQ9HE_Fh*gE1S#<BcccLDSZ@#^&H2;^<&1LECWY-6si@Wrp?nS5SGaVq]M=l+t4^sK3=7Y-ROe=A8((oa)K_XT67"!.KN2M0c!XJ&g`O?q,J'2T&E6ADk*Ifk!L@5`Pa3FeuDm3W5$iP5JHP%R_@"NHFfB\T9\=Ncn-tG@fD[eGTjST<9b::n;pS7/b;tjjuaA1fZaIaL/#<P\\3Yb*71pGSJ.)Kee3-.NWZmBLmiW]$TnFhd_Z!.E`0d!9k2cL6J5pieOU:$pb^\6beC3s8@<P*k7,#(U0oZ4d/OoI4nr84V=O$[M/)Csm^c\He/9bLt'X)_gU_-GXa\:IVjj]:0QfMm-X6/QEHHb%!44@ub!YS^1L$BEq?F29`;e\"'(E8IcX.Ph84c^SZT.#7jd1fc0cd]%K)s71GkoF<+Emoa!^=Z;QLo%Qph`HYh%!"pL%+*VbK-D;5Xcdocr%6^gQ>#WQo*r*[!Y975nmXraILYI<-rVSRVb(D68SQH_YN$;Ya4+mH'=mC4a#NA[C7'8cB>U+^D,+,[k.",Hn0.?mL:C2SsG-uT_"fO=QWF/Za7/j];]a?YR6T,!#1HM2n'PKB](hGr`9eLDPJG
 ?:"<:F...@-uufX>0\(2V8?n;4c
 n@@F8&8u0lMO>)J)HUJ$nn98I1g8i@k-0[0<4#F*GT<)*mE/%D`MY4;1OBT1V[NcE=TmgZ+]dkK&XjZZl?m_`a@[$sDRO<=:SdH,^Cpa1[*kJapd`VoD`Lm55pY3JG9/hF-%4TDdcEKh%hl91%g?gbAaXhr[IEiP[lolr*>Wq/MW9,<EHhhdqRgQo)B\g$#C7&+_YH*Y%)E>-?J-pMe.n@]_,h@i!49X9f3i'$ubo5^=pPR;fCO.lIee+naI3sVXl=BBU5[5<+<L)T"WBF?FJ,gT;*1j6!q0\0/XI!@jaC*!uTifR$hX[ta6[:MG\uWgP^bObH3,[BZm^B`;U)uD[KM*_9K9!Y=?&51R*&tr,c+0,BLhVGBR$Qf:CuIW?VI_6_GhVpC:T=bqZMkp5DXI;&\bH;$"(dq3Su]2PoXdk\R?VGiOGO@o[)#D"2h1#`mcticG+srL@++9_h,JDrhqEiUrFJ`$h^<6e-qX%&aDj\__HBO62ncP\ljI%uA^^eh4lUQ,bsK3jJMZ_!Fnaba5[&V7;5J.s!TAh]'l2_YAGp4u[^ul&UGE7pZ`VY/nqli2j3H3=N-]B>g_c8_cR$Y*`4:9Zeg;Lt5I"%a^Epd\75SN50_OLDA235e8;b@#LC.<.J,X!oKK1_`.b9DB/'n'AIH[C9cY^1j`94#"&3Ah1Ga;9>ZT:6J=)#!gdf+Y08*dVP`bOL8+^p;o'2Y#l-W39)`rJM`!oKDIL2hQiL;o_Q>`XJH++JH!!j?Ms0G&T:_]4C@6W`B*(^Yh$N%b+-TB8tG"gq?)KFi)(k2C49GpMrD$W709jT*3a(Qf_6*<lg:BpAqei6r$:7kh&k2lO^mH9fqoI?3dueBZ,"%+Sr@KKYa7Vc&H@lisRjaR^d0EKfM`#:+F>C&~>
+endstream
+endobj
+185 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 184 0 R
+/Annots 186 0 R
+>>
+endobj
+186 0 obj
+[
+187 0 R
+188 0 R
+189 0 R
+190 0 R
+191 0 R
+192 0 R
+193 0 R
+194 0 R
+195 0 R
+196 0 R
+]
+endobj
+187 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 117.0 539.2 322.956 527.2 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#addCacheFile(java.net.URI,%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+188 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 526.0 315.936 514.0 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#addCacheArchive(java.net.URI,%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+189 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 512.8 301.296 500.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#setCacheFiles(java.net.URI[],%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+190 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 307.632 512.8 538.908 500.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#setCacheArchives(java.net.URI[],%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+191 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 425.6 321.636 413.6 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#createSymlink(org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+192 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 189.312 325.2 495.612 313.2 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#addArchiveToClassPath(org.apache.hadoop.fs.Path,%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+193 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 312.0 376.32 300.0 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/filecache/DistributedCache.html#addFileToClassPath(org.apache.hadoop.fs.Path,%20org.apache.hadoop.conf.Configuration))
+/S /URI >>
+/H /I
+>>
+endobj
+194 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 111.66 209.028 134.328 197.028 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/util/Tool.html)
+/S /URI >>
+/H /I
+>>
+endobj
+195 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 375.3 174.628 481.272 162.628 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/util/GenericOptionsParser.html)
+/S /URI >>
+/H /I
+>>
+endobj
+196 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 161.428 240.984 149.428 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/util/ToolRunner.html#run(org.apache.hadoop.util.Tool, java.lang.String[]))
+/S /URI >>
+/H /I
+>>
+endobj
+197 0 obj
+<< /Length 2393 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gau0ED3N__&cR6oJfg0?H0,S\_08[-1NaV6oh6:E<r42p%hJn9ZmgS1_7SJJq<qJ#@R5S;`G*A&>G/L<^pCZ!mR"+c=5Ro!rd3`e8,2u;<U(4KM4t:T%Dqi1D<Qbn$N`h=B6X%/eZ8_&5XZ7Wp?$M_GSImE*j49naFujMdgO%9OW-:7Zh)Z=D'mEC$g0EggTRcpC7"b$O\Fl2eB\YN_7tHXr5[L)R4/Mk&kpIIL,;n.<*ZH$]*D@n$c9VNkTkPSO`Va%<r?.,1G?_=^T<)fFqn=Wge#HiFeRfIfI=qJO'ESq2Qu-I+[]\*`$@p;?=?62&eND]4.*5O;2_/9k$:oO>Rn=tI1mrH0UdsWnY-$_RlRgal+Ho=).]J'6p>b=NYf[Q#O1bMiOC->A*;6b5IrdAmd8NFUGFt:KLZ*\h)Xj!`f]6!mU-]bjg@95diTNo(V+k?&!YG5:V<XuC<q+K;(o[se`In02]:5n+ARr[SSM\UD]&*XFPGIm1JNj.W"YfYRU3MS$ReSFC?i)LJq4r]>8nC<2qn$MQaP8bR]D^eckM^tOf@[lk7p!28?k<TXSu=c2VmY`4TuBnFKVM6_P6!3N^u,oKoumPYnFVSn`M\%f;tp)eQ4p\_>%3Y='a6hVL^2E_19g,.]*oHjdL*#O*G_/pFkm,jEZE=n,/CLXH3sR_B-p,@#^)1M"I]Wa5QXM.F1`s;S?GbQZ,4'Opn<ujo1ZP=p3DP4d*7)JIf?rPM;Z2f4$36/E`aW5n)-1W=@N'9o(l'luU9)-03Ol\R\W&4)n2kphjR+It$DRY'2>oElD@7QmC#]pk:=Jfm[72oGRf(o^t[fQoACMd]W0>Djs6?5?^>T\0Hp&?!_DDCbJ2QT*]cgXCBk>PQ!(QAsEM-Y_-B<#nrjJKP[;&P+U'D5PhYYr#[%P^j.:)Ql!`1bFb+@3R/)PY^(LFOHL+bkF9Fqb]&1$\6ZYMVljPN\\,166j"H&ih==[f.ilY2(tO:o[
 _C4@.RBnaJYI=57L0K7Cj?C;)@cD$K+JUA$HL5TpKK:`6a7S7hQYEih\GX\_&:O?klh4pAPqH_3$7KOQ:prbtA5Fp42YC^SZg&&^:7Q$NnibLOqK.B/b+0DR#ZqYQop]eK,-Gn:9m[S,<0*-.)_:RVbSkN01r+?2J.#G'lM#_4^d72;Xc6`cVHIG!J;WSiDCl>RJ-A>4FCPT0uVN*G(rnJ?mdga\0G34O+'f\8$"i^r"_s=5L&LV\,W?d2,^p\-r*K!<>i\1.'J%S'NqfR,*:u<_d)7n0.i$mKVrn:I58]euEI%&9"EHjmX&SZQ#-@5.;Zrq8=:1@7%#7L,JWZ*3@:;A"Yj#KA+qc\j!J$OT/t6%o\W\_>)qt&?uWc>)1ts)mT8EHpD(t4e;(\9==rb^VAV3;/fRE8UW=WB^8h:*laLtCka13X7bnnjiK>M%"k29Ob5V>nP&"U-2;<uV.i!<D7*-J]Z4k<JtAC@"sVEpi=`ejI#UE_Kg7_iH9S^>="$6+r4naf=XtsrU5;c4EVNG1$R?MOBMKLI\tq)fO*(7dR4Y"Qj:>R,"71UoQT*4cT'oNYe0hsjbu9%9[/KSaet4msCFhr(C!5)26\5)NXg<ag0/:u8;K!,/Z%&-aNYo];akXtje\G!5E'Ie$+r!c'#U'+:#hIQGrXYrl_X'U?#&+HF9;S<u!a<_@i&Oc:FegNeg.lk\>$r^5]m6e,m+KO\H5iiC;dV[j+f=3d:cas1bsp<Sj>\kUYFCBRHp?Wf*T^dhmO__XSd@tJPH\]3)b`Z+dTlIS)iLha>,U,/$t]"^S>"%n_>N!RbN6i.)#=?*!MA?Q%kqniTWjfC%P$%9bH9.lD.D3toVYXV14DD?!IJA:\;g'#a&VM\e*u9Uff(<M-B>]3]sf"KpX^qkV@=o]rbo&sWB6A7&1Ej2k2`r*[mg[\9VYstH8:'>`I&aRG`LG'SF!ND@$3$BL)p#UX8/lb-$Z*hO$lsQB+5H=C+C2
 +o/(1dE]ff,I'hDRbc]upZ8NpEfA\D+\De-J5oME+[;hbBFHsAc$$r:(O]ZeLnW`b0hqt3h/fc;QkHcm\!_eO6E1ld@,[Ki:/0;$HZV'ohJc0+'j]cm2MCe>Sb>YJA/utGQI`S[1:7P=.FfsXIlmh%>jc);mcLuHX2\00]7,:%A]mb_<SN7&`N,H?-_'r*K(qOjNVS1g#O1/s?a<?Nf3rba[HG"*faj$_Vd"-4t"4t%;M8S;T"jDYe:!6tXC%:=iBeS(H)bq<5,T_."Q5r\PPbPL/D@[jd(QF%Ys*+W'0TAC(7PBgK]rl9Aod'.hWH0`R27V\<L7V^Kf8]H<#`O6&fC#e=YMY7jH5n-BJ+?c"jh<69"Gug$cn@H72-N?sabObU)8Z1j5lgPB~>
+endstream
+endobj
+198 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 197 0 R
+/Annots 199 0 R
+>>
+endobj
+199 0 obj
+[
+200 0 R
+201 0 R
+202 0 R
+203 0 R
+]
+endobj
+200 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 575.028 167.328 563.028 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/IsolationRunner.html)
+/S /URI >>
+/H /I
+>>
+endobj
+201 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 184.992 321.256 359.964 309.256 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setProfileEnabled(boolean))
+/S /URI >>
+/H /I
+>>
+endobj
+202 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 293.292 247.256 515.268 235.256 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setProfileTaskRange(boolean,%20java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+203 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 186.456 252.324 174.456 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setProfileParams(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+204 0 obj
+<< /Length 2088 /Filter [ /ASCII85Decode /FlateDecode ]
+ >>
+stream
+Gatm<=c...@sKt>B@ZEFZFN)$H&5CgATP4d#1&qJgiO@T?p9X<g,2-01c(,1:t?_!XUmU2aY?Y)PJK7R/U&dWPb'*XpTL,qQ)r]>asG=JBrl'`(G&SR%9uSp[<9p:F%VV0ePJW@pYCY80uIj_+tMO)ik--,8C`p7G\QPA^?^s/$`=KbFiR//'9>K>EB4*VY3X%1:2Q93Qj1s`kBG4+6B-epAObS[cu=#-&<SqSG&kVLOWQVskB9*N*j.;3#!!&W0]+K#ng6qhPCD<Y=\)%_khJb*a/"g^Ug#TD1jIq$3!_j\/d+[Z4Sd@3*r%W'R=;FpgK%l*o#]>kW;BLGMm&3*l%0o\qC0,9Rd7r1SED'aA($;!1@;]QcFH^pQf:tWQ_aJhU^`^XM9W_iE#q]09d=giAi$'hrW)E;U@)59;q1]jM1
 nM!@-*Z^RNp1GmZP3t2Vt:L%Jk/ml:)+*pZQWD)GU)8BG[m*!rnCCXcOSP)=[T8+UunoJZ_.^cHH`a/WaLpkf",?7Yj+[dXXI*(Mf_EY*%Rtr8,^8Ycj!j;Pspb@>W8>AO<G=a&els-qfgEXG&.Cr4^)Q:p-O,j,sa.$Le?Dh=\&.F\RVk31(NYg#TT&b*T$8:jS#cb;3)9$fY%T,&+#9CX`d"H8J`So8""p#<r9;O^Z0lbW2$,kFLI3(\W9/ie=I$4*'go,U\\'Fn)<YZ:)loJa6^DTQ@*>4E[Ii)X9+jY18F%?,Q'n0jGUDqsdpbY(rLN!dqiGX)OC`go]:VSIC$Vb+7gT`W_MdCJU\?Ute"D(ak.44Je)ss)i;/3Kg\m;581AFu);uX"gmcJMrBh8"67.UXf/g1pc^29lk>6)%3NFWg@c/Nm/$YK.\i`Pp`oi"1LMpA0)Z:`\8gd)j=p4NS0e%^<u96meq7l504.u>q!ZpSXf1[PMFjhO?S0ni$@l+El&,rGmf`>Hn2*O]qiQ+ERo0,?2*K:WPZMk^9`.+rgkK2g3*DaGdpo7c'S&%%JRLk#N9[ba#p!h)Y*F+a0\4^jsUG;"XJXMCHKLF/0`U<IrojBP`sIT#9YrbdX<gPN%))?_g@?aBicD*]((=X#8H(iI=&!@LMa<:FZA.EY-9=R'irj1R%Q?[Jd[9u1`;Nu,=,im-2usC=WA"DA>d*eJVAg/;OSi;dCmul^WH9c=tf'qOp*#OJUn"^FdZHD:'1c.j@q@A5X3h\^MB-(iN"dBJZVc:_8%jF4CB[!ASdThqClm)@Dl@]@*D=n%gHn7J2A(<[TU;gSrYWq'hT0S;Q2QM)U%HL`^k*12*Dtt%9AeEJ8-;dmSF3\_BP6HKpF_Q>!7[X83K;mQosi$I9BUA+=MVW(pGl\H$e:(AM'*NB:RWl3YrE>]&8#6J63nj?r8//R$/p<@=1[&>oWBs("=6k9d1?>915KFI2J&o#M:>
 6FF)?6mVA42HUiigkFO?NVost.DRcc;$8F)<<PsI,[(\+&c$<hZ_p1s_[s`QRRZHS+ZKQE]["`:l^(^)G,^A`!ShgH9~>
+endstream
+endobj
+205 0 obj
+<< /Type /Page
+/Parent 1 0 R
+/MediaBox [ 0 0 612 792 ]
+/Resources 3 0 R
+/Contents 204 0 R
+/Annots 206 0 R
+>>
+endobj
+206 0 obj
+[
+207 0 R
+208 0 R
+209 0 R
+210 0 R
+]
+endobj
+207 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 184.98 495.537 268.968 483.537 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (mapred_tutorial.html#DistributedCache)
+/S /URI >>
+/H /I
+>>
+endobj
+208 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 90.0 393.646 266.988 381.646 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (api/org/apache/hadoop/mapred/JobConf.html#setMapDebugScript(java.lang.String))
+/S /URI >>
+/H /I
+>>
+endobj
+209 0 obj
+<< /Type /Annot
+/Subt

<TRUNCATED>

[3/3] bigtop git commit: BIGTOP-2009: added new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz

Posted by yw...@apache.org.
BIGTOP-2009: added new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz

Signed-off-by: YoungWoo Kim <yw...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/96ecf29a
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/96ecf29a
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/96ecf29a

Branch: refs/heads/master
Commit: 96ecf29a1aa4f6109639411cd9287245e2ed0387
Parents: 49705da
Author: srinivas-altiscale <sr...@altiscale.com>
Authored: Sun Sep 6 01:18:34 2015 +0530
Committer: YoungWoo Kim <yw...@apache.org>
Committed: Sun Sep 6 14:44:06 2015 +0900

----------------------------------------------------------------------
 .../itest/hadoop/hdfs/CommonFunctions.groovy    |   37 +
 .../bigtop/itest/hadoop/hdfs/TestChgrp.groovy   |  249 ++
 .../bigtop/itest/hadoop/hdfs/TestCp.groovy      |  300 ++
 .../bigtop/itest/hadoop/hdfs/TestDu.groovy      |  323 ++
 .../bigtop/itest/hadoop/hdfs/TestGet.groovy     |  245 ++
 .../bigtop/itest/hadoop/hdfs/TestLs.groovy      |  240 ++
 .../bigtop/itest/hadoop/hdfs/TestMkdir.groovy   |  170 +
 .../bigtop/itest/hadoop/hdfs/TestMv.groovy      |  288 ++
 .../bigtop/itest/hadoop/hdfs/TestPut.groovy     |  192 +
 .../bigtop/itest/hadoop/hdfs/TestStat.groovy    |  222 ++
 .../bigtop/itest/hadoop/hdfs/TestTouchz.groovy  |  173 +
 .../src/main/resources/test_data/test.zip       |  Bin 0 -> 346 bytes
 .../src/main/resources/test_data/test_1.txt     |    4 +
 .../src/main/resources/test_data/test_2.txt     |    4 +
 .../hadoop/src/main/resources/test_data/test_3  | 3321 ++++++++++++++++++
 15 files changed, 5768 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
new file mode 100644
index 0000000..1844778
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/CommonFunctions.groovy
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+import java.util.ArrayList;
+import java.util.List;
+
+public class CommonFunctions {
+  /**
+   * lookForGivenString checks whether the given string is present in the list data
+   */
+  public boolean lookForGivenString(List<String> data,
+                                    String searchString) {
+    boolean result = false;
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}
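
For reference, a minimal Groovy sketch of how this helper can be exercised (an illustration, not part of the commit): run a command through the same Shell wrapper the tests use and search its captured stdout for an expected substring. The "hdfs dfs -ls /" command and the "/tmp" path below are placeholder values, and the sketch assumes lookForGivenString is publicly accessible.

  import org.apache.bigtop.itest.shell.Shell
  import org.apache.bigtop.itest.hadoop.hdfs.CommonFunctions

  // Run an arbitrary command and capture its stdout line by line.
  Shell sh = new Shell("/bin/bash -s")
  sh.exec("hdfs dfs -ls /")        // placeholder command; any output works

  // Search the captured output for an expected substring.
  // Assumes lookForGivenString is exposed publicly on CommonFunctions.
  CommonFunctions helper = new CommonFunctions()
  boolean found = helper.lookForGivenString(sh.getOut(), "/tmp")
  println("found '/tmp' in the listing: " + found)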

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
new file mode 100644
index 0000000..1569e0a
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestChgrp.groovy
@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestChgrp {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  private static Shell shHDFS = new Shell("/bin/bash -s","hdfs");
+  private static Shell shOOZIE = new Shell("/bin/bash -s","oozie");
+  // extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testChgrpInputDir = "testChgrpInputDir" + date;
+  private static String testChgrpInputs = "test_data_TestChgrp"
+  private static String testChgrpOut = "testChgrpOut" + date;
+  private static String testChgrpOutCmp = "testChgrpOutCmp" + date;
+  private static String user_testinputdir = USERNAME+"/"+testChgrpInputDir+
+                                             "/"+testChgrpInputs;
+  private static String TESTDIR = "/user/$USERNAME/$testChgrpInputDir";
+  static List<String> TestChgrp_output = new ArrayList<String>();
+  static List<String> TestChgrp_error = new ArrayList<String>();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestChgrp.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestChgrp");
+    assertTrue("Could not copy data into test_data_TestChgrp", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS",
+               sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testChgrpInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testChgrpInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -chmod -R o+w $TESTDIR/$testChgrpInputs");
+    logError(sh);
+    assertTrue("Could not change permissions", sh.getRet() == 0);
+
+    println("Running chgrp:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testChgrpOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpOut from local disk");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testChgrpOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpOutCmp");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testChgrpInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testChgrpInputs");
+      assertTrue("Could not remove output directory/file",
+                 sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testChgrpWithoutGroupName() {
+    println("testChgrpWithoutGroupName");
+    sh.exec("hdfs dfs -chgrp $TESTDIR/$testChgrpInputs ");
+    assertTrue("chgrp command executed successfully without group name",
+               sh.getRet() == 255);
+
+    String searchStr = "chgrp: Not enough arguments: expected 2 but got 1";
+    assertTrue("expected pattern not found in the output file ",
+               lookForGivenString(sh.getErr(),searchStr) == true);
+  }
+
+  @Test
+  public void testChgrpWithInvalidPath() {
+    println("testChgrpWithInvalidPath");
+    sh.exec("hdfs dfs -chgrp $TESTDIR/$testChgrpInputs random");
+    assertTrue("chgrp should fail when given an invalid path", sh.getRet() == 1);
+
+    String searchStr = "chgrp: `random': No such file or directory";
+    assertTrue("expected pattern not found in the error output",
+               lookForGivenString(sh.getErr(), searchStr) == true);
+  }
+
+  @Test
+  public void testChgrpWithInvalidGroupName() {
+    sh.exec("hdfs dfs -chgrp random $TESTDIR/$testChgrpInputs ");
+    assertTrue("chgrp should fail when given an invalid group name", sh.getRet() == 1);
+
+    String searchStr = "chgrp: changing ownership of '/user/"+
+                       user_testinputdir+
+                       "': User does not belong to random";
+    assertTrue("expected pattern not found in the error output",
+               lookForGivenString(sh.getErr(), searchStr) == true);
+  }
+
+  @Test
+  public void testChgrp() {
+    println("testChgrp");
+    sh.exec("id | awk \'{print \$3}\' | awk -F\'[()]\' \'{print \$2}\'");
+    List out_grp = sh.getOut();
+    String group_name = out_grp.get(0);
+
+    // first make sure that all the files in the directory belongs to a group
+    sh.exec("hdfs dfs -chgrp -R $group_name $TESTDIR");
+    assertTrue("chgrp command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR");
+    assertTrue("Able to list files?", sh.getRet() == 0);
+
+    String searchString = "$USERNAME $group_name";
+    assertTrue("chgrp failed for a proper group name",
+                lookForGivenString(sh.getOut(), searchString) == true);
+ 
+    shHDFS.exec("hdfs dfs -chgrp hdfs $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command failed with hdfs user on HDFS",
+               shHDFS.getRet() == 0);
+
+    shHDFS.exec("hdfs dfs -ls $TESTDIR");
+    assertTrue("Able to list files?", shHDFS.getRet() == 0);
+
+    // check that only the parent directory's group name was changed
+
+    searchString = "$USERNAME hdfs";
+    assertTrue("chgrp was not applied to the parent directory",
+               lookForGivenString(shHDFS.getOut(), searchString) == true);
+
+    // check that chgrp was not applied to files inside the directory
+    shHDFS.exec("hdfs dfs -ls -R $TESTDIR/$testChgrpInputs");
+    searchString = "$USERNAME hdfs";
+    assertTrue("chgrp was unexpectedly applied to files inside the directory",
+               lookForGivenString(shHDFS.getOut(), searchString) == false);
+
+    // now change the group recursively
+    shHDFS.exec("hdfs dfs -chgrp -R hdfs $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command with hdfs user failed on HDFS",
+               shHDFS.getRet() == 0);
+
+    shHDFS.exec("hdfs dfs -ls -R $TESTDIR");
+    assertTrue("listing directories failed", shHDFS.getRet() == 0);
+
+    searchString = "$USERNAME $group_name";
+    assertTrue("chgrp failed to execute recursively on directory",
+               lookForGivenString(shHDFS.getOut(), searchString) == false);
+  }
+
+  @Test
+  public void testChgrpWithUnauthorizedUser() {
+    println("testChgrpWithUnauthorizedUser");
+    // remove write permission for others
+    sh.exec("hdfs dfs -chmod -R o-w $TESTDIR/$testChgrpInputs");
+    assertTrue("Could not change permissions", sh.getRet() == 0);
+
+    // now try to change group as oozie user
+    shOOZIE.exec("hdfs dfs -chgrp oozie $TESTDIR/$testChgrpInputs");
+    assertTrue("chgrp command with oozie user failed on HDFS",
+               shOOZIE.getRet() == 1);
+
+    List err_msgs = shOOZIE.getErr();
+    String failure_msg = "chgrp: changing ownership of " +
+                         "\'$TESTDIR/$testChgrpInputs\': Permission denied";
+    Boolean failure = false;
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("chgrp command with oozie user failed on HDFS",
+               failure == true);
+  }
+
+  /**
+   * lookForGivenString checks whether the given string is present in the list data
+   */
+  private boolean lookForGivenString(List<String> data,
+                                     String searchString) {
+    boolean result = false;
+    for( String output_String : data) {
+      if(output_String.contains(searchString)) {
+        result = true;
+        break;
+      }
+    }
+    return result;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
new file mode 100644
index 0000000..ac96e6d
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestCp.groovy
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import org.junit.runners.MethodSorters;
+import org.junit.FixMethodOrder;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestCp {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testCpInputDir = "testCpInputDir" + date;
+  private static String testCpInputs = "test_data_TestCp"
+  private static String testCpOut = "testCpOut" + date;
+  private static String testCpOutCmp = "testCpOutCmp" + date;
+  private static String TESTDIR  = "/user/$USERNAME/$testCpInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestCp.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestCp");
+    logError(sh);
+    assertTrue("Could not copy data into test_data_TestCp.", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testcp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testcp");
+    }
+    sh.exec("mkdir temp_testcp");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testCpInputs $TESTDIR");
+    logError(sh);
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    println("Running cp:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCpOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpOut from local disk");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testCpOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testcp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testcp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testCpInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testCpInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testCpForFiles() {
+    println("testCpForFiles");
+    // first delete the test_3 file
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm $TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to cleanup file from destination", sh.getRet() == 0);
+    }
+    // copy test_1.txt file to test_3.txt on hdfs
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt $TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_1.txt temp_testcp/test_1.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_3.txt temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("diff temp_testcp/test_1.txt temp_testcp/test_3.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+
+    sh.exec("rm -f temp_testcp/test_1.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testCpForDirectories() {
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs $TESTDIR/test_temp");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_temp");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
+    List out_msgs = sh.getOut();
+    Boolean success_1= false;
+    Boolean success_2= false;
+    Iterator out_iter = out_msgs.iterator();
+    while (out_iter.hasNext()) {
+      String next_val = out_iter.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") && next_val.contains("$TESTDIR/test_temp/test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") && next_val.contains("$TESTDIR/test_temp/test_1.txt"))  {
+        success_2 = true;
+        continue;
+      }
+    }
+    assertTrue("Copied files do not match", success_1 == true && success_2 == true);
+  }
+
+  @Test
+  public void testCopyExistingFile() {
+    println("testCopyExistingFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt $TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to copy a file to HDFS", sh.getRet() == 0);
+    }
+
+    //copy test_2.txt file to test_3.txt on hdfs, see if it gets overwritten
+    sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_2.txt $TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    Boolean failure= false;
+    String failure_msg = "cp: `$TESTDIR/$testCpInputs/test_3.txt': File exists";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("copy command failed", failure == true);
+  }
+ 
+  @Test
+  public void testCopyOverwriteFile() {
+    println("testCopyOverwriteFile");
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testCpInputs/test_3.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -cp $TESTDIR/$testCpInputs/test_1.txt $TESTDIR/$testCpInputs/test_3.txt");
+      assertTrue("failed to copy a file to HDFS", sh.getRet() == 0);
+    }
+
+    //copy test_2.txt file to test_3.txt on hdfs, with overwrite flag
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_2.txt $TESTDIR/$testCpInputs/test_3.txt");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_2.txt temp_testcp/test_2.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_3.txt temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testCopyOverwriteFileInNewDirectory() {
+    println("testCopyOverwriteFileInNewDirectory");
+    //copy test_2.txt file to a newly created directory on hdfs
+    sh.exec("hdfs dfs -mkdir $TESTDIR/temp_testcp");
+    assertTrue("could not create directory on hdfs", sh.getRet() == 0);
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_2.txt $TESTDIR/temp_testcp");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/$testCpInputs/test_2.txt temp_testcp/test_2.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("get command for 2nd file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_2.txt temp_testcp/test_3.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testCopyNonExistingFile() {
+    println("testCopyNonExistingFile");
+    //copy test_4.txt (non existing file) to another location on hdfs
+    sh.exec("hdfs dfs -cp -f $TESTDIR/$testCpInputs/test_4.txt $TESTDIR/temp_testcp");
+    assertTrue("copy command should not get executed for a non existing file on HDFS", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    boolean failure= false;
+    String failure_msg = "cp: `$TESTDIR/$testCpInputs/test_4.txt': No such file or directory";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("copy command failed", failure == true);
+  }
+
+  @Test
+  public void TestCpFileProtocolWithFile() {
+    println("TestCpFileProtocolWithFile");
+    //copy test_1.txt from local to a newly created dir on hdfs
+    sh.exec("hdfs dfs -mkdir $TESTDIR/temp_testcp_1");
+    sh.exec("hdfs dfs -cp file:///$USERDIR/$testCpInputs/test_1.txt " +
+            "$TESTDIR/temp_testcp_1");
+    assertTrue("copy command from local to hdfs failed", sh.getRet() == 0);
+    sh.exec("hdfs dfs -get $TESTDIR/temp_testcp_1/test_1.txt " +
+            "temp_testcp/test_1.txt");
+    assertTrue("get command for 1st file failed on HDFS", sh.getRet() == 0);
+    sh.exec("diff temp_testcp/test_1.txt $testCpInputs/test_1.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+    sh.exec("rm -f temp_testcp/test_1.txt");
+    assertTrue("could not remove the files", sh.getRet() == 0);
+    sh.exec("hdfs dfs -rm -skipTrash $TESTDIR/temp_testcp_1/test_1.txt");
+    assertTrue("could not remove file from hdfs", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void TestCpFileProtocolWithDirectory() {
+    println("TestCpFileProtocolWithDirectory");
+    //copy a dir from local to hdfs
+    sh.exec("hdfs dfs -cp file:///$USERDIR/$testCpInputs $TESTDIR/test_temp_1");
+    assertTrue("copy command failed on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_temp_1");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+    List out_msgs_fp = sh.getOut();
+    Boolean success_fp_1= false;
+    Boolean success_fp_2= false;
+    Iterator out_iter_fp = out_msgs_fp.iterator();
+    while (out_iter_fp.hasNext()) {
+      String next_val = out_iter_fp.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp_1/test_2.txt")) {
+        success_fp_1 = true;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_temp_1/test_3"))  {
+        success_fp_2 = true;
+      }
+    }
+    assertTrue("Copied files do not match",
+               success_fp_1 == true && success_fp_2 == true);
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
new file mode 100644
index 0000000..bb92cbc
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestDu.groovy
@@ -0,0 +1,323 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestDu {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testDuInputDir = "testDuInputDir" + date;
+  private static String testDuInputs = "test_data_TestDu"
+  private static String testDuOut = "testDuOut" + date;
+  private static int repfactor = 2;
+  private static String user_testinputdir = USERNAME+"/"+testDuInputDir+
+                                            "/"+testDuInputs;
+  private static String TESTDIR  = "/user/$USERNAME/$testDuInputDir";
+  static List<String> TestDu_output = new ArrayList<String>();
+  static boolean result = false;
+  static boolean result_2 = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestDu.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestDu");
+    assertTrue("Could not copy data into test_data_TestDu", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testDuInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+    println("Running du:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testDuOut");
+    if (sh.getRet() == 0) {
+      // println("rm -rf $testDuOut")
+      sh.exec("rm -rf $testDuOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testDuInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testDuInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testDuBasics() {
+    println("TestDuBasics");
+    result = false;
+    sh.exec("hdfs dfs -du $TESTDIR");
+    assertTrue("du command on HDFS failed", sh.getRet() == 0);
+    TestDu_output=sh.getOut();
+    int size = TestDu_output.size();
+
+    assertTrue("more number of lines than expected, expected only 1 line",
+               size == 1);
+
+    String[] output_split = TestDu_output.get(0).split("\\s+");
+    if (Integer.parseInt(output_split[0]) > 119999 &&
+        Integer.parseInt(output_split[0]) < 140000 &&
+        output_split[1].contains("/user/"+user_testinputdir)) {
+      result = true;
+    }
+    assertTrue("du output does not contain the expected size and path", result == true);
+  }
+
+  @Test
+  public void testDuSummaryOptions() {
+    println("testDuSummaryOptions");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s $TESTDIR/$testDuInputs/*");
+    assertTrue("du -s command on HDFS failed", sh.getRet() == 0);
+
+    TestDu_output=sh.getOut();
+    int size = TestDu_output.size();
+    assertTrue("more number of lines than expected; expected only 4 line",
+               size == 4);
+
+    for(String string :TestDu_output)
+    {
+      if (string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 119999 &&
+           Integer.parseInt(output_split[0]) < 140000) {
+          result = true;
+        }
+        continue;
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+           Integer.parseInt(output_split[0]) < 20) {
+          result_2= true;
+        }
+        continue;
+      }
+    }
+    assertTrue("Does the -du -s output contains proper data?", result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuhOptions() {
+    println("testDuSummaryOptions");
+    result = false;
+    sh.exec("hdfs dfs -du -h $TESTDIR ");
+    assertTrue("du -h command on HDFS failed", sh.getRet() == 0);
+    TestDu_output=sh.getOut();
+    assertTrue("Does -du -h generated more number of lines than expected; " +
+               "expected only 1 line", TestDu_output.size() == 1 );
+
+    String[] output_split = TestDu_output.get(0).split("\\s+");
+    if (output_split[0].matches("^1[2-3][0-9]..") &&
+       output_split[1].equals("K") &&
+       output_split[2].contains("/user/"+user_testinputdir)) {
+      result = true;
+    }
+    assertTrue("Does the du -h output is proper?", result == true);
+  }
+
+  @Test
+  public void testDuMultipleOptions() {
+    println("TestDuMultipleOptions");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s -h $TESTDIR/$testDuInputs/* ");
+    assertTrue("du with multiple options failed on HDFS", sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("Does -du -s -h generated more number of lines than expected?"+
+               " expected only 4 lines", TestDu_output.size() ==4 );
+
+    for(String string :TestDu_output) {
+      if (string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Float.parseFloat(output_split[0]) > 119 &&
+           Float.parseFloat(output_split[0]) < 140 &&
+           output_split[1].equals("K")) {
+          result =true;
+        }
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does the du -s -h output contains correct data about files?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuMultipleOptionsForFiles() {
+    println("testDuMultipleOptionsForFiles");
+    result = false;
+    result_2 = false;
+
+    sh.exec("hdfs dfs -du -s -h $TESTDIR/$testDuInputs/test_1.txt  " +
+            "$TESTDIR/$testDuInputs/test_2.txt ");
+    assertTrue("du with multiple options failed on HDFS", sh.getRet() == 0);
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 2 lines",
+               TestDu_output.size() == 2);
+
+    for(String string :TestDu_output)
+    {
+
+      if (string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+           Integer.parseInt(output_split[0]) < 20) {
+          result=true;
+        }
+      }
+
+      if (string.contains("/user/"+user_testinputdir+"/test_2.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does the du -s -h output contains correct data about 2 files?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuHdfsProtocolForMultipleFiles() {
+    println("testDuHdfsProtocolForMultipleFiles");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s -h hdfs://$TESTDIR/$testDuInputs/test_1.txt " +
+            "$TESTDIR/$testDuInputs/test_2.txt ");
+    assertTrue("du command with hdfs protocol failed on HDFS",
+               sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 2 line",
+               TestDu_output.size() == 2);
+
+    for (String string :TestDu_output) {
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20 ) {
+          result=true;
+        }
+      }
+
+      if (string.contains(" /user/"+user_testinputdir+"/test_2.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20 ) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does du -s -h output contains valid data with hdfs protocol?",
+               result == true && result_2 == true);
+  }
+
+  @Test
+  public void testDuHdfsProtocolForDirectory() {
+    println("testDuHdfsProtocolForDirectory");
+    result = false;
+    result_2 = false;
+    sh.exec("hdfs dfs -du -s hdfs://$TESTDIR/$testDuInputs/* ");
+    assertTrue("dus command with hdfs protocol failed on HDFS",
+               sh.getRet() == 0);
+
+    TestDu_output = sh.getOut();
+    assertTrue("more number of lines than expected; expected only 4 lines",
+               TestDu_output.size() == 4);
+
+    for (String string :TestDu_output) {
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_3")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 119999 &&
+            Integer.parseInt(output_split[0]) < 140000) {
+          result =true;
+        }
+      }
+
+      if (string.contains("hdfs:/") &&
+          string.contains("/user/"+user_testinputdir+"/test_1.txt")) {
+        String[] output_split = string.split(" ");
+        if (Integer.parseInt(output_split[0]) > 0 &&
+            Integer.parseInt(output_split[0]) < 20) {
+          result_2=true;
+        }
+      }
+    }
+    assertTrue("Does du -s -h output contains valid data with hdfs protocol?",
+               result == true && result_2 == true);
+  }
+}
+

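The -h checks in TestDu above assume that a human readable du line splits on whitespace into a size token, a unit token and a path. A small sketch of that parsing assumption, using a made-up line whose size falls inside the 120K-140K window the tests accept:

    // Hypothetical 'hdfs dfs -du -s -h' output line; the real value depends on the test data
    String line = "131.8 K  /user/someuser/testDuInputDir/test_data_TestDu"
    String[] cols = line.split("\\s+")
    assert cols[0] == "131.8"                      // human readable size
    assert cols[1] == "K"                          // unit token checked with equals("K")
    assert cols[2].startsWith("/user/")            // path column
    assert cols[0].matches("^1[2-3][0-9]..")       // size pattern used by testDuhOptions
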
http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
new file mode 100644
index 0000000..1f04784
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestGet.groovy
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+
+public class TestGet {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testGetInputDir = "testGetInputDir" + date;
+  private static String testGetInputs = "test_data_TestGet"
+  private static String TESTDIR  = "/user/$USERNAME/$testGetInputDir";
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestGet.class, "." , null);
+
+    sh.exec("cp -r test_data $testGetInputs");
+    logError(sh);
+    assertTrue("Could not copy data into $testGetInputs .", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testGetInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testget");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget");
+    }
+    sh.exec("mkdir temp_testget");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    println("Running get:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testget");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testGetInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testGetInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d temp_testget/test_optionscrc");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testget/test_optionscrc");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testGetFile() {
+    println("testGetFile");
+
+    //get test_1.txt file from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_1.txt temp_testget");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testget/test_1.txt $testGetInputs/test_1.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testGetDirectory() {
+    println("testGetDirectory");
+    // get a dir from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs temp_testget");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("ls -l temp_testget/$testGetInputs");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+    List out_msgs = sh.getOut();
+    Boolean success_1= false;
+    Boolean success_2= false;
+    Iterator out_iter = out_msgs.iterator();
+    while (out_iter.hasNext()) {
+      String next_val = out_iter.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("test_3"))  {
+        success_2 = true;
+        continue;
+      }
+    }
+    assertTrue("Able to find Downloaded files?",
+               success_1 == true && success_2 == true);
+  }
+
+  @Test
+  public void testGetFileWhenFileExistsAtLocal() {
+    println("testGetFileWhenFileExistsAtLocal");
+    //get test_2.txt file from hdfs a location where the file already exists
+    sh.exec("test -f temp_testget/$testGetInputs/test_2.txt");
+    if (sh.getRet() == 1) {
+      sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs temp_testget");
+      assertTrue("get command failed for directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_2.txt " +
+            "temp_testget/$testGetInputs/test_2.txt");
+    assertTrue("get command failed on HDFS", sh.getRet() == 1);
+
+    String failure_msg = "get: `temp_testget/$testGetInputs/test_2.txt': " +
+                         "File exists";
+    assertTrue("Does get command properly failed to download an existing file?",
+               scripts.lookForGivenString(sh.getErr(),failure_msg) == true);
+  }
+
+  @Test
+  public void testGetFileNonExistingFile() {
+    println("testGetFileNonExistingFile");
+    //get test_4.txt(non existing) from hdfs to local
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_4.txt temp_testget");
+    assertTrue("get command failed on HDFS", sh.getRet() == 1);
+
+    String failure_msg = "get: `$TESTDIR/$testGetInputs/test_4.txt': " +
+                         "No such file or directory";
+    assertTrue("Does get command failed to download non existing file?",
+               scripts.lookForGivenString(sh.getErr(),failure_msg) == true);
+  }
+
+  @Test
+  public void testGetFileWithOutSpecifyingDestination() {
+    println("testGetFileWithOutSpecifyingDestination");
+    //get test_2.txt from hdfs to local, with out specifying any destination
+    sh.exec("hdfs dfs -get $TESTDIR/$testGetInputs/test_2.txt");
+    assertTrue("Does Get command worked when no destination is specified?",
+               sh.getRet() == 0);
+
+    sh.exec("diff test_2.txt $testGetInputs/test_2.txt");
+    assertTrue("files differ in content", sh.getRet() == 0);
+
+    sh.exec("rm -f test_2.txt");
+    assertTrue("could not remove a file", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testGetWithCrc() {
+    println("testGetWithCrc");
+    sh.exec("mkdir -p temp_testget/test_optionscrc");
+    //get a text file with crc
+    assertTrue("Able to create directory?", sh.getRet() == 0);
+
+    //get test_2.txt file from hdfs a location where the file already exists
+    sh.exec("hdfs dfs -get -crc $TESTDIR/$testGetInputs/test_2.txt " +
+            "temp_testget/test_optionscrc/test_4.txt");
+    assertTrue("Does get command worked properly with crc option?",
+               sh.getRet() == 0);
+
+    sh.exec("ls -la temp_testget/test_optionscrc");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does get command download file with crc?",
+                scripts.lookForGivenString(sh.getOut(),
+                                          ".test_4.txt.crc") == true);
+    // now compare the contents
+    sh.exec("diff temp_testget/test_optionscrc/test_4.txt $testGetInputs/test_2.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testGetWithoutCrc() {
+    println("testGetWithoutCrc");
+
+    //get a text file without crc
+    sh.exec("hdfs dfs -get -ignoreCrc $TESTDIR/$testGetInputs/test_1.txt " +
+            "temp_testget/test_5.txt");
+    assertTrue("get command failed on HDFS", sh.getRet() == 0);
+
+    sh.exec("ls -la temp_testget");
+    assertTrue("listing files/directories failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does get command skipped crc file properly?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          ".test_5.txt.crc") == false);
+    // now compare the contents
+    sh.exec("diff $testGetInputs/test_1.txt temp_testget/test_5.txt");
+    logError(sh);
+    assertTrue("files differ in content", sh.getRet() == 0);
+  }
+}
+

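TestGet (and the classes that follow) call scripts.lookForGivenString(...), a helper from a CommonFunctions class that is not part of this hunk. Judging from the call sites it scans shell output for a substring; the sketch below is an assumption made purely for readability, not the actual class from the patch:

    // Assumed shape of the CommonFunctions helper used as
    // scripts.lookForGivenString(sh.getOut(), "some token")
    class CommonFunctionsSketch {
      boolean lookForGivenString(List<String> lines, String token) {
        // true as soon as any output line contains the token
        for (String line : lines) {
          if (line.contains(token)) {
            return true
          }
        }
        return false
      }
    }

    assert new CommonFunctionsSketch().lookForGivenString(
        ["drwxr-xr-x   - user group", "-rw-r--r--   3 user group 12 /user/foo"], "/user/foo")
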
http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
new file mode 100644
index 0000000..d6742ac
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestLs.groovy
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestLs {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testLsInputDir = "testLsInputDir" + date;
+  private static String testLsInputs = "test_data_TestLs"
+  private static String user_testinputdir  = USERNAME+"/"+testLsInputDir+"/"+
+                                             testLsInputs;
+  static List<String> TestLs_output = new ArrayList<String>();
+  static boolean result = false;
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestLs.class, "." , null);
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("cp -r test_data test_data_TestLs");
+    logError(sh);
+    assertTrue("Could not copy data into test_data_TestLs", sh.getRet() == 0);
+
+    sh.exec("hadoop fs -test -d /user/$USERNAME/$testLsInputDir");
+    if (sh.getRet() == 0) {
+      println("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir")
+      sh.exec("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hadoop fs -mkdir $testLsInputDir");
+    assertTrue("Could not create input directory", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hadoop fs -put $testLsInputs /user/$USERNAME/$testLsInputDir");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    println("Running ls:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hadoop fs -test -d /user/$USERNAME/$testLsInputDir");
+    if (sh.getRet() == 0) {
+      sh.exec("hadoop fs -rm -r -skipTrash /user/$USERNAME/$testLsInputDir");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testLsInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testLsInputs");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testLs() {
+    println("TestLs");
+    // test whether root listing of ls command works
+    sh.exec("hdfs dfs -ls / ");
+    assertTrue("Able to list /user contents?",
+               scripts.lookForGivenString(sh.getOut(), "/user") == true);
+
+    // test whether a directory exists with the user name under '/user'
+    sh.exec("hdfs dfs -ls /user/$USERNAME ");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    assertTrue("Able to list /user contents?",
+                scripts.lookForGivenString(sh.getOut(),
+                                          "/user/"+USERNAME) == true);
+  }
+
+  @Test
+  public void testLsWithRegularExpressions() {
+    println("testLsWithRegularExpressions");
+    //test whether one can list files using a regular expression
+    sh.exec("hdfs dfs -ls /user/$USERNAME/$testLsInputDir/*");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    assertTrue("Able to list contents with regular expressions?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "/user/"+user_testinputdir) == true);
+  }
+
+  @Test
+  public void testLsVerifyOutputStructureForDirectory() {
+    println("TestLsVerifyOutputStructure");
+    result = false;
+    //verify the structure of the output of ls command for a directory
+    sh.exec("hdfs dfs -ls -R /user/$USERNAME/$testLsInputDir");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    TestLs_output=sh.getOut();
+
+    // verify that default permissions are listed in ls output for directory
+    assertTrue("Does ls outputs directory permissons properly?",
+               scripts.lookForGivenString(TestLs_output,
+               "drwxr-xr-x") == true);
+
+    result = false;
+    String searchDir = "/user/"+USERNAME+"/"+testLsInputDir;
+    for( String output_String : TestLs_output) {
+      String[] string = output_String.split("\\s+");
+      if(string[1].equals("-") && string[2].equals(USERNAME) &&
+         string[4].equals("0") && output_String.contains(searchDir)) {
+        result = true;
+        break;
+      }
+    }
+    // verify that no replication factor is set in the output
+    assertTrue("Does ls output contains proper data?", result == true);
+  }
+
+  @Test
+  public void testLsVerifyOutputStructureForFile() {
+    println("testLsVerifyOutputStructureForFile");
+    result = false;
+    // verify the structure of the output of ls command for a file
+    sh.exec("hdfs dfs -ls /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_2.txt");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+    String fileName = "/user/"+user_testinputdir+"/test_2.txt";
+    TestLs_output=sh.getOut();
+    for( String output_String : TestLs_output)
+    {
+      String[] string = output_String.split("\\s+");
+      if(output_String.contains("-rw-r--r--") && string[1].equals("3") &&
+         string[2].equals(USERNAME) && output_String.contains(fileName)) {
+        result = true;
+        break;
+      }
+    }
+    assertTrue("Does the file listing happened properly?", result == true);
+    result = false;
+  }
+
+  @Test
+  public void testLsWithNonExistingDirectory() {
+   println("testLsWithNonExistingDirectory");
+    // verify that commands fails when wrong arguments are provided
+    sh.exec("hdfs dfs -ls -r /user/$USERNAME/$testLsInputDir/$testLsInputs");
+    assertTrue("wrong parameter ls -r command successfully executed on HDFS",
+               sh.getRet() == 255);
+  }
+
+  @Test
+  public void testLsWithdOption() {
+    println("testLsWithdOption");
+    //verify that when '-d' is used parent directory is listed
+    String dirName = "/user/$USERNAME/$testLsInputDir/$testLsInputs";
+    sh.exec("hdfs dfs -ls -d $dirName");
+ 
+    assertTrue("listing directory failed on HDFS", sh.getRet() == 0);
+    TestLs_output = sh.getOut();
+    String output_String = TestLs_output.get(0).toString();
+    assertTrue("Does output Contains only One Line?",
+               TestLs_output.size() == 1);
+    assertTrue("Does output Contains only directory Name?",
+               scripts.lookForGivenString(TestLs_output, dirName) == true);
+  }
+
+  @Test
+  public void testLsForRecursiveListing() {
+    println("testLsForRecursiveListing");
+    //verify that when '-R' is used the files are listed recursively
+    sh.exec("hdfs dfs -ls -R /user/$USERNAME/$testLsInputDir/$testLsInputs");
+    assertTrue("listing recursive directory structure failed on HDFS",
+                sh.getRet() == 0);
+
+    TestLs_output = sh.getOut();
+    String fileName = "/user/"+user_testinputdir+"/test_1.txt";
+    assertTrue("Does ls output contains file " + fileName + "?",
+               scripts.lookForGivenString(TestLs_output, fileName) == true);
+
+    fileName = "/user/"+user_testinputdir+"/test_2.txt";
+    assertTrue("Does ls output contains file " + fileName + "?",
+               scripts.lookForGivenString(TestLs_output, fileName) == true);
+
+    fileName = "/user/"+user_testinputdir+"/test.zip";
+    assertTrue("Does ls output contains file " + fileName + "?",
+                scripts.lookForGivenString(TestLs_output, fileName) == true);
+  }
+
+  @Test
+  public void testLsForHumanReadableFormat() {
+    println("testLsForHumanReadableFormat");
+    //verify that '-h' lists file sizes in a human readable format
+    sh.exec("hdfs dfs -ls -h /user/$USERNAME/$testLsInputDir/" +
+            "$testLsInputs/test_3 ");
+    assertTrue("Able to list file size in human readable format?",
+               sh.getRet() == 0);
+    TestLs_output=sh.getOut();
+
+    String fileName = "/user/$USERNAME/$testLsInputDir/$testLsInputs/test_3";
+    assertTrue("Does output contains proper size value for " + fileName + "?",
+               scripts.lookForGivenString(sh.getOut(), "131.8 K") == true);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
new file mode 100644
index 0000000..604cc82
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMkdir.groovy
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestMkdir {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for mkdir absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testMkdirInputDir = "testMkdirInputDir" + date;
+  private static String testMkdirOut = "testMkdirOut" + date;
+  private static int repfactor = 2;
+  private static String user_testinputdir  = USERNAME+"/"+testMkdirInputDir;
+  static List<String> TestMkdir_output = new ArrayList<String>();
+  static List<String> TestMkdir_error = new ArrayList<String>();
+  private static String TESTDIR  = "/user/$USERNAME/$testMkdirInputDir";
+  private CommonFunctions scripts = new CommonFunctions();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestMkdir.class, "." , null);
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testMkdirOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMkdirOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testMkdirBasics() {
+    println("TestMkdirBasics");
+    result = false;
+    // test whether basic mkdir command works
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test10");
+    assertTrue("Able to create directory?", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR ");
+    assertTrue("Directory found on HDFS?", sh.getRet() == 0);
+
+    TestMkdir_output=sh.getOut();
+    for (String output_String : TestMkdir_output) {
+      String[] string= output_String.split(" ");
+      if (output_String.contains("test10") &&
+          string[0].contains("drwxr-xr-x")) {
+        result = true;
+        break;
+      }
+    }
+    assertTrue("Does Directory created properly on hdfs?", result == true);
+  }
+
+  @Test
+  public void testMkdirWithpOption() {
+    println("testMkdirWithpOption");
+    // test whether mkdir command works with -p option
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR/test1/test2");
+    assertTrue("Could not create directory on HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/test1 ");
+    assertTrue("directory not found on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test1/test2 folder got created with -p option?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test1/test2") == true);
+  }
+
+  @Test
+  public void testMkdirWithOutpOption() {
+    println("testMkdirWithOutpOption");
+    /* 
+     * test whether a directory can be created when its parent directory
+     * does not yet exist and '-p' is not used
+     */
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test2/test2");
+    assertTrue("Directory created on HDFS without -p option", sh.getRet() == 1);
+
+    String errMsg = "mkdir: `$TESTDIR/test2/test2': No such file or directory";
+    assertTrue("Does $TESTDIR/test2/test2 folder created without -p option?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testMkdirWithExistingDir() {
+    println("testMkdirWithExistingDir");
+    // test whether msg is thrown if the directory already exists
+    sh.exec("hdfs dfs -mkdir $TESTDIR");
+    assertTrue("Able to create existing directory?", sh.getRet() == 1);
+
+    String errMsg = "mkdir: `$TESTDIR': File exists";
+    assertTrue("Does $TESTDIR folder created",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testMkdirWithMultipleDirectories() {
+    //test creating multiple directories
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR/test2 $TESTDIR/test3");
+    assertTrue("Able to Create multiple directories on HDFS?",
+               sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -d $TESTDIR/test2");
+    assertTrue("ls command failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test2 folder created?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test2") == true);
+
+    sh.exec("hdfs dfs -ls -d $TESTDIR/test3");
+    assertTrue("ls command failed on HDFS", sh.getRet() == 0);
+
+    assertTrue("Does $TESTDIR/test3 folder created?",
+               scripts.lookForGivenString(sh.getOut(),
+                                          "$TESTDIR/test3") == true);
+  }
+}

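The setUp()/tearDown() methods in these classes repeat the same probe-then-delete idiom (hdfs dfs -test -d followed by -rm -r -skipTrash when the directory exists). A possible helper capturing that idiom is sketched below, assuming the same Shell harness used above; the name removeHdfsDirIfPresent is invented here and is not part of the patch:

    import org.apache.bigtop.itest.shell.Shell

    // Hypothetical helper mirroring the test-then-remove idiom from setUp()/tearDown()
    void removeHdfsDirIfPresent(Shell sh, String dir) {
      sh.exec("hdfs dfs -test -d $dir")
      if (sh.getRet() == 0) {                      // directory exists
        sh.exec("hdfs dfs -rm -r -skipTrash $dir")
        assert sh.getRet() == 0 : "Could not remove $dir"
      }
    }

    // usage: removeHdfsDirIfPresent(new Shell("/bin/bash -s"), "/user/someuser/testMkdirInputDir")
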
http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
new file mode 100644
index 0000000..5cc741a
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestMv.groovy
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import org.junit.runners.MethodSorters;
+
+public class TestMv {
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static final String USERDIR = System.getProperty("user.dir");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testMvInputDir = "testMvInputDir" + date;
+  private static String testMvInputs = "test_data_TestMv"
+  private static String testMvOut = "testMvOut" + date;
+  private static String testMvOutCmp = "testMvOutCmp" + date;
+  private static String TESTDIR  = "/user/$USERNAME/$testMvInputDir";
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestMv.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestMv");
+    assertTrue("Could not copy data into test_data_TestMv", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testMvInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // set the replication if file exists
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testMvInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+
+    sh.exec("test -d temp_testmv");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testmv");
+    }
+    sh.exec("mkdir temp_testmv");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+
+    println("Running mv:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testMvOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testMvOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -d temp_testmv");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf temp_testmv");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testMvInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testMvInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+  }
+
+  @Test
+  public void testMvFile() {
+    println("testMvFile");
+    //mv file from one hdfs location to other
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_1.txt $TESTDIR");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test_1.txt");
+    assertTrue("Able to find original file?", sh.getRet() == 1);
+
+    sh.exec("hdfs dfs -ls $TESTDIR/test_1.txt");
+    assertTrue("Able to find file in moved location?", sh.getRet() == 0);
+
+    // Now verify the moved file data
+    sh.exec("hdfs dfs -get $TESTDIR/test_1.txt temp_testmv/test_1.txt");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testmv/test_1.txt test_data/test_1.txt");
+    assertTrue("files differ", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testMvDirectory() {
+    println("testMvDirectory");
+    //mv dir from one hdfs location to other
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs $TESTDIR/test_moved");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    // check that original contents are no longer exists
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs");
+    assertTrue("Able to list deleted files?", sh.getRet() == 1);
+
+    // check the moved location
+    sh.exec("hdfs dfs -ls -R $TESTDIR/test_moved");
+    assertTrue("Able to list moved files?", sh.getRet() == 0);
+
+    List out_msgs = sh.getOut();
+    Boolean success_1= false;
+    Boolean success_2= false;
+    Iterator out_iter = out_msgs.iterator();
+    while (out_iter.hasNext()) {
+      String next_val = out_iter.next();
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_moved/test_2.txt")) {
+        success_1 = true;
+        continue;
+      }
+      if (next_val.contains("-rw-r--r--") && next_val.contains("$USERNAME") &&
+          next_val.contains("$TESTDIR/test_moved/test.zip"))  {
+        success_2 = true;
+        continue;
+      }
+    }
+    assertTrue("Does the moved files details are correct?",
+               success_1 == true && success_2 == true);
+
+    // now move back the files
+    sh.exec("hdfs dfs -mv $TESTDIR/test_moved $TESTDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+ 
+  @Test
+  public void testMvMultipleFiles() {
+    println("testMvMultipleFiles");
+    sh.exec("hdfs dfs -test -d $TESTDIR/test_moved");
+    if (sh.getRet() == 0)
+    {
+      sh.exec("hdfs dfs -rm -r $TESTDIR/test_moved");
+      assertTrue("Able to clear contents of dir $TESTDIR/test_moved?",
+                 sh.getRet() == 0);
+    }
+    //mv multiple files from one hdfs location to other
+    sh.exec("hdfs dfs -mkdir $TESTDIR/test_moved");
+    assertTrue("could not create a dir", sh.getRet() == 0);
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_2.txt " +
+            "$TESTDIR/$testMvInputs/test.zip $TESTDIR/test_moved");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    // check that files in old location got deleted
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test_2.txt");
+    assertTrue("Does $TESTDIR/$testMvInputs/test_2.txt not moved?", sh.getRet() == 1);
+    sh.exec("hdfs dfs -ls $TESTDIR/$testMvInputs/test.zip");
+    assertTrue("Does $TESTDIR/$testMvInputs/test.zip not moved?", sh.getRet() == 1);
+
+    // check that files present in new location
+    sh.exec("hdfs dfs -ls $TESTDIR/test_moved/test_2.txt");
+    assertTrue("Is $TESTDIR/test_moved/test_2.txt not present?",
+               sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls $TESTDIR/test_moved/test.zip");
+    assertTrue("Is $TESTDIR/test_moved/test_2.txt not present?",
+               sh.getRet() == 0);
+
+    // verify the moved files data
+    sh.exec("hdfs dfs -get $TESTDIR/test_moved/test.zip temp_testmv");
+    assertTrue("get command failed", sh.getRet() == 0);
+    sh.exec("diff temp_testmv/test.zip test_data/test.zip");
+    assertTrue("files differ", sh.getRet() == 0);
+
+    // Now move back files
+    sh.exec("hdfs dfs -mv $TESTDIR/test_moved/test_2.txt " +
+            "$TESTDIR/test_moved/test.zip $TESTDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testMvWithOutProperInputs() {
+    println("testMvWithOutProperInputs");
+    //mv files from one hdfs, but dont provide the destination
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_3");
+    assertTrue("mv command failed", sh.getRet() == 255);
+
+    List err_msgs = sh.getErr();
+    Boolean failure= false;
+    String failure_msg = "-mv: Not enough arguments: expected 2 but got 1";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("get command failed", failure == true);
+  }
+
+  @Test
+  public void testMvNonexistingFile() {
+    println("testMvNonexistingFile");
+    //mv non existing file from one hdfs to other
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_13.txt $TESTDIR/temp_moved");
+    assertTrue("mv command failed", sh.getRet() == 1);
+
+    List err_msgs = sh.getErr();
+    boolean failure= false;
+    String failure_msg = "mv: `$TESTDIR/$testMvInputs/test_13.txt': " +
+                         "No such file or directory";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("Does mv returned proper error message for non existing file?",
+               failure == true);
+  }
+
+  @Test
+  public void testMvProtocol() {
+    println("testMvProtocol");
+    //mv files from one hdfs location to a local location
+    sh.exec("hdfs dfs -mv $TESTDIR/$testMvInputs/test_2.txt file://$USERDIR");
+    assertTrue("mv command failed", sh.getRet() == 1);
+    List err_msgs = sh.getErr();
+    Boolean failure= false;
+    String failure_msg = "mv: `$TESTDIR/$testMvInputs/test_2.txt': " +
+                         "Does not match target filesystem";
+    if (err_msgs.get(0).toString().contains(failure_msg)){
+      failure = true;
+    }
+    assertTrue("get command failed", failure == true);
+
+    //mv dir from one local location to another location
+    sh.exec("hdfs dfs -mv file://$USERDIR/$testMvInputs/ " +
+            "file://$USERDIR/temp_testmv_1");
+    assertTrue("mv command failed", sh.getRet() == 0);
+
+    sh.exec("ls $testMvInputs");
+    assertTrue("Does mv source files still present?", sh.getRet() == 2);
+
+    sh.exec("ls temp_testmv_1");
+    assertTrue("listing file failed", sh.getRet() == 0);
+
+    //revert the changes
+    sh.exec("hdfs dfs -mv file://$USERDIR/temp_testmv_1 " +
+            "file://$USERDIR/$testMvInputs");
+    assertTrue("mv command failed", sh.getRet() == 0);
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
new file mode 100644
index 0000000..6f34363
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestPut.groovy
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError;
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestPut {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  //extracting user identity for ls absolute path
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testPutInputDir = "testPutInputDir" + date;
+  private static String testPutInputs = "test_data_TestPut"
+  private static String testPutOut = "testPutOut" + date;
+  private static String testPutOutCmp = "testPutOutCmp" + date;
+  private static String user_testinputdir = USERNAME+"/"+testPutInputDir+"/"+
+                                             testPutInputs;
+  static List<String> TestPut_output = new ArrayList<String>();
+  static List<String> TestPut_error = new ArrayList<String>();
+  private static String TESTDIR  = "/user/$USERNAME/$testPutInputDir";
+  static boolean result = false;
+  private CommonFunctions scripts = new CommonFunctions();
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestPut.class, "." , null);
+    sh.exec("cp -r test_data test_data_TestPut");
+    assertTrue("Could not copy data into test_data_TestPut", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      println("hdfs dfs -rm -r -skipTrash $TESTDIR")
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    println("Running Put:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testPutOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testPutOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testPutInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testPutInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testPutDirectory() {
+    println("testPutDirectory");
+    // upload directory to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs $TESTDIR ");
+    assertTrue("Could not put files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -ls -R $TESTDIR/$testPutInputs ");
+    assertTrue("could not find the copied directory on hdfs",
+               sh.getRet() == 0);
+
+    assertTrue("Able to find uploaded files on hdfs?",
+               scripts.lookForGivenString(sh.getOut(),"test_3") == true);
+    result = false;
+  }
+
+  @Test
+  public void testPutFile() {
+    println("testPutFile");
+    // upload single files
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -cat $TESTDIR/test_1.txt &> $testPutOut");
+    assertTrue("Able to cat data from $TESTDIR/test_1.txt from hdfs?",
+               sh.getRet() == 0);
+
+    sh.exec("cat $testPutInputs/test_1.txt &> $testPutOutCmp");
+    assertTrue("Able to cat data from $testPutInputs/test_1.txt from local?",
+               sh.getRet() == 0);
+
+    sh.exec("diff $testPutOutCmp $testPutOut");
+    assertTrue("Uploaded file data differs with local file?",
+               sh.getRet() == 0);
+  }
+
+  @Test
+  public void testPutMultipleFiles() {
+    println("testPutMultipleFiles");
+    // copy multiple input files to hdfs
+    sh.exec("hdfs dfs -put $testPutInputs/test_2.txt $testPutInputs/test.zip "+
+            "$testPutInputs/test_3 $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -ls -R $TESTDIR ");
+    assertTrue("could not find the copied directory on hdfs",
+               sh.getRet() == 0);
+
+    assertTrue("Does test_2.txt uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test_2.txt") == true);
+
+    assertTrue("Does test.zip uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test.zip") == true);
+
+    assertTrue("Does test_3 uploaded properly?",
+               scripts.lookForGivenString(sh.getOut(),"test_3") == true);
+  }
+
+  @Test
+  public void testPutNonExistingFile() {
+    println("testPutNonExistingFile");
+    sh.exec("hdfs dfs -put $testPutInputs/test_3.txt $TESTDIR");
+    assertTrue("A non existing file got copied to hdfs", sh.getRet() == 1);
+
+    String searchToken = "put: `"+testPutInputs+"/test_3.txt': " +
+                         "No such file or directory";
+    println(searchToken);
+    assertTrue("Able to Upload non-existing file?",
+               scripts.lookForGivenString(sh.getErr(), searchToken) == true);
+  }
+
+  @Test
+  public void testPutToOverWriteFile() {
+    println("testPutNonExistingFile");
+    // copy a file which is already present on HDFS at the destination
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_1.txt");
+    if (sh.getRet() == 1){
+      sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR");
+      assertTrue("Able to upload file?", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -put $testPutInputs/test_1.txt $TESTDIR ");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 1);
+
+    String searchToken = "put: `/user/"+USERNAME+"/"+
+                          testPutInputDir+"/test_1.txt': File exists";
+    assertTrue("Able to Upload non-existing file?",
+               scripts.lookForGivenString(sh.getErr(), searchToken) == true);
+  }
+}
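
Note: the assertions in these tests delegate string matching to a
CommonFunctions.lookForGivenString(...) helper that is not included in this
excerpt. Judging purely from the call sites (a List of captured shell output
lines plus a search token, returning a boolean), a minimal sketch of such a
helper could look like this; only the class and method names come from the
calls above, the body is an assumption:

    package org.apache.bigtop.itest.hadoop.hdfs

    class CommonFunctions {
      /**
       * Return true if any line of the captured shell output contains
       * the given token (plain substring match, no regex).
       */
      boolean lookForGivenString(List<String> output, String token) {
        // toString() guards against non-String entries in the output list
        return output.any { line -> line.toString().contains(token) }
      }
    }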


[2/3] bigtop git commit: BIGTOP-2009: added new tests for chgrp, cp, ls, mv, du, put, get, mkdir, stat and touchz

Posted by yw...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestStat.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestStat.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestStat.groovy
new file mode 100644
index 0000000..1e6f89a
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestStat.groovy
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestStat {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  // extract the user identity to build absolute HDFS paths
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testStatInputDir = "testStatInputDir" + date;
+  private static String testStatInputs = "test_data_TestStat";
+  private static String testStatOut = "testStatOut" + date;
+  private static String testStatOutCmp= "testStatOutCmp" + date;
+  private static int repfactor = 2;
+  private static String TESTDIR  = "/user/$USERNAME/$testStatInputDir";
+  private CommonFunctions scripts = new CommonFunctions();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestStat.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestStat");
+    assertTrue("Could not copy data into test_data_TestStat",
+               sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS",
+               sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testStatInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    // verify the upload landed, then raise the replication factor of test_3
+    sh.exec("hdfs dfs -test -f $TESTDIR/$testStatInputs/test_2.txt");
+    assertTrue("Could not find files on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -setrep $repfactor " +
+            "$TESTDIR/$testStatInputs/test_3");
+    assertTrue("Could not set replication factor", sh.getRet() == 0);
+
+    println("Running stat:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+    sh.exec("hdfs dfs -test -d $testStatOut");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $testStatOut");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+    sh.exec("hdfs dfs -test -d $testStatOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $testStatOutCmp");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testStatInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testStatInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testStatBasics() {
+    println("TestStatBasics");
+    // test whether basic stat command works
+    sh.exec("hdfs dfs -stat $TESTDIR/$testStatInputs | awk -F ':' " +
+            "'BEGIN { OFS=\":\"} {print \$1,\$2}' &> $testStatOut");
+    assertTrue("stat command on HDFS failed", sh.getRet() == 0);
+
+    // compare -stat output with -ls output
+    sh.exec("hdfs dfs -ls $TESTDIR | tail -1 | awk '{print \$6, \$7}' &> " +
+            "$testStatOutCmp");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+
+    sh.exec("diff $testStatOut $testStatOutCmp");
+    assertTrue("Does stat command shows correct data?", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -stat $TESTDIR/$testStatInputs/* | awk -F ':' " +
+            "'BEGIN { OFS=\":\"} {print \$1,\$2}' &> $testStatOut");
+    assertTrue("stat command on HDFS failed", sh.getRet() == 0);
+
+    // compare -stat output with -ls output
+    sh.exec("hdfs dfs -ls $TESTDIR/$testStatInputs | tail -4 | " +
+            "awk '{print \$6, \$7}' &> $testStatOutCmp");
+    assertTrue("ls command on HDFS failed", sh.getRet() == 0);
+
+    sh.exec("diff $testStatOut $testStatOutCmp");
+    assertTrue("Does stat provides correct data?", sh.getRet() == 0);
+  }
+
+  @Test
+  public void testStatReplication() {
+    println("testStatReplication");
+
+    sh.exec("hdfs dfs -stat \"%r\" $TESTDIR/$testStatInputs/test_3");
+    assertTrue("Able to get Replication details of a file using stat",
+               sh.getRet() == 0);
+
+    if (!(sh.getOut().get(0).toString().equals("2"))) {
+      assertTrue("Does stat provides correct value for replication?", false);
+    }
+  }
+
+  @Test
+  public void testStatForFileType() {
+    println("testStatForFileType");
+
+    sh.exec("hdfs dfs -stat \"%F\" $TESTDIR/$testStatInputs/test_3");
+    assertTrue("Able to get file type details of a file using stat",
+               sh.getRet() == 0);
+
+    if (!(sh.getOut().get(0).toString().equals("regular file"))) {
+      assertTrue("Does stat provides correct value for File type?", false);
+    }
+
+    sh.exec("hdfs dfs -stat \"%F\" $TESTDIR/$testStatInputs");
+    assertTrue("Able to get file type  details of a file using stat",
+               sh.getRet() == 0);
+
+    if (!(sh.getOut().get(0).toString().equals("directory"))) {
+      assertTrue("Does stat provides correct value for File type?", false);
+    }
+  }
+
+  @Test
+  public void testStatForName() {
+    println("testStatForName");
+
+    sh.exec("hdfs dfs -stat \"%n\" $TESTDIR/$testStatInputs/test_3");
+    assertTrue("Able to get name of a file using stat",
+               sh.getRet() == 0);
+
+    if (!(sh.getOut().get(0).toString().equals("test_3"))) {
+      assertTrue("Does stat provides correct value for name?", false);
+    }
+  }
+
+  @Test
+  public void testStatWithAllOptions() {
+    println("testStatWithAllOptions");
+
+    sh.exec("hdfs dfs -stat \"%F %u:%g %b %y %n\" "+
+            "$TESTDIR/$testStatInputs/test_3");
+    assertTrue("Able to get all details of a file using stat",
+               sh.getRet() == 0);
+
+    String output = sh.getOut().get(0).toString();
+    if (!(output.contains("regular file") && output.contains("135008") && output.contains("test_3") ) ) {
+      assertTrue("Does stat provides correct values in stat ouput?", false);
+    }
+  }
+
+  @Test
+  public void testStatNegative() {
+    println("TestStatNegative");
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -stat $TESTDIR/$testStatInputs");
+    assertTrue("stat command on HDFS failed", sh.getRet() == 1);
+
+    String errMsg = "stat: `$TESTDIR/$testStatInputs': " +
+                    "No such file or directory";
+    assertTrue("Does stat provides correct message for non-existent fodler?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+}
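
For reference, the "hdfs dfs -stat" format specifiers exercised above map to
file attributes as follows: %F is the type ("regular file" or "directory"),
%u and %g the owner and group, %b the length in bytes, %y the modification
time, %n the file name and %r the replication factor. A minimal sketch of a
combined query using the same Shell wrapper (the path and the sample output
are purely illustrative):

    import org.apache.bigtop.itest.shell.Shell

    Shell sh = new Shell("/bin/bash -s")
    sh.exec('hdfs dfs -stat "%F %u:%g %b %y %n %r" /user/someuser/somedir/test_3')
    // prints something like: [regular file someuser:hadoop 135008 2015-09-06 07:52:04 test_3 2]
    println(sh.getOut())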

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTouchz.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTouchz.groovy b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTouchz.groovy
new file mode 100644
index 0000000..226dd93
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/groovy/org/apache/bigtop/itest/hadoop/hdfs/TestTouchz.groovy
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.bigtop.itest.hadoop.hdfs;
+
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.*;
+import org.junit.Test;
+import org.apache.bigtop.itest.shell.Shell;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.bigtop.itest.JarContent;
+import static org.apache.bigtop.itest.LogErrorsUtils.logError
+import java.util.ArrayList;
+import java.util.List;
+
+public class TestTouchz {
+
+  private static Shell sh = new Shell("/bin/bash -s");
+  // extract the user identity to build absolute HDFS paths
+  private static final String USERNAME = System.getProperty("user.name");
+  private static String date = sh.exec("date").getOut().get(0).
+                               replaceAll("\\s","").replaceAll(":","");
+  private static String namenode = "";
+  private static String testTouchzInputDir = "testTouchzInputDir" + date;
+  private static String testTouchzInputs = "test_data_TestTouchz";
+  private static String testTouchzOut = "testTouchzOut" + date;
+  private static String testTouchzOutCmp= "testTouchzOutCmp" + date;
+  private static int repfactor = 2;
+  private static String TESTDIR  = "/user/$USERNAME/$testTouchzInputDir";
+  private static String user_testinputdir = USERNAME+"/"+testTouchzInputDir+
+                                            "/"+testTouchzInputs;
+  private CommonFunctions scripts = new CommonFunctions();
+  static List<String> TestTouchz_output = new ArrayList<String>();
+  static List<String> TestTouchz_error = new ArrayList<String>();
+  static boolean result = false;
+
+  @BeforeClass
+  public static void setUp() {
+    // unpack resource
+    JarContent.unpackJarContainer(TestTouchz.class, "." , null);
+
+    sh.exec("cp -r test_data test_data_TestTouchz");
+    assertTrue("Could not copy data into test_data_TestTouchz", sh.getRet() == 0);
+
+    // get namenode hostname from core-site.xml
+    Configuration conf = new Configuration();
+    namenode = conf.get("fs.defaultFS");
+    if (namenode == null) {
+      namenode = conf.get("fs.default.name");
+    }
+    assertTrue("Could not find namenode", namenode != null);
+
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("hdfs dfs -mkdir -p $TESTDIR");
+    assertTrue("Could not create input directory on HDFS", sh.getRet() == 0);
+
+    // copy input directory to hdfs
+    sh.exec("hdfs dfs -put $testTouchzInputs $TESTDIR");
+    assertTrue("Could not copy files to HDFS", sh.getRet() == 0);
+
+    println("Running touchz:");
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    sh.exec("hdfs dfs -test -d $TESTDIR");
+    if (sh.getRet() == 0) {
+      sh.exec("hdfs dfs -rm -r -skipTrash $TESTDIR");
+      assertTrue("Could not remove input directory", sh.getRet() == 0);
+    }
+
+    sh.exec("test -f $testTouchzOut");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testTouchzOut");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+    sh.exec("test -f $testTouchzOutCmp");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testTouchzOutCmp");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+
+    sh.exec("test -d $testTouchzInputs");
+    if (sh.getRet() == 0) {
+      sh.exec("rm -rf $testTouchzInputs");
+      assertTrue("Could not remove output directory/file", sh.getRet() == 0);
+    }
+  }
+
+  @Test
+  public void testTouchzBasics() {
+    println("testTouchzBasics");
+    // test whether the basic touchz command works
+    sh.exec("hdfs dfs -touchz $TESTDIR/test_3.txt");
+    assertTrue("touchz command on HDFS failed", sh.getRet() == 0);
+    // check if file is present on hdfs
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_3.txt");
+    assertTrue("file does not found on HDFS", sh.getRet() == 0);
+    // check if path is empty
+    sh.exec("hdfs dfs -test -s $TESTDIR/test_3.txt");
+    assertTrue("created file is not zero size", sh.getRet() == 1);
+
+    // try creating multiple files with touchz
+    sh.exec("hdfs dfs -touchz $TESTDIR/test_4.txt $TESTDIR/test_5.txt");
+    assertTrue("touchz command on HDFS failed", sh.getRet() == 0);
+
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_4.txt");
+    assertTrue("file does not found on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -test -s $TESTDIR/test_4.txt");
+    assertTrue("created file is not zero size", sh.getRet() == 1);
+
+    sh.exec("hdfs dfs -test -f $TESTDIR/test_5.txt");
+    assertTrue("file does not found on HDFS", sh.getRet() == 0);
+    sh.exec("hdfs dfs -test -s $TESTDIR/test_5.txt");
+    assertTrue("created file is not zero size", sh.getRet() == 1);
+  }
+
+  @Test
+  public void testTouchzToCreateDirectory() {
+    println("testTouchzNegatives");
+    sh.exec("hdfs dfs -touchz $TESTDIR/$testTouchzInputs");
+    assertTrue("touchz command on HDFS failed", sh.getRet() == 1);
+
+    String errMsg = "touchz: `/user/"+user_testinputdir+"': Is a directory";
+    assertTrue("Does touch failed to create zero length directory?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testTouchzInNonExistentDirectory() {
+    println("testTouchzInNonExistentDirectory");
+    sh.exec("hdfs dfs -touchz $TESTDIR/test_dir2/test_6.txt ");
+    assertTrue("touchz command on HDFS executed successfully?", sh.getRet() == 1);
+
+    String errMsg = "touchz: `/user/"+USERNAME+"/"+testTouchzInputDir+
+                    "/test_dir2/test_6.txt': No such file or directory";
+    assertTrue("Does touchz able to create files in non-existent directory?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+
+  @Test
+  public void testTouchzOnExistingFile() {
+    println("testTouchzInNonExistentDirectory");
+    sh.exec("hdfs dfs -touchz $TESTDIR/$testTouchzInputs/test_1.txt ");
+    assertTrue("Does touchz command executed properly?", sh.getRet() == 1);
+
+    String errMsg = "touchz: `/user/"+user_testinputdir+
+                    "/test_1.txt': Not a zero-length file";
+    assertTrue("Does touch able to make existing file as zero size file?",
+               scripts.lookForGivenString(sh.getErr(), errMsg) == true);
+  }
+}
+
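
For reference, the touchz assertions above rely on the exit codes of
"hdfs dfs -test": -f returns 0 when the path is an existing regular file, and
-s returns 0 only when the path is non-empty, so a freshly created zero-byte
file is expected to return 1 from -s. A minimal stand-alone sketch of that
check (the /tmp/empty_marker path is purely illustrative):

    import org.apache.bigtop.itest.shell.Shell

    Shell sh = new Shell("/bin/bash -s")
    sh.exec("hdfs dfs -touchz /tmp/empty_marker")
    assert sh.getRet() == 0   // touchz creates a zero-byte file
    sh.exec("hdfs dfs -test -f /tmp/empty_marker")
    assert sh.getRet() == 0   // the path exists and is a regular file
    sh.exec("hdfs dfs -test -s /tmp/empty_marker")
    assert sh.getRet() == 1   // zero length, so -s reports failure
    sh.exec("hdfs dfs -rm -skipTrash /tmp/empty_marker")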

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test.zip
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test.zip b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test.zip
new file mode 100644
index 0000000..f1bcc01
Binary files /dev/null and b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test.zip differ

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_1.txt
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_1.txt b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_1.txt
new file mode 100644
index 0000000..afa6ae8
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_1.txt
@@ -0,0 +1,4 @@
+1
+20
+300
+4000

http://git-wip-us.apache.org/repos/asf/bigtop/blob/96ecf29a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_2.txt
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_2.txt b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_2.txt
new file mode 100644
index 0000000..33bf531
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hadoop/src/main/resources/test_data/test_2.txt
@@ -0,0 +1,4 @@
+a
+bc
+def
+ghij