source: proiecte/HadoopJUnit/hadoop-0.20.1/docs/hdfs_shell.html @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the mail files for the Hadoop JUNit Project

  • Property svn:executable set to *
File size: 24.2 KB
Line 
1<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
2<html>
3<head>
4<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
5<meta content="Apache Forrest" name="Generator">
6<meta name="Forrest-version" content="0.8">
7<meta name="Forrest-skin-name" content="pelt">
8<title>HDFS File System Shell Guide</title>
9<link type="text/css" href="skin/basic.css" rel="stylesheet">
10<link media="screen" type="text/css" href="skin/screen.css" rel="stylesheet">
11<link media="print" type="text/css" href="skin/print.css" rel="stylesheet">
12<link type="text/css" href="skin/profile.css" rel="stylesheet">
13<script src="skin/getBlank.js" language="javascript" type="text/javascript"></script><script src="skin/getMenu.js" language="javascript" type="text/javascript"></script><script src="skin/fontsize.js" language="javascript" type="text/javascript"></script>
14<link rel="shortcut icon" href="images/favicon.ico">
15</head>
16<body onload="init()">
17<script type="text/javascript">ndeSetTextSize();</script>
18<div id="top">
19<!--+
20    |breadtrail
21    +-->
22<div class="breadtrail">
23<a href="http://www.apache.org/">Apache</a> &gt; <a href="http://hadoop.apache.org/">Hadoop</a> &gt; <a href="http://hadoop.apache.org/core/">Core</a><script src="skin/breadcrumbs.js" language="JavaScript" type="text/javascript"></script>
24</div>
25<!--+
26    |header
27    +-->
28<div class="header">
29<!--+
30    |start group logo
31    +-->
32<div class="grouplogo">
33<a href="http://hadoop.apache.org/"><img class="logoImage" alt="Hadoop" src="images/hadoop-logo.jpg" title="Apache Hadoop"></a>
34</div>
35<!--+
36    |end group logo
37    +-->
38<!--+
39    |start Project Logo
40    +-->
41<div class="projectlogo">
42<a href="http://hadoop.apache.org/core/"><img class="logoImage" alt="Hadoop" src="images/core-logo.gif" title="Scalable Computing Platform"></a>
43</div>
44<!--+
45    |end Project Logo
46    +-->
47<!--+
48    |start Search
49    +-->
50<div class="searchbox">
51<form action="http://www.google.com/search" method="get" class="roundtopsmall">
52<input value="hadoop.apache.org" name="sitesearch" type="hidden"><input onFocus="getBlank (this, 'Search the site with google');" size="25" name="q" id="query" type="text" value="Search the site with google">&nbsp; 
53                    <input name="Search" value="Search" type="submit">
54</form>
55</div>
56<!--+
57    |end search
58    +-->
59<!--+
60    |start Tabs
61    +-->
62<ul id="tabs">
63<li>
64<a class="unselected" href="http://hadoop.apache.org/core/">Project</a>
65</li>
66<li>
67<a class="unselected" href="http://wiki.apache.org/hadoop">Wiki</a>
68</li>
69<li class="current">
70<a class="selected" href="index.html">Hadoop 0.20 Documentation</a>
71</li>
72</ul>
73<!--+
74    |end Tabs
75    +-->
76</div>
77</div>
78<div id="main">
79<div id="publishedStrip">
80<!--+
81    |start Subtabs
82    +-->
83<div id="level2tabs"></div>
84<!--+
85    |end Endtabs
86    +-->
87<script type="text/javascript"><!--
88document.write("Last Published: " + document.lastModified);
89//  --></script>
90</div>
91<!--+
92    |breadtrail
93    +-->
94<div class="breadtrail">
95
96             &nbsp;
97           </div>
98<!--+
99    |start Menu, mainarea
100    +-->
101<!--+
102    |start Menu
103    +-->
104<div id="menu">
105<div onclick="SwitchMenu('menu_1.1', 'skin/')" id="menu_1.1Title" class="menutitle">Getting Started</div>
106<div id="menu_1.1" class="menuitemgroup">
107<div class="menuitem">
108<a href="index.html">Overview</a>
109</div>
110<div class="menuitem">
111<a href="quickstart.html">Quick Start</a>
112</div>
113<div class="menuitem">
114<a href="cluster_setup.html">Cluster Setup</a>
115</div>
116<div class="menuitem">
117<a href="mapred_tutorial.html">Map/Reduce Tutorial</a>
118</div>
119</div>
120<div onclick="SwitchMenu('menu_1.2', 'skin/')" id="menu_1.2Title" class="menutitle">Programming Guides</div>
121<div id="menu_1.2" class="menuitemgroup">
122<div class="menuitem">
123<a href="commands_manual.html">Commands</a>
124</div>
125<div class="menuitem">
126<a href="distcp.html">DistCp</a>
127</div>
128<div class="menuitem">
129<a href="native_libraries.html">Native Libraries</a>
130</div>
131<div class="menuitem">
132<a href="streaming.html">Streaming</a>
133</div>
134<div class="menuitem">
135<a href="fair_scheduler.html">Fair Scheduler</a>
136</div>
137<div class="menuitem">
138<a href="capacity_scheduler.html">Capacity Scheduler</a>
139</div>
140<div class="menuitem">
141<a href="service_level_auth.html">Service Level Authorization</a>
142</div>
143<div class="menuitem">
144<a href="vaidya.html">Vaidya</a>
145</div>
146<div class="menuitem">
147<a href="hadoop_archives.html">Archives</a>
148</div>
149</div>
150<div onclick="SwitchMenu('menu_selected_1.3', 'skin/')" id="menu_selected_1.3Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">HDFS</div>
151<div id="menu_selected_1.3" class="selectedmenuitemgroup" style="display: block;">
152<div class="menuitem">
153<a href="hdfs_user_guide.html">User Guide</a>
154</div>
155<div class="menuitem">
156<a href="hdfs_design.html">Architecture</a>
157</div>
158<div class="menupage">
159<div class="menupagetitle">File System Shell Guide</div>
160</div>
161<div class="menuitem">
162<a href="hdfs_permissions_guide.html">Permissions Guide</a>
163</div>
164<div class="menuitem">
165<a href="hdfs_quota_admin_guide.html">Quotas Guide</a>
166</div>
167<div class="menuitem">
168<a href="SLG_user_guide.html">Synthetic Load Generator Guide</a>
169</div>
170<div class="menuitem">
171<a href="libhdfs.html">C API libhdfs</a>
172</div>
173</div>
174<div onclick="SwitchMenu('menu_1.4', 'skin/')" id="menu_1.4Title" class="menutitle">HOD</div>
175<div id="menu_1.4" class="menuitemgroup">
176<div class="menuitem">
177<a href="hod_user_guide.html">User Guide</a>
178</div>
179<div class="menuitem">
180<a href="hod_admin_guide.html">Admin Guide</a>
181</div>
182<div class="menuitem">
183<a href="hod_config_guide.html">Config Guide</a>
184</div>
185</div>
186<div onclick="SwitchMenu('menu_1.5', 'skin/')" id="menu_1.5Title" class="menutitle">Miscellaneous</div>
187<div id="menu_1.5" class="menuitemgroup">
188<div class="menuitem">
189<a href="api/index.html">API Docs</a>
190</div>
191<div class="menuitem">
192<a href="jdiff/changes.html">API Changes</a>
193</div>
194<div class="menuitem">
195<a href="http://wiki.apache.org/hadoop/">Wiki</a>
196</div>
197<div class="menuitem">
198<a href="http://wiki.apache.org/hadoop/FAQ">FAQ</a>
199</div>
200<div class="menuitem">
201<a href="releasenotes.html">Release Notes</a>
202</div>
203<div class="menuitem">
204<a href="changes.html">Change Log</a>
205</div>
206</div>
207<div id="credit"></div>
208<div id="roundbottom">
209<img style="display: none" class="corner" height="15" width="15" alt="" src="skin/images/rc-b-l-15-1body-2menu-3menu.png"></div>
210<!--+
211  |alternative credits
212  +-->
213<div id="credit2"></div>
214</div>
215<!--+
216    |end Menu
217    +-->
218<!--+
219    |start content
220    +-->
221<div id="content">
222<div title="Portable Document Format" class="pdflink">
223<a class="dida" href="hdfs_shell.pdf"><img alt="PDF -icon" src="skin/images/pdfdoc.gif" class="skin"><br>
224        PDF</a>
225</div>
226<h1>HDFS File System Shell Guide</h1>
227<div id="minitoc-area">
228<ul class="minitoc">
229<li>
230<a href="#Overview">Overview</a>
231<ul class="minitoc">
232<li>
233<a href="#cat"> cat </a>
234</li>
235<li>
236<a href="#chgrp"> chgrp </a>
237</li>
238<li>
239<a href="#chmod"> chmod </a>
240</li>
241<li>
242<a href="#chown"> chown </a>
243</li>
244<li>
245<a href="#copyFromLocal">copyFromLocal</a>
246</li>
247<li>
248<a href="#copyToLocal"> copyToLocal</a>
249</li>
250<li>
251<a href="#count"> count </a>
252</li>
253<li>
254<a href="#cp"> cp </a>
255</li>
256<li>
257<a href="#du">du</a>
258</li>
259<li>
260<a href="#dus"> dus </a>
261</li>
262<li>
263<a href="#expunge"> expunge </a>
264</li>
265<li>
266<a href="#get"> get </a>
267</li>
268<li>
269<a href="#getmerge"> getmerge </a>
270</li>
271<li>
272<a href="#ls">ls</a>
273</li>
274<li>
275<a href="#lsr">lsr</a>
276</li>
277<li>
278<a href="#mkdir"> mkdir </a>
279</li>
280<li>
281<a href="#moveFromLocal"> moveFromLocal </a>
282</li>
283<li>
284<a href="#moveToLocal"> moveToLocal</a>
285</li>
286<li>
287<a href="#mv"> mv </a>
288</li>
289<li>
290<a href="#put"> put </a>
291</li>
292<li>
293<a href="#rm"> rm </a>
294</li>
295<li>
296<a href="#rmr"> rmr </a>
297</li>
298<li>
299<a href="#setrep"> setrep </a>
300</li>
301<li>
302<a href="#stat"> stat </a>
303</li>
304<li>
305<a href="#tail"> tail </a>
306</li>
307<li>
308<a href="#test"> test </a>
309</li>
310<li>
311<a href="#text"> text </a>
312</li>
313<li>
314<a href="#touchz"> touchz </a>
315</li>
316</ul>
317</li>
318</ul>
319</div>
320               
321<a name="N1000D"></a><a name="Overview"></a>
322<h2 class="h3">Overview</h2>
323<div class="section">
324<p>
325      The FileSystem (FS) shell is invoked by
326      <span class="codefrag">bin/hadoop fs &lt;args&gt;</span>.
327      All FS shell commands take path URIs as arguments. The URI
328      format is <em>scheme://authority/path</em>. For HDFS the scheme
329      is <em>hdfs</em>, and for the local filesystem the scheme
330      is <em>file</em>. The scheme and authority are optional. If not
331      specified, the default scheme specified in the configuration is
332      used. An HDFS file or directory such as <em>/parent/child</em>
333      can be specified as <em>hdfs://namenodehost/parent/child</em> or
334      simply as <em>/parent/child</em> (given that your configuration
335      is set to point to <em>hdfs://namenodehost</em>). Most of the
336      commands in FS shell behave like corresponding Unix
337      commands. Differences are described with each of the
338      commands. Error information is sent to <em>stderr</em> and the
339      output is sent to <em>stdout</em>.
340  </p>
341<a name="N10034"></a><a name="cat"></a>
342<h3 class="h4"> cat </h3>
343<p>
344                               
345<span class="codefrag">Usage: hadoop fs -cat URI [URI &hellip;]</span>
346                       
347</p>
348<p>
349                   Copies source paths to <em>stdout</em>.
350                   </p>
351<p>Example:</p>
352<ul>
353                               
354<li>
355                                       
356<span class="codefrag"> hadoop fs -cat hdfs://nn1.example.com/file1 hdfs://nn2.example.com/file2
357                   </span>
358                               
359</li>
360                               
361<li>
362                                       
363<span class="codefrag">hadoop fs -cat file:///file3 /user/hadoop/file4 </span>
364                               
365</li>
366                       
367</ul>
368<p>Exit Code:<br>
369                   
370<span class="codefrag"> Returns 0 on success and -1 on error. </span>
371</p>
372<a name="N10060"></a><a name="chgrp"></a>
373<h3 class="h4"> chgrp </h3>
374<p>
375                               
376<span class="codefrag">Usage: hadoop fs -chgrp [-R] GROUP URI [URI &hellip;]</span>
377                       
378</p>
379<p>
380            Change group association of files. With <span class="codefrag">-R</span>, make the change recursively through the directory structure. The user must be the owner of files, or else a super-user. Additional information is in the <a href="hdfs_permissions_guide.html">HDFS Admin Guide: Permissions</a>.
381            </p>
382<a name="N10077"></a><a name="chmod"></a>
383<h3 class="h4"> chmod </h3>
384<p>
385                               
386<span class="codefrag">Usage: hadoop fs -chmod [-R] &lt;MODE[,MODE]... | OCTALMODE&gt; URI [URI &hellip;]</span>
387                       
388</p>
389<p>
390            Change the permissions of files. With <span class="codefrag">-R</span>, make the change recursively through the directory structure. The user must be the owner of the file, or else a super-user. Additional information is in the <a href="hdfs_permissions_guide.html">HDFS Admin Guide: Permissions</a>.
391            </p>
392<a name="N1008E"></a><a name="chown"></a>
393<h3 class="h4"> chown </h3>
394<p>
395                               
396<span class="codefrag">Usage: hadoop fs -chown [-R] [OWNER][:[GROUP]] URI [URI ]</span>
397                       
398</p>
399<p>
400            Change the owner of files. With <span class="codefrag">-R</span>, make the change recursively through the directory structure. The user must be a super-user. Additional information is in the <a href="hdfs_permissions_guide.html">HDFS Admin Guide: Permissions</a>.
401            </p>
402<a name="N100A5"></a><a name="copyFromLocal"></a>
403<h3 class="h4">copyFromLocal</h3>
404<p>
405                               
406<span class="codefrag">Usage: hadoop fs -copyFromLocal &lt;localsrc&gt; URI</span>
407                       
408</p>
409<p>Similar to <a href="#put"><strong>put</strong></a> command, except that the source is restricted to a local file reference. </p>
410<a name="N100BA"></a><a name="copyToLocal"></a>
411<h3 class="h4"> copyToLocal</h3>
412<p>
413                               
414<span class="codefrag">Usage: hadoop fs -copyToLocal [-ignorecrc] [-crc] URI &lt;localdst&gt;</span>
415                       
416</p>
417<p> Similar to <a href="#get"><strong>get</strong></a> command, except that the destination is restricted to a local file reference.</p>
418<a name="N100CF"></a><a name="count"></a>
419<h3 class="h4"> count </h3>
420<p>
421                               
422<span class="codefrag">Usage: hadoop fs -count [-q]  &lt;paths&gt;</span>
423                       
424</p>
425<p>
426                                Count the number of directories, files and bytes under the paths that match the specified file pattern. The output columns are:<br>
427<span class="codefrag">DIR_COUNT, FILE_COUNT, CONTENT_SIZE, FILE_NAME</span>. <br>
428<br>The output columns with <span class="codefrag">-q</span> are:<br>
429<span class="codefrag">QUOTA, REMAINING_QUOTA, SPACE_QUOTA, REMAINING_SPACE_QUOTA, DIR_COUNT, FILE_COUNT, CONTENT_SIZE, FILE_NAME</span>.
430                   </p>
431<p>Example:</p>
432<ul>
433                               
434<li>
435                                       
436<span class="codefrag"> hadoop fs -count hdfs://nn1.example.com/file1 hdfs://nn2.example.com/file2
437                   </span>
438                               
439</li>
440                               
441<li>
442                                       
443<span class="codefrag"> hadoop fs -count -q hdfs://nn1.example.com/file1
444                   </span>
445                               
446</li>
447                       
448</ul>
449<p>Exit Code:</p>
450<p>
451                               
452<span class="codefrag"> Returns 0 on success and -1 on error.</span>
453                       
454</p>
455<a name="N10108"></a><a name="cp"></a>
456<h3 class="h4"> cp </h3>
457<p>
458                               
459<span class="codefrag">Usage: hadoop fs -cp URI [URI &hellip;] &lt;dest&gt;</span>
460                       
461</p>
462<p>
463            Copy files from source to destination. This command allows multiple sources as well in which case the destination must be a directory.
464            <br>
465            Example:</p>
466<ul>
467                               
468<li>
469                                       
470<span class="codefrag"> hadoop fs -cp /user/hadoop/file1 /user/hadoop/file2</span>
471                               
472</li>
473                               
474<li>
475                                       
476<span class="codefrag"> hadoop fs -cp /user/hadoop/file1 /user/hadoop/file2 /user/hadoop/dir </span>
477                               
478</li>
479                       
480</ul>
481<p>Exit Code:</p>
482<p>
483                               
484<span class="codefrag"> Returns 0 on success and -1 on error.</span>
485                       
486</p>
487<a name="N10132"></a><a name="du"></a>
488<h3 class="h4">du</h3>
489<p>
490                               
491<span class="codefrag">Usage: hadoop fs -du URI [URI &hellip;]</span>
492                       
493</p>
494<p>
495             Displays aggregate length of files contained in the directory or the length of a file in case it's just a file.<br>
496             Example:<br>
497<span class="codefrag">hadoop fs -du /user/hadoop/dir1 /user/hadoop/file1 hdfs://nn.example.com/user/hadoop/dir1</span>
498<br>
499             Exit Code:<br>
500<span class="codefrag"> Returns 0 on success and -1 on error. </span>
501<br>
502</p>
503<a name="N1014D"></a><a name="dus"></a>
504<h3 class="h4"> dus </h3>
505<p>
506                               
507<span class="codefrag">Usage: hadoop fs -dus &lt;args&gt;</span>
508                       
509</p>
510<p>
511            Displays a summary of file lengths.
512           </p>
513<a name="N1015D"></a><a name="expunge"></a>
514<h3 class="h4"> expunge </h3>
515<p>
516                               
517<span class="codefrag">Usage: hadoop fs -expunge</span>
518                       
519</p>
520<p>Empty the Trash. Refer to <a href="hdfs_design.html">HDFS Architecture</a> for more information on Trash feature.
521           </p>
522<a name="N10171"></a><a name="get"></a>
523<h3 class="h4"> get </h3>
524<p>
525                               
526<span class="codefrag">Usage: hadoop fs -get [-ignorecrc] [-crc] &lt;src&gt; &lt;localdst&gt;</span>
527                               
528<br>
529                       
530</p>
531<p>
532           Copy files to the local file system. Files that fail the CRC check may be copied with the 
533           <span class="codefrag">-ignorecrc</span> option. Files and CRCs may be copied using the
534           <span class="codefrag">-crc</span> option.
535          </p>
536<p>Example:</p>
537<ul>
538                               
539<li>
540                                       
541<span class="codefrag"> hadoop fs -get /user/hadoop/file localfile </span>
542                               
543</li>
544                               
545<li>
546                                       
547<span class="codefrag"> hadoop fs -get hdfs://nn.example.com/user/hadoop/file localfile</span>
548                               
549</li>
550                       
551</ul>
552<p>Exit Code:</p>
553<p>
554                               
555<span class="codefrag"> Returns 0 on success and -1 on error. </span>
556                       
557</p>
558<a name="N101A4"></a><a name="getmerge"></a>
559<h3 class="h4"> getmerge </h3>
560<p>
561                               
562<span class="codefrag">Usage: hadoop fs -getmerge &lt;src&gt; &lt;localdst&gt; [addnl]</span>
563                       
564</p>
565<p>
566          Takes a source directory and a destination file as input and concatenates files in src into the destination local file. Optionally <span class="codefrag">addnl</span> can be set to enable adding a newline character at the end of each file. 
567          </p>
568<a name="N101B7"></a><a name="ls"></a>
569<h3 class="h4">ls</h3>
570<p>
571               
572<span class="codefrag">Usage: hadoop fs -ls &lt;args&gt;</span>
573           
574</p>
575<p>For a file returns stat on the file with the following format:</p>
576<p>
577               
578<span class="codefrag">permissions number_of_replicas userid  groupid  filesize modification_date modification_time filename</span>
579           
580</p>
581<p>For a directory it returns list of its direct children as in Unix. A directory is listed as:</p>
582<p>
583               
584<span class="codefrag">permissions userid groupid modification_date modification_time dirname</span>
585           
586</p>
587<p>Example:</p>
588<p>
589               
590<span class="codefrag">hadoop fs -ls /user/hadoop/file1 </span>
591           
592</p>
593<p>Exit Code:</p>
594<p>
595               
596<span class="codefrag">Returns 0 on success and -1 on error.</span>
597           
598</p>
599<a name="N101E8"></a><a name="lsr"></a>
600<h3 class="h4">lsr</h3>
601<p>
602<span class="codefrag">Usage: hadoop fs -lsr &lt;args&gt;</span>
603<br>
604              Recursive version of <span class="codefrag">ls</span>. Similar to Unix <span class="codefrag">ls -R</span>.
605              </p>
606<a name="N101FB"></a><a name="mkdir"></a>
607<h3 class="h4"> mkdir </h3>
608<p>
609                               
610<span class="codefrag">Usage: hadoop fs -mkdir &lt;paths&gt;</span>
611                               
612<br>
613                       
614</p>
615<p>
616           Takes path URIs as arguments and creates directories. The behavior is much like Unix mkdir -p, creating parent directories along the path.
617          </p>
618<p>Example:</p>
619<ul>
620                               
621<li>
622                                       
623<span class="codefrag">hadoop fs -mkdir /user/hadoop/dir1 /user/hadoop/dir2 </span>
624                               
625</li>
626                               
627<li>
628                                       
629<span class="codefrag">hadoop fs -mkdir hdfs://nn1.example.com/user/hadoop/dir hdfs://nn2.example.com/user/hadoop/dir
630          </span>
631                               
632</li>
633                       
634</ul>
635<p>Exit Code:</p>
636<p>
637                               
638<span class="codefrag">Returns 0 on success and -1 on error.</span>
639                       
640</p>
641<a name="N10228"></a><a name="moveFromLocal"></a>
642<h3 class="h4"> moveFromLocal </h3>
643<p>
644                               
645<span class="codefrag">Usage: hadoop fs -moveFromLocal &lt;localsrc&gt; &lt;dst&gt;</span>
646                       
647</p>
648<p>Similar to <a href="#put"><strong>put</strong></a> command, except that the source <span class="codefrag">localsrc</span> is deleted after it's copied. </p>
649<a name="N10240"></a><a name="moveToLocal"></a>
650<h3 class="h4"> moveToLocal</h3>
651<p>
652                               
653<span class="codefrag">Usage: hadoop fs -moveToLocal [-crc] &lt;src&gt; &lt;dst&gt;</span>
654                       
655</p>
656<p>Displays a "Not implemented yet" message.</p>
657<a name="N10250"></a><a name="mv"></a>
658<h3 class="h4"> mv </h3>
659<p>
660                               
661<span class="codefrag">Usage: hadoop fs -mv URI [URI &hellip;] &lt;dest&gt;</span>
662                       
663</p>
664<p>
665            Moves files from source to destination. This command allows multiple sources as well in which case the destination needs to be a directory. Moving files across filesystems is not permitted.
666            <br>
667            Example:
668            </p>
669<ul>
670                               
671<li>
672                                       
673<span class="codefrag"> hadoop fs -mv /user/hadoop/file1 /user/hadoop/file2</span>
674                               
675</li>
676                               
677<li>
678                                       
679<span class="codefrag"> hadoop fs -mv hdfs://nn.example.com/file1 hdfs://nn.example.com/file2 hdfs://nn.example.com/file3 hdfs://nn.example.com/dir1</span>
680                               
681</li>
682                       
683</ul>
684<p>Exit Code:</p>
685<p>
686                               
687<span class="codefrag"> Returns 0 on success and -1 on error.</span>
688                       
689</p>
690<a name="N1027A"></a><a name="put"></a>
691<h3 class="h4"> put </h3>
692<p>
693                               
694<span class="codefrag">Usage: hadoop fs -put &lt;localsrc&gt; ... &lt;dst&gt;</span>
695                       
696</p>
697<p>Copy single src, or multiple srcs from local file system to the destination filesystem. Also reads input from stdin and writes to destination filesystem.<br>
698           
699</p>
700<ul>
701                               
702<li>
703                                       
704<span class="codefrag"> hadoop fs -put localfile /user/hadoop/hadoopfile</span>
705                               
706</li>
707                               
708<li>
709                                       
710<span class="codefrag"> hadoop fs -put localfile1 localfile2 /user/hadoop/hadoopdir</span>
711                               
712</li>
713                               
714<li>
715                                       
716<span class="codefrag"> hadoop fs -put localfile hdfs://nn.example.com/hadoop/hadoopfile</span>
717                               
718</li>
719                               
720<li>
721<span class="codefrag">hadoop fs -put - hdfs://nn.example.com/hadoop/hadoopfile</span>
722<br>Reads the input from stdin.</li>
723                       
724</ul>
725<p>Exit Code:</p>
726<p>
727                               
728<span class="codefrag"> Returns 0 on success and -1 on error. </span>
729                       
730</p>
731<a name="N102B0"></a><a name="rm"></a>
732<h3 class="h4"> rm </h3>
733<p>
734                               
735<span class="codefrag">Usage: hadoop fs -rm [-skipTrash] URI [URI &hellip;] </span>
736                       
737</p>
738<p>
739           Delete files specified as args. Only deletes files and empty directories. If the <span class="codefrag">-skipTrash</span> option
740           is specified, the trash, if enabled, will be bypassed and the specified file(s) deleted immediately.         This can be
741           useful when it is necessary to delete files from an over-quota directory.
742           Refer to rmr for recursive deletes.<br>
743           Example:
744           </p>
745<ul>
746                               
747<li>
748                                       
749<span class="codefrag"> hadoop fs -rm hdfs://nn.example.com/file /user/hadoop/emptydir </span>
750                               
751</li>
752                       
753</ul>
754<p>Exit Code:</p>
755<p>
756                               
757<span class="codefrag"> Returns 0 on success and -1 on error.</span>
758                       
759</p>
760<a name="N102D7"></a><a name="rmr"></a>
761<h3 class="h4"> rmr </h3>
762<p>
763                               
764<span class="codefrag">Usage: hadoop fs -rmr [-skipTrash] URI [URI &hellip;]</span>
765                       
766</p>
767<p>Recursive version of delete. If the <span class="codefrag">-skipTrash</span> option
768                   is specified, the trash, if enabled, will be bypassed and the specified file(s) deleted immediately. This can be
769                   useful when it is necessary to delete files from an over-quota directory.<br>
770
771           Example:
772           </p>
773<ul>
774                               
775<li>
776                                       
777<span class="codefrag"> hadoop fs -rmr /user/hadoop/dir </span>
778                               
779</li>
780                               
781<li>
782                                       
783<span class="codefrag"> hadoop fs -rmr hdfs://nn.example.com/user/hadoop/dir </span>
784                               
785</li>
786                       
787</ul>
788<p>Exit Code:</p>
789<p>
790                               
791<span class="codefrag"> Returns 0 on success and -1 on error. </span>
792                       
793</p>
794<a name="N10304"></a><a name="setrep"></a>
795<h3 class="h4"> setrep </h3>
796<p>
797                               
798<span class="codefrag">Usage: hadoop fs -setrep [-R] [-w] &lt;rep&gt; &lt;path&gt;</span>
799                       
800</p>
801<p>
802           Changes the replication factor of a file. -R option is for recursively increasing the replication factor of files within a directory.
803          </p>
804<p>Example:</p>
805<ul>
806                               
807<li>
808                                       
809<span class="codefrag"> hadoop fs -setrep -w 3 -R /user/hadoop/dir1 </span>
810                               
811</li>
812                       
813</ul>
814<p>Exit Code:</p>
815<p>
816                               
817<span class="codefrag">Returns 0 on success and -1 on error. </span>
818                       
819</p>
820<a name="N10329"></a><a name="stat"></a>
821<h3 class="h4"> stat </h3>
822<p>
823                               
824<span class="codefrag">Usage: hadoop fs -stat URI [URI &hellip;]</span>
825                       
826</p>
827<p>
828           Returns the stat information on the path.
829           </p>
830<p>Example:</p>
831<ul>
832                               
833<li>
834                                       
835<span class="codefrag"> hadoop fs -stat path </span>
836                               
837</li>
838                       
839</ul>
840<p>Exit Code:<br>
841           
842<span class="codefrag"> Returns 0 on success and -1 on error.</span>
843</p>
844<a name="N1034C"></a><a name="tail"></a>
845<h3 class="h4"> tail </h3>
846<p>
847                               
848<span class="codefrag">Usage: hadoop fs -tail [-f] URI </span>
849                       
850</p>
851<p>
852           Displays last kilobyte of the file to stdout. -f option can be used as in Unix.
853           </p>
854<p>Example:</p>
855<ul>
856                               
857<li>
858                                       
859<span class="codefrag"> hadoop fs -tail pathname </span>
860                               
861</li>
862                       
863</ul>
864<p>Exit Code: <br>
865           
866<span class="codefrag"> Returns 0 on success and -1 on error.</span>
867</p>
868<a name="N1036F"></a><a name="test"></a>
869<h3 class="h4"> test </h3>
870<p>
871                               
872<span class="codefrag">Usage: hadoop fs -test -[ezd] URI</span>
873                       
874</p>
875<p>
876           Options: <br>
877           -e check to see if the file exists. Return 0 if true. <br>
878           -z check to see if the file is zero length. Return 0 if true. <br>
879           -d check to see if the path is directory. Return 0 if true. <br>
880</p>
881<p>Example:</p>
882<ul>
883                               
884<li>
885                                       
886<span class="codefrag"> hadoop fs -test -e filename </span>
887                               
888</li>
889                       
890</ul>
891<a name="N10392"></a><a name="text"></a>
892<h3 class="h4"> text </h3>
893<p>
894                               
895<span class="codefrag">Usage: hadoop fs -text &lt;src&gt;</span>
896                               
897<br>
898                       
899</p>
900<p>
901           Takes a source file and outputs the file in text format. The allowed formats are zip and TextRecordInputStream.
902          </p>
903<a name="N103A4"></a><a name="touchz"></a>
904<h3 class="h4"> touchz </h3>
905<p>
906                               
907<span class="codefrag">Usage: hadoop fs -touchz URI [URI &hellip;]</span>
908                               
909<br>
910                       
911</p>
912<p>
913           Create a file of zero length.
914           </p>
915<p>Example:</p>
916<ul>
917                               
918<li>
919                                       
920<span class="codefrag"> hadoop fs -touchz pathname </span>
921                               
922</li>
923                       
924</ul>
925<p>Exit Code:<br>
926           
927<span class="codefrag"> Returns 0 on success and -1 on error.</span>
928</p>
929</div>
930       
931</div>
932<!--+
933    |end content
934    +-->
935<div class="clearboth">&nbsp;</div>
936</div>
937<div id="footer">
938<!--+
939    |start bottomstrip
940    +-->
941<div class="lastmodified">
942<script type="text/javascript"><!--
943document.write("Last Published: " + document.lastModified);
944//  --></script>
945</div>
946<div class="copyright">
947        Copyright &copy;
948         2008 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
949</div>
950<!--+
951    |end bottomstrip
952    +-->
953</div>
954</body>
955</html>
Note: See TracBrowser for help on using the repository browser.