001/** 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018package org.apache.hadoop.hdfs.server.datanode.web.resources; 019 020import java.io.IOException; 021import java.io.InputStream; 022import java.net.URI; 023import java.net.URISyntaxException; 024import java.security.PrivilegedExceptionAction; 025import java.util.EnumSet; 026 027import javax.servlet.ServletContext; 028import javax.servlet.http.HttpServletRequest; 029import javax.servlet.http.HttpServletResponse; 030import javax.ws.rs.Consumes; 031import javax.ws.rs.DefaultValue; 032import javax.ws.rs.GET; 033import javax.ws.rs.POST; 034import javax.ws.rs.PUT; 035import javax.ws.rs.Path; 036import javax.ws.rs.PathParam; 037import javax.ws.rs.Produces; 038import javax.ws.rs.QueryParam; 039import javax.ws.rs.core.Context; 040import javax.ws.rs.core.MediaType; 041import javax.ws.rs.core.Response; 042 043import com.google.common.annotations.VisibleForTesting; 044import org.apache.commons.logging.Log; 045import org.apache.commons.logging.LogFactory; 046import org.apache.hadoop.conf.Configuration; 047import org.apache.hadoop.fs.CreateFlag; 048import org.apache.hadoop.fs.FSDataOutputStream; 049import 
org.apache.hadoop.fs.MD5MD5CRC32FileChecksum; 050import org.apache.hadoop.fs.permission.FsPermission; 051import org.apache.hadoop.hdfs.DFSClient; 052import org.apache.hadoop.hdfs.HAUtil; 053import org.apache.hadoop.hdfs.client.HdfsDataInputStream; 054import org.apache.hadoop.hdfs.protocol.HdfsConstants; 055import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; 056import org.apache.hadoop.hdfs.server.datanode.DataNode; 057import org.apache.hadoop.hdfs.web.JsonUtil; 058import org.apache.hadoop.hdfs.web.ParamFilter; 059import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem; 060import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; 061import org.apache.hadoop.hdfs.web.resources.BlockSizeParam; 062import org.apache.hadoop.hdfs.web.resources.BufferSizeParam; 063import org.apache.hadoop.hdfs.web.resources.DelegationParam; 064import org.apache.hadoop.hdfs.web.resources.GetOpParam; 065import org.apache.hadoop.hdfs.web.resources.HttpOpParam; 066import org.apache.hadoop.hdfs.web.resources.LengthParam; 067import org.apache.hadoop.hdfs.web.resources.NamenodeAddressParam; 068import org.apache.hadoop.hdfs.web.resources.OffsetParam; 069import org.apache.hadoop.hdfs.web.resources.OverwriteParam; 070import org.apache.hadoop.hdfs.web.resources.Param; 071import org.apache.hadoop.hdfs.web.resources.PermissionParam; 072import org.apache.hadoop.hdfs.web.resources.PostOpParam; 073import org.apache.hadoop.hdfs.web.resources.PutOpParam; 074import org.apache.hadoop.hdfs.web.resources.ReplicationParam; 075import org.apache.hadoop.hdfs.web.resources.UriFsPathParam; 076import org.apache.hadoop.io.IOUtils; 077import org.apache.hadoop.security.SecurityUtil; 078import org.apache.hadoop.security.UserGroupInformation; 079import org.apache.hadoop.security.token.Token; 080 081import com.sun.jersey.spi.container.ResourceFilters; 082 083/** Web-hdfs DataNode implementation. 
 */
@Path("")
@ResourceFilters(ParamFilter.class)
public class DatanodeWebHdfsMethods {
  public static final Log LOG = LogFactory.getLog(DatanodeWebHdfsMethods.class);

  /** Path parameter standing in for the filesystem root ("/"). */
  private static final UriFsPathParam ROOT = new UriFsPathParam("");

  // Injected by the JAX-RS runtime for each request.
  private @Context ServletContext context;
  private @Context HttpServletRequest request;
  private @Context HttpServletResponse response;

  /**
   * Common per-request initialization shared by the GET/PUT/POST handlers:
   * traces the request, validates that a target namenode was supplied,
   * clears the response content type, and, when security is enabled,
   * attaches the client's delegation token to the UGI so that subsequent
   * RPCs to the namenode authenticate as the caller.
   *
   * @param ugi caller's user information, injected by the request filter
   * @param delegation serialized delegation token query parameter
   * @param nnId target namenode (host:port or HA logical name); required
   * @param path file path being operated on (used here for tracing only)
   * @param op the HTTP operation parameter
   * @param parameters remaining query parameters, logged for tracing
   * @throws IllegalArgumentException if {@code nnId} is null
   * @throws IOException if the delegation token cannot be decoded
   */
  private void init(final UserGroupInformation ugi,
      final DelegationParam delegation, final String nnId,
      final UriFsPathParam path, final HttpOpParam<?> op,
      final Param<?, ?>... parameters) throws IOException {
    if (LOG.isTraceEnabled()) {
      LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
          + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
    }
    if (nnId == null) {
      throw new IllegalArgumentException(NamenodeAddressParam.NAME
          + " is not specified.");
    }

    //clear content type
    response.setContentType(null);

    if (UserGroupInformation.isSecurityEnabled()) {
      //add a token for RPC.
      final Token<DelegationTokenIdentifier> token = deserializeToken
          (delegation.getValue(), nnId);
      ugi.addToken(token);
    }
  }

  /**
   * Reconstructs a namenode delegation token from its URL-safe string form.
   * The token's service field is rewritten to match the target namenode:
   * the logical service name when {@code nnId} is an HA logical URI,
   * otherwise the physical address. The kind is forced to
   * {@link DelegationTokenIdentifier#HDFS_DELEGATION_KIND}.
   *
   * @param delegation URL-encoded token string
   * @param nnId namenode host:port or HA logical name
   * @return the decoded token, ready to be added to a UGI
   * @throws IOException if the token string cannot be decoded
   */
  @VisibleForTesting
  Token<DelegationTokenIdentifier> deserializeToken
      (String delegation,String nnId) throws IOException {
    final DataNode datanode = (DataNode) context.getAttribute("datanode");
    final Configuration conf = datanode.getConf();
    final Token<DelegationTokenIdentifier> token = new
        Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(delegation);
    URI nnUri = URI.create(HdfsConstants.HDFS_URI_SCHEME +
        "://" + nnId);
    boolean isLogical = HAUtil.isLogicalUri(conf, nnUri);
    if (isLogical) {
      token.setService(HAUtil.buildTokenServiceForLogicalUri(nnUri,
          HdfsConstants.HDFS_URI_SCHEME));
    } else {
      token.setService(SecurityUtil.buildTokenService(nnUri));
    }
    token.setKind(DelegationTokenIdentifier.HDFS_DELEGATION_KIND);
    return token;
  }

  /** Handle HTTP PUT request for the root.
   * Delegates to {@link #put} with the path fixed to {@link #ROOT}.
   */
  @PUT
  @Path("/")
  @Consumes({"*/*"})
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response putRoot(
      final InputStream in,
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
          final PutOpParam op,
      @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
          final PermissionParam permission,
      @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
          final OverwriteParam overwrite,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize,
      @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
          final ReplicationParam replication,
      @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
          final BlockSizeParam blockSize
      ) throws IOException, InterruptedException {
    return put(in, ugi, delegation, namenode, ROOT, op, permission,
        overwrite, bufferSize, replication, blockSize);
  }

  /** Handle HTTP PUT request.
   * Validates/initializes via {@code init} and then performs the operation
   * as the remote user, so HDFS permission checks apply to the caller.
   */
  @PUT
  @Path("{" + UriFsPathParam.NAME + ":.*}")
  @Consumes({"*/*"})
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response put(
      final InputStream in,
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
      @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
          final PutOpParam op,
      @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
          final PermissionParam permission,
      @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
          final OverwriteParam overwrite,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize,
      @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
          final ReplicationParam replication,
      @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
          final BlockSizeParam blockSize
      ) throws IOException, InterruptedException {

    final String nnId = namenode.getValue();
    init(ugi, delegation, nnId, path, op, permission,
        overwrite, bufferSize, replication, blockSize);

    // Run the actual operation under the caller's identity.
    return ugi.doAs(new PrivilegedExceptionAction<Response>() {
      @Override
      public Response run() throws IOException, URISyntaxException {
        return put(in, nnId, path.getAbsolutePath(), op,
            permission, overwrite, bufferSize, replication, blockSize);
      }
    });
  }

  /**
   * PUT implementation. Only CREATE is handled here on the datanode: the
   * request body is streamed into a newly created HDFS file and a 201
   * response pointing back at the file is returned.
   *
   * @throws UnsupportedOperationException for any other PUT operation
   */
  private Response put(
      final InputStream in,
      final String nnId,
      final String fullpath,
      final PutOpParam op,
      final PermissionParam permission,
      final OverwriteParam overwrite,
      final BufferSizeParam bufferSize,
      final ReplicationParam replication,
      final BlockSizeParam blockSize
      ) throws IOException, URISyntaxException {
    final DataNode datanode = (DataNode)context.getAttribute("datanode");

    switch(op.getValue()) {
    case CREATE:
    {
      // Copy of the datanode conf with the umask cleared so the permission
      // from the request is applied exactly as given.
      final Configuration conf = new Configuration(datanode.getConf());
      conf.set(FsPermission.UMASK_LABEL, "000");

      final int b = bufferSize.getValue(conf);
      DFSClient dfsclient = newDfsClient(nnId, conf);
      FSDataOutputStream out = null;
      try {
        out = dfsclient.createWrappedOutputStream(dfsclient.create(
            fullpath, permission.getFsPermission(),
            overwrite.getValue() ?
                EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE) :
                EnumSet.of(CreateFlag.CREATE),
            replication.getValue(conf), blockSize.getValue(conf), null,
            b, null), null);
        IOUtils.copyBytes(in, out, b);
        out.close();
        out = null;
        dfsclient.close();
        dfsclient = null;
      } finally {
        // Non-null only if the happy path above failed; close quietly.
        IOUtils.cleanup(LOG, out);
        IOUtils.cleanup(LOG, dfsclient);
      }
      // 201 Created with a webhdfs/swebhdfs URI matching the request scheme.
      final String scheme = "http".equals(request.getScheme()) ?
          WebHdfsFileSystem.SCHEME : SWebHdfsFileSystem.SCHEME;
      final URI uri = new URI(scheme, nnId, fullpath, null, null);
      return Response.created(uri).type(MediaType.APPLICATION_OCTET_STREAM).build();
    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
    }
  }

  /** Handle HTTP POST request for the root.
   * Delegates to {@link #post} with the path fixed to {@link #ROOT}.
   */
  @POST
  @Path("/")
  @Consumes({"*/*"})
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response postRoot(
      final InputStream in,
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
          final PostOpParam op,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize
      ) throws IOException, InterruptedException {
    return post(in, ugi, delegation, namenode, ROOT, op, bufferSize);
  }

  /** Handle HTTP POST request. */
  @POST
  @Path("{" + UriFsPathParam.NAME + ":.*}")
  @Consumes({"*/*"})
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response post(
      final InputStream in,
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
      @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
          final PostOpParam op,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize
      ) throws IOException, InterruptedException {

    final String nnId = namenode.getValue();
    init(ugi, delegation, nnId, path, op, bufferSize);

    // Run the actual operation under the caller's identity.
    return ugi.doAs(new PrivilegedExceptionAction<Response>() {
      @Override
      public Response run() throws IOException {
        return post(in, nnId, path.getAbsolutePath(), op,
            bufferSize);
      }
    });
  }

  /**
   * POST implementation. Only APPEND is handled here on the datanode:
   * the request body is streamed onto the end of the existing file.
   *
   * @throws UnsupportedOperationException for any other POST operation
   */
  private Response post(
      final InputStream in,
      final String nnId,
      final String fullpath,
      final PostOpParam op,
      final BufferSizeParam bufferSize
      ) throws IOException {
    final DataNode datanode = (DataNode)context.getAttribute("datanode");

    switch(op.getValue()) {
    case APPEND:
    {
      final Configuration conf = new Configuration(datanode.getConf());
      final int b = bufferSize.getValue(conf);
      DFSClient dfsclient = newDfsClient(nnId, conf);
      FSDataOutputStream out = null;
      try {
        out = dfsclient.append(fullpath, b, null, null);
        IOUtils.copyBytes(in, out, b);
        out.close();
        out = null;
        dfsclient.close();
        dfsclient = null;
      } finally {
        // Non-null only if the happy path above failed; close quietly.
        IOUtils.cleanup(LOG, out);
        IOUtils.cleanup(LOG, dfsclient);
      }
      return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
    }
  }

  /** Handle HTTP GET request for the root.
   * Delegates to {@link #get} with the path fixed to {@link #ROOT}.
   */
  @GET
  @Path("/")
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response getRoot(
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
          final GetOpParam op,
      @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
          final OffsetParam offset,
      @QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
          final LengthParam length,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize
      ) throws IOException, InterruptedException {
    return get(ugi, delegation, namenode, ROOT, op, offset, length,
        bufferSize);
  }

  /** Handle HTTP GET request.
   * Validates/initializes via {@code init} and then performs the operation
   * as the remote user, so HDFS permission checks apply to the caller.
   */
  @GET
  @Path("{" + UriFsPathParam.NAME + ":.*}")
  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
  public Response get(
      @Context final UserGroupInformation ugi,
      @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
          final DelegationParam delegation,
      @QueryParam(NamenodeAddressParam.NAME)
      @DefaultValue(NamenodeAddressParam.DEFAULT)
          final NamenodeAddressParam namenode,
      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
      @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
          final GetOpParam op,
      @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
          final OffsetParam offset,
      @QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
          final LengthParam length,
      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
          final BufferSizeParam bufferSize
      ) throws IOException, InterruptedException {

    final String nnId = namenode.getValue();
    init(ugi, delegation, nnId, path, op, offset, length, bufferSize);

    // Run the actual operation under the caller's identity.
    return ugi.doAs(new PrivilegedExceptionAction<Response>() {
      @Override
      public Response run() throws IOException {
        return get(nnId, path.getAbsolutePath(), op, offset,
            length, bufferSize);
      }
    });
  }

  /**
   * GET implementation. OPEN streams file bytes starting at {@code offset}
   * (clamped to the visible length); GETFILECHECKSUM returns the file
   * checksum as JSON. For OPEN, ownership of the input stream and the
   * DFSClient passes to the returned {@code OpenEntity}.
   *
   * @throws UnsupportedOperationException for any other GET operation
   */
  private Response get(
      final String nnId,
      final String fullpath,
      final GetOpParam op,
      final OffsetParam offset,
      final LengthParam length,
      final BufferSizeParam bufferSize
      ) throws IOException {
    final DataNode datanode = (DataNode)context.getAttribute("datanode");
    final Configuration conf = new Configuration(datanode.getConf());

    switch(op.getValue()) {
    case OPEN:
    {
      final int b = bufferSize.getValue(conf);
      final DFSClient dfsclient = newDfsClient(nnId, conf);
      HdfsDataInputStream in = null;
      try {
        in = dfsclient.createWrappedInputStream(
            dfsclient.open(fullpath, b, true));
        in.seek(offset.getValue());
      } catch(IOException ioe) {
        // Failed before handing off to OpenEntity; release both here.
        IOUtils.cleanup(LOG, in);
        IOUtils.cleanup(LOG, dfsclient);
        throw ioe;
      }

      // Bytes to stream: the requested length, clamped to what is visible
      // past the offset (or everything past the offset if no length given).
      final long n = length.getValue() != null ?
          Math.min(length.getValue(), in.getVisibleLength() - offset.getValue()) :
          in.getVisibleLength() - offset.getValue();

      // jetty 6 reserves 12 bytes in the out buffer for chunked responses
      // (file length > 2GB) which causes extremely poor performance when
      // 12 bytes of the output spill into another buffer which results
      // in a big and little write
      int outBufferSize = response.getBufferSize();
      if (n > Integer.MAX_VALUE) {
        outBufferSize -= 12;
      }
      // CORS headers below allow the Web UI to perform an AJAX request
      // to get the data.
      return Response.ok(new OpenEntity(in, n, outBufferSize, dfsclient))
          .type(MediaType.APPLICATION_OCTET_STREAM)
          .header("Access-Control-Allow-Methods", "GET")
          .header("Access-Control-Allow-Origin", "*")
          .build();
    }
    case GETFILECHECKSUM:
    {
      MD5MD5CRC32FileChecksum checksum = null;
      DFSClient dfsclient = newDfsClient(nnId, conf);
      try {
        checksum = dfsclient.getFileChecksum(fullpath, Long.MAX_VALUE);
        dfsclient.close();
        dfsclient = null;
      } finally {
        // Non-null only if getFileChecksum/close failed; close quietly.
        IOUtils.cleanup(LOG, dfsclient);
      }
      final String js = JsonUtil.toJsonString(checksum);
      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    }
    default:
      throw new UnsupportedOperationException(op + " is not supported");
    }
  }

  /**
   * Creates a DFSClient talking to the given namenode, using the
   * {@code hdfs://} URI scheme.
   *
   * @param nnId namenode host:port or HA logical name
   * @param conf configuration for the client
   * @throws IOException if the client cannot be created
   */
  private static DFSClient newDfsClient(String nnId,
      Configuration conf) throws IOException {
    URI uri = URI.create(HdfsConstants.HDFS_URI_SCHEME + "://" + nnId);
    return new DFSClient(uri, conf);
  }
}