java - Write a file on HDFS with permissions enabled


I am trying to write a file to HDFS using Java (1.8) with permissions enabled.

As the Hadoop instance I have used the ready-made Docker image: https://hub.docker.com/r/sequenceiq/hadoop-docker/

I have followed an example of writing a file to HDFS in Java, ending up with the following code:

    import java.io.BufferedWriter;
    import java.io.OutputStream;
    import java.io.OutputStreamWriter;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.util.Progressable;

    // Connect to the HDFS instance exposed by the Docker container
    Configuration configuration = new Configuration();
    configuration.set("fs.defaultFS", "hdfs://127.0.0.1:9000/user/root");
    configuration.set("hadoop.job.ugi", "root");
    FileSystem hdfs = FileSystem.get(configuration);

    Path file = new Path("hdfs://127.0.0.1:9000/user/root/test.txt");
    if (hdfs.exists(file)) {
        // hdfs.delete(file, true);
        System.out.println("File exists");
    }

    // Create the file and write a short UTF-8 text into it
    OutputStream os = hdfs.create(file,
            new Progressable() {
                public void progress() {
                    System.out.println("");
                }
            });
    BufferedWriter br = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
    br.write("Hello World");
    br.close();
    hdfs.close();

I get the following exception:

org.apache.hadoop.security.AccessControlException: Permission denied: user=xxx, access=WRITE, inode="/user/root/text.txt":root:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:292)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:213)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1698)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1682)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1665)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInternal(FSNamesystem.java:2494)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2429)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2312)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:622)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1628)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1703)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1638)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:909)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:802)
    at com.finscience.Main.main(Main.java:28)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=stefano, access=WRITE, inode="/user/root/capacity-scheduler.xml":root:supergroup:drwxr-xr-x
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:292)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:213)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1698)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1682)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkAncestorAccess(FSDirectory.java:1665)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInternal(FSNamesystem.java:2494)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2429)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2312)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:622)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
    at org.apache.hadoop.ipc.Client.call(Client.java:1476)
    at org.apache.hadoop.ipc.Client.call(Client.java:1407)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    at com.sun.proxy.$Proxy9.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    at com.sun.proxy.$Proxy10.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1623)
    ... 15 more

How can I write a file on HDFS without disabling permissions?

HDFS is a POSIX-like file system. Access to directories and files is restricted by ACLs. To be able to write files, you have to deal with those ACLs.
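Before changing anything, it helps to see which owner, group and permission bits HDFS has recorded for the path you are writing to. A minimal sketch, reusing the FileSystem handle hdfs from the snippet above:

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.permission.FsPermission;

    // Print the owner, group and mode of the target directory,
    // i.e. the values that show up in the AccessControlException
    FileStatus status = hdfs.getFileStatus(new Path("/user/root"));
    FsPermission mode = status.getPermission();
    System.out.println(status.getOwner());   // root
    System.out.println(status.getGroup());   // supergroup
    System.out.println(mode);                // rwxr-xr-x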

If you are not familiar with the concept, check out the following link: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/HdfsPermissionsGuide.html

In your specific case, it seems you want to write into the home directory of the root user.

You have three options (the first and third are sketched in the code below):

* execute your application as the root user;
* add the service user under which your Java application runs to root's group;
* change the owner of the directory (chown) or its access rights (chmod).
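As an illustration, here is a minimal sketch of the first and third options. It assumes the cluster uses simple (non-Kerberos) authentication, where the client-supplied user name is taken at face value, so UserGroupInformation.createRemoteUser("root") (or setting the HADOOP_USER_NAME environment variable) is enough to act as root; the chown/chmod equivalents are the setOwner/setPermission calls of the FileSystem API. The user name "stefano" is just the example user from the stack trace above.

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.security.UserGroupInformation;

    // Option 1: perform the HDFS calls as "root" (simple authentication only)
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("root");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://127.0.0.1:9000");
            FileSystem fs = FileSystem.get(conf);

            // Option 3: the chown/chmod equivalents, run here as the superuser.
            // Hand the home directory over to the user "stefano" ...
            fs.setOwner(new Path("/user/root"), "stefano", "supergroup");
            // ... or widen its permission bits to rwxrwxr-x instead
            fs.setPermission(new Path("/user/root"), new FsPermission((short) 0775));

            fs.close();
            return null;
        }
    });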

