Compare commits

...

No commits in common. "master" and "main" have entirely different histories.
master ... main

28 changed files with 29 additions and 317 deletions

26
.gitignore vendored Normal file
View File

@@ -0,0 +1,26 @@
# ---> Java
# Compiled class file
*.class

# Log file
*.log

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*

3
.idea/.gitignore vendored
View File

@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="CompilerConfiguration">
    <annotationProcessing>
      <profile name="Maven default annotation processors profile" enabled="true">
        <sourceOutputDir name="target/generated-sources/annotations" />
        <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
        <outputRelativeToContentRoot value="true" />
        <module name="hdfs_pro" />
      </profile>
    </annotationProcessing>
  </component>
</project>

View File

@@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="RemoteRepositoriesConfiguration">
    <remote-repository>
      <option name="id" value="central" />
      <option name="name" value="Central Repository" />
      <option name="url" value="https://repo.maven.apache.org/maven2" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="central" />
      <option name="name" value="Maven Central repository" />
      <option name="url" value="https://repo1.maven.org/maven2" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="jboss.community" />
      <option name="name" value="JBoss Community repository" />
      <option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
    </remote-repository>
  </component>
</project>

View File

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ExternalStorageConfigurationManager" enabled="true" />
  <component name="MavenProjectsManager">
    <option name="originalFiles">
      <list>
        <option value="$PROJECT_DIR$/pom.xml" />
      </list>
    </option>
  </component>
  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/out" />
  </component>
</project>

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

3
README.md Normal file
View File

@@ -0,0 +1,3 @@
# hdfs_pro
HDFS programming

16
pom.xml
View File

@@ -1,16 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>hdfs_pro</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>

</project>

View File

@@ -1,21 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.util.Scanner;
import java.net.URI;

public class CreateDir {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String dirPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(dirPath);
            if (fs.mkdirs(hdfsPath)) {
                System.out.println("Directory " + dirPath + " has been created successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,20 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class CreateFile {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(filePath);
            // close the stream returned by create() so the new empty file is finalized
            fs.create(hdfsPath).close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,22 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class DeleteDir {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String dirPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(dirPath);
            if (fs.delete(hdfsPath, true)) {
                System.out.println("Directory " + dirPath + " has been deleted successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,23 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class DeleteFile {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(filePath);
            if (fs.delete(hdfsPath, false)) {
                System.out.println("File " + filePath + " has been deleted successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,22 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.util.Scanner;
import java.net.URI;

public class DirExist {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String dirName = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            if (fs.exists(new Path(dirName))) {
                System.out.println("Directory exists!");
            } else {
                System.out.println("Directory does not exist!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,24 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class FileExist {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String fileName = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            if (fs.exists(new Path(fileName))) {
                System.out.println("File exists!");
            } else {
                System.out.println("File does not exist!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,25 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class ListFiles {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(filePath);
            FileStatus[] stats = fs.listStatus(srcPath);
            Path[] paths = FileUtil.stat2Paths(stats);
            for (Path p : paths) {
                System.out.println(p.getName());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,32 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class ReadFile {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(filePath);
            FSDataInputStream is = fs.open(srcPath);
            while (true) {
                String line = is.readLine();
                if (line == null) {
                    break;
                }
                System.out.println(line);
            }
            is.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
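
Note: ReadFile above calls readLine() on FSDataInputStream, a method inherited from DataInputStream that is deprecated and does not decode bytes to characters correctly for non-ASCII text. A minimal sketch of the more conventional approach, wrapping the stream in a BufferedReader, follows; the class name ReadFileBuffered and the sample path /input.txt are illustrative assumptions, not files from this repository.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

// Illustrative sketch only: read an HDFS file line by line through a BufferedReader
// instead of the deprecated DataInputStream.readLine().
public class ReadFileBuffered {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
        Path srcPath = new Path("/input.txt"); // hypothetical path
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fs.open(srcPath), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}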

View File

@@ -1,24 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class Rename {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String srcStrPath = '/' + sc.next();
            String dstStrPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(srcStrPath);
            Path dstPath = new Path(dstStrPath);
            if (fs.rename(srcPath, dstPath)) {
                System.out.println("rename from " + srcStrPath + " to " + dstStrPath + " successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -1,32 +0,0 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.util.Scanner;

public class WriteFile {
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(filePath);
            FSDataOutputStream os = fs.create(srcPath, true, 1024, (short) 1, (long) (1 << 26));
            String str = "Hello, this is a sentence that should be written into the file.\n";
            os.write(str.getBytes());
            os.flush();
            os.close();
            os = fs.append(srcPath);
            str = "Hello, this is another sentence that should be written into the file.\n";
            os.write(str.getBytes());
            os.flush();
            os.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
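
All of the programs above obtain a FileSystem handle for hdfs://master:9000 and rely on process exit to release it, and they assume the Hadoop client libraries are on the classpath even though the pom.xml shown earlier declares no dependencies. A minimal sketch of the same connection pattern using try-with-resources follows; the class name FsTemplate is an assumption, and a hadoop-client dependency would have to be added to the build for any of this code to compile.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import java.net.URI;

// Illustrative sketch only: FileSystem implements Closeable, so try-with-resources
// closes the handle even if one of the HDFS operations throws.
public class FsTemplate {
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration())) {
            // the mkdirs/create/delete/rename calls from the classes above would go here
            System.out.println("Connected to " + fs.getUri());
        }
    }
}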

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.