# Test a minimal HDFS cluster with no HA
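# A single namenode and a single datanode are brought up, a file is written
# through the hdfs CLI, and (on 3.3 and newer) read back over WebHDFS via httpfs.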
import ../make-test-python.nix (
"fs.defaultFS" = "hdfs://namenode:8020";
"hadoop.proxyuser.httpfs.groups" = "*";
"hadoop.proxyuser.httpfs.hosts" = "*";
# The NixOS hadoop module only supports webHDFS on 3.3 and newer
enable = lib.mkIf (lib.versionAtLeast package.version "3.3") true;
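# Give the datanode VM a larger disk (4096 MiB) so HDFS has room for block storage.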
virtualisation.diskSize = 4096;
namenode.wait_for_unit("hdfs-namenode")
namenode.wait_for_unit("network.target")
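# 8020 is the namenode RPC port configured in fs.defaultFS.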
namenode.wait_for_open_port(8020)
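# Dump the listening sockets and the generated hdfs-site.xml into the journal
# to make failures easier to debug.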
namenode.succeed("systemd-cat ss -tulpne")
namenode.succeed("systemd-cat cat /etc/hadoop*/hdfs-site.xml")
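# 9870 is the namenode HTTP web UI port on Hadoop 3.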
namenode.wait_for_open_port(9870)
datanode.wait_for_unit("hdfs-datanode")
datanode.wait_for_unit("network.target")
if lib.versionAtLeast package.version "3" then
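# Hadoop 3 datanode ports: 9864 (HTTP), 9866 (data transfer), 9867 (IPC).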
datanode.wait_for_open_port(9864)
datanode.wait_for_open_port(9866)
datanode.wait_for_open_port(9867)
datanode.succeed("curl -f http://datanode:9864")
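# Hadoop 2 uses the legacy datanode ports instead: 50075 (HTTP), 50010 (data transfer), 50020 (IPC).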
datanode.wait_for_open_port(50075)
datanode.wait_for_open_port(50010)
datanode.wait_for_open_port(50020)
datanode.succeed("curl -f http://datanode:50075")
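# Sanity-check that the namenode web UI serves HTTP requests.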
namenode.succeed("curl -f http://namenode:9870")
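# Wait until HDFS leaves safe mode, then round-trip a test file through the hdfs CLI.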
datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
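# On 3.3 and newer, read the same file back through the httpfs WebHDFS gateway on port 14000.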
+ lib.optionalString (lib.versionAtLeast package.version "3.3") ''
namenode.wait_for_unit("hdfs-httpfs")
namenode.wait_for_open_port(14000)
assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")