diff --git a/1-2.py b/1-2.py
new file mode 100644
index 0000000..12a4e42
--- /dev/null
+++ b/1-2.py
@@ -0,0 +1,30 @@
+import re
+from pyspark import SparkContext
+
+sc = SparkContext.getOrCreate()
+
+LOG_PATTERN = re.compile(r'^(\S+) (\S+) (\S+) \[([\w:/]+\s[+-]\d{4})\] "(\S+) (\S+)\s*(\S*)\s?" (\d{3}) (\S+)')
+
+
+def parse_log_line(line):
+    match = LOG_PATTERN.match(line)
+    if not match:
+        return None
+
+    return {
+        'ip': match.group(1)
+    }
+
+
+logFile = "hdfs://master:9000/user/root/apache.access.log.PROJECT"
+raw_logs = sc.textFile(logFile)
+
+parsed_logs = raw_logs.map(parse_log_line).filter(lambda x: x is not None)
+
+# Extract IP addresses and count the number of unique hosts
+unique_hosts = parsed_logs.map(lambda log: log['ip']).distinct()
+unique_host_count = unique_hosts.count()
+
+print("Total number of unique hosts: {0}".format(unique_host_count))
+
+sc.stop()
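
Not part of the patch: a minimal local sanity check of LOG_PATTERN against a made-up Common Log Format line (the sample host, timestamp, and request below are hypothetical), useful for verifying the regex and parse_log_line without access to the HDFS path.

# sanity_check.py -- standalone, no Spark required
import re

# Same pattern as in 1-2.py
LOG_PATTERN = re.compile(r'^(\S+) (\S+) (\S+) \[([\w:/]+\s[+-]\d{4})\] "(\S+) (\S+)\s*(\S*)\s?" (\d{3}) (\S+)')

# Hypothetical sample line in Apache Common Log Format
sample = '127.0.0.1 - - [01/Aug/1995:00:00:01 -0400] "GET /images/launch-logo.gif HTTP/1.0" 200 1839'

match = LOG_PATTERN.match(sample)
assert match is not None, "regex did not match the sample line"
print(match.group(1))  # prints the client host field: 127.0.0.1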