2019-12-10 09:34:46 +00:00
|
|
|
#!/usr/bin/env python3
|
2019-12-05 01:14:10 +00:00
|
|
|
"""This module takes input and returns link_data, the data structure linkulator works from"""
|
|
|
|
import time
|
|
|
|
from pathlib import PurePath
|
|
|
|
from glob import glob
|
2019-12-05 02:20:23 +00:00
|
|
|
import re
|
2019-12-05 01:14:10 +00:00
|
|
|
|
2019-12-06 18:11:00 +00:00
|
|
|
BADCHARS = re.compile(r"^[ -~]+$")
|
2019-12-05 02:20:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
def is_well_formed_line(line: str) -> bool:
    """Return True when *line* has the pipe count of a valid record.

    A properly formatted line in linkulator.data contains exactly four
    pipe delimiters separating its five fields.
    """
    expected_pipes = 4
    return line.count("|") == expected_pipes
|
|
|
|
|
|
|
|
|
|
|
|
def is_valid_time(split_line) -> bool:
    """Return True when the record's timestamp is present, numeric, and not in the future.

    ``split_line[0]`` holds the record's epoch timestamp as a string.
    Previously a non-numeric timestamp field made ``float()`` raise
    ValueError out of this function; now it is treated as invalid data
    and reported as False, and an empty field yields False rather than
    the falsy-but-non-bool ``''``.
    """
    timestamp = split_line[0]
    if not timestamp:
        return False
    try:
        return float(timestamp) < time.time()
    except ValueError:  # non-numeric timestamp field -> invalid record
        return False
|
2019-12-05 01:14:10 +00:00
|
|
|
|
|
|
|
|
2019-12-05 02:20:23 +00:00
|
|
|
def wash_line(line):
    """Return *line* with any trailing carriage-return/newline removed.

    Only the line terminator is stripped; every other character in the
    line is preserved unchanged.
    """
    return line.rstrip("\r\n")
|
2019-12-05 02:20:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
def process(line: str, file_owner: str):
    """Validate one raw data-file line and return it as a field list.

    The returned list is ``[file_owner, timestamp, parent-id, category,
    link-url, link-title]``.

    Raises:
        ValueError: when the line does not contain the expected number of
            pipe delimiters, or its timestamp is missing or future-dated.
    """
    if not is_well_formed_line(line):
        raise ValueError("Not a well formed record")
    fields = wash_line(line).split("|")
    if not is_valid_time(fields):
        raise ValueError("Invalid date")
    return [file_owner] + fields
|
|
|
|
|
|
|
|
|
|
|
|
def get(config, ignore_names):
    """Read all users' data files and return valid data in linkulator format.

    Parameters:
        config: configuration object providing ``PATHS.all_homedir_pattern``,
            ``PATHS.datadir`` and ``PATHS.datafile``, combined into the glob
            pattern locating every user's data file.
        ignore_names: usernames whose data files are skipped entirely.

    Returns a 3-tuple:
        link_data: list of records of the form
            ``[id, username, datestamp, parent-id, category, link-url,
            link-title]``, sorted newest-first by datestamp.
        categories: distinct category names in discovery order.
        category_counts: dict mapping category name -> record count.

    Whenever this function is called, the data is refreshed from files.
    Since disk IO is probably the heaviest part of this script, don't do
    this often.

    BUG FIX: this function previously reset ``ignore_names`` to an empty
    list on entry, which silently disabled the ignore feature.
    """
    files_pattern = str(
        PurePath(config.PATHS.all_homedir_pattern).joinpath(
            config.PATHS.datadir, config.PATHS.datafile
        )
    )

    linkulator_lines = []
    for filename in glob(files_pattern):
        # The data file lives at <home>/<datadir>/<datafile>, so the
        # grandparent directory name is the owning username.
        file_owner = PurePath(filename).parent.parent.name
        if file_owner in ignore_names:
            continue  # skip users listed in the ignore file
        with open(filename) as f:
            for line in f:
                try:
                    linkulator_lines.append(process(line, file_owner))
                except ValueError:
                    continue  # drop malformed or invalid-date records

    # Assign sequential ids to parent records (those with an empty
    # parent-id field); replies get an empty id slot so every record
    # ends up with the same shape.
    next_id = 1
    for record in linkulator_lines:
        if record[2] == "":  # no parent-id -> this is a parent post
            record.insert(0, next_id)
            next_id += 1
        else:
            record.insert(0, "")

    link_data = linkulator_lines
    # Sort all links by creation date, newest first.  Timestamps are
    # compared numerically: lexicographic comparison of epoch strings
    # breaks as soon as their lengths differ.  float() is safe here
    # because process() only admits records with a numeric timestamp.
    link_data.sort(key=lambda record: float(record[2]), reverse=True)

    categories = []
    category_counts = {}
    for record in link_data:
        category = record[4]
        if category and category not in categories:
            categories.append(category)
            category_counts[category] = 1
        elif category in categories:
            category_counts[category] += 1

    return link_data, categories, category_counts
|