import os
import pytz
from loguru import logger
from datetime import datetime
import csv
from multiprocessing import Queue


def cleanup_local_sensor_output_directory(sensor_dir: str):
    """
    Iterates through the directory and removes all 'date' directories that are empty and have a date prior to the
    current date (timezone-aware).

    Args:
        sensor_dir (str): The directory to be cleaned.
    """
logger.info(f"Cleaning up directory {sensor_dir}.")
sensor_name = sensor_dir.split('/')[-1]
today_date = datetime.today().astimezone(pytz.timezone("US/Eastern")).date()
for root, dirs, files in os.walk(top=sensor_dir, topdown=False):
for name in dirs:
try:
dir_date = datetime.date(datetime.strptime(name, "%Y-%m-%d"))
except ValueError as e:
logger.warning(f"Failed to get date of directory '{name}': {e}.")
continue
dir_path = os.path.join(root, name)
num_files_in_date_dir = len(os.listdir(dir_path))
if dir_date < today_date and num_files_in_date_dir == 0:
logger.debug(f"Removing empty directory `{name}` from the `{sensor_name}` output directory.")
os.rmdir(dir_path)
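
# Illustrative usage of cleanup_local_sensor_output_directory (a sketch only; the
# path below is hypothetical and not part of this module):
#
#     cleanup_local_sensor_output_directory("/data/sensors/camera_01")
#
# Only subdirectories whose names parse as "%Y-%m-%d", are empty, and predate
# today's US/Eastern date are removed; all other directories are left untouched.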


def write_sensor_csv_file(start_time_queue: Queue, file_path: str, data: list):
    """
    Writes .csv files for the sensors after they gather their data. The function checks whether this is the last
    recording of the day. If it is not the last recording, it simply appends to the .csv file. If it is the last
    recording of the day, the file is read and all of its contents are written to a new file with the same path but
    with .final appended to the end of the filename. This tags the file as the final recording of the day. When the
    uploader uploads the .final file, it also removes the .csv file.

    Args:
        start_time_queue (Queue): The queue that defines the starting times of the recordings. This is used to check
            whether this is the last recording of the day.
        file_path (str): The path to which the file should be written. If the file already exists, the data will be
            appended to the end of the file.
        data (list): A list that contains the data to add to the .csv file. Each element of the list is separated by a
            comma and written to the CSV as one row.

    Raises:
        FileNotFoundError: If the file at the given ``file_path`` (or ``file_path.final``) does not exist.
    """
    if not start_time_queue.empty():
        with open(file_path, 'a+') as file:
            writer = csv.writer(file, delimiter=',', quotechar="\"", quoting=csv.QUOTE_NONNUMERIC)
            writer.writerow(data)
    # Else write the contents of the old file to a new file with the extension .final. That triggers the uploader to
    # remove the file after it is uploaded.
    else:
        logger.debug(f'Writing the final recording to {file_path}.final')
        with open(f"{file_path}.final", 'w') as file_final, open(file_path, 'r') as file:
            writer = csv.writer(file_final, delimiter=',', quotechar="\"", quoting=csv.QUOTE_NONNUMERIC)
            reader = csv.reader(file, delimiter=',', quotechar="\"", quoting=csv.QUOTE_NONNUMERIC)
            for row in reader:
                writer.writerow(row)
            writer.writerow(data)
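
# Illustrative usage of write_sensor_csv_file (a sketch only; the queue contents,
# path, and row values below are hypothetical):
#
#     from multiprocessing import Queue
#     start_times = Queue()
#     start_times.put("09:00")  # start times remain, so the call appends a row
#     write_sensor_csv_file(start_times,
#                           "/data/sensors/camera_01/2024-01-01/readings.csv",
#                           ["2024-01-01T09:00:00", 21.4, 48.0])
#
# When start_times is empty, the same call instead copies the existing .csv into
# "<file_path>.final" and appends the final row there.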