diff --git a/src/live/kiosk.py b/src/live/kiosk.py
index e0c7c1a..36194e4 100644
--- a/src/live/kiosk.py
+++ b/src/live/kiosk.py
@@ -10,17 +10,22 @@ kiosk_config = {}
 def update_kiosk_config(refresh_payload):
     from src.utils.probe import OSFamily
+    from src.utils.cache import CacheDataFile
+
+    data_file = CacheDataFile()
 
     os_list = []
     for part_setup in refresh_payload['partition_setup']:
         if part_setup['os_family'] == OSFamily.UNKNOWN.name:
             del part_setup['os_family']
             continue
+        partnum = int(part_setup['partition'])
+        disknum = int(part_setup['disk'])
         os_data = {
             'name': part_setup['os'],
-            'partition': int(part_setup['partition']),
-            'disk': int(part_setup['disk']),
+            'partition': partnum,
+            'disk': disknum,
             'os': part_setup['os_family'],
-            'image': ''
+            'image': data_file.get_image_name(disknum, partnum)
         }
         del part_setup['os_family']
         os_list.append(os_data)
diff --git a/src/live/ogOperations.py b/src/live/ogOperations.py
index 6b65442..e42e11b 100644
--- a/src/live/ogOperations.py
+++ b/src/live/ogOperations.py
@@ -619,6 +619,10 @@ class OgLiveOperations:
 
         extend_filesystem(disk, partition)
 
+        data_file = CacheDataFile()
+        data_file.add_entry(name + '.img', disk, partition)
+        data_file.save()
+
         if disk == 1:
             configure_os(disk, partition)
 
@@ -800,6 +804,11 @@
             logging.info(f'Removing checksum file {csum_path} from cache')
             os.remove(csum_path)
 
+        data_file = CacheDataFile()
+        for image_name in images:
+            data_file.remove_image(image_name)
+        data_file.save()
+
         result = {'cache': self._get_cache_contents()}
 
         self._restartBrowser(self._url)
diff --git a/src/utils/cache.py b/src/utils/cache.py
index b13aa8e..e8f3734 100644
--- a/src/utils/cache.py
+++ b/src/utils/cache.py
@@ -8,6 +8,7 @@
 
 import logging
 import shutil
+import json
 import os
 
 from src.utils.fs import mount_mkdir, umount
@@ -18,6 +19,7 @@ OG_IMAGE_PATH = '/opt/opengnsys/images/'
 OG_CACHE_PATH = '/opt/opengnsys/cache'
 OG_CACHE_IMAGE_PATH = OG_CACHE_PATH + OG_IMAGE_PATH
 OGCLIENT_LOG_CACHE='/opt/opengnsys/log/{ip}.cache.txt'
+CACHE_FILE_NAME = 'cache.json'
 
 def mount_cache():
     cache_dev = get_cache_dev_path()
@@ -114,3 +116,61 @@ def update_live_cache():
             shutil.copyfile(server_file + sum_extension, client_file + sum_extension)
     else:
         logging.info(f'{client_file} is already up to date')
+
+class CacheDataFile:
+    def __init__(self):
+        self.file_path = os.path.join(OG_CACHE_IMAGE_PATH, CACHE_FILE_NAME)
+        self.data = {'images': []}
+
+        cache_mnt = mount_cache()
+        if not cache_mnt:
+            self.file_path = None
+            return
+
+        if os.path.exists(self.file_path):
+            self.load()
+        else:
+            self.save()
+
+    def load(self):
+        if not self.file_path:
+            return
+
+        try:
+            with open(self.file_path, 'r', encoding='utf-8') as f:
+                self.data = json.load(f)
+        except (json.JSONDecodeError, OSError):
+            self.data = {'images': []}
+
+    def add_entry(self, img_name, disknum, partnum):
+        new_entry = {'name': img_name, 'disk': disknum, 'partition': partnum}
+
+        filtered_images = []
+        for img in self.data['images']:
+            if img['disk'] != disknum or img['partition'] != partnum:
+                filtered_images.append(img)
+
+        self.data['images'] = filtered_images
+        self.data['images'].append(new_entry)
+
+    def remove_image(self, img_name):
+        filtered_images = []
+        for img in self.data['images']:
+            if img['name'] != img_name:
+                filtered_images.append(img)
+
+        self.data['images'] = filtered_images
+
+
+    def get_image_name(self, disknum, partnum):
+        for img in self.data['images']:
+            if img['disk'] == disknum and img['partition'] == partnum:
+                return img['name']
+        return ''
+
+    def save(self):
+        if not self.file_path:
+            return
+
+        with open(self.file_path, 'w', encoding='utf-8') as f:
+            json.dump(self.data, f, indent=2)
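
For reference, a minimal usage sketch (not part of the patch) showing how the new CacheDataFile class is exercised by the call sites above: recording an image after a restore, dropping entries when cached images are deleted, and looking up the image for a kiosk refresh. It assumes an ogLive environment where the ogClient sources are importable and the cache partition can be mounted; the image name 'windows10' is made up for illustration.

# Illustrative only: mirrors the call sites added in this patch.
# Assumes ogClient is on PYTHONPATH and a cache partition exists,
# so mount_cache() inside CacheDataFile.__init__ can succeed.
from src.utils.cache import CacheDataFile

# 1. After restoring an image to disk 1, partition 2 (ogOperations.py),
#    record which image now lives on that partition.
data_file = CacheDataFile()
data_file.add_entry('windows10.img', 1, 2)   # hypothetical image name
data_file.save()

# 2. When images are removed from the cache, drop their entries.
data_file = CacheDataFile()
for image_name in ['windows10.img']:
    data_file.remove_image(image_name)
data_file.save()

# 3. On a kiosk refresh (kiosk.py), look up the image recorded for a
#    disk/partition pair; an empty string means no image is recorded.
data_file = CacheDataFile()
print(data_file.get_image_name(1, 2))        # '' after the removal above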