cache: add cache.json to track cache metadata

Track partitions and images associated to a restore
operation through a cache.json file located in the
cache partition.

cache.json example contents:
{
    "images": [
        {
            "name": "xxx.img",
            "disk": 1,
            "partition": 2
        },
        {
            "name": "xxx.img",
            "disk": 1,
            "partition": 3
        }
    ]
}

Add class CacheDataFile to handle the cache.json
file.

Add an entry to the 'images' array each time a
restore operation is executed successfully,
replacing any entry with the same disk/partition.

Remove entries from the 'images' array on image
delete operations.

Add image info to kiosk configuration. This enables
image restore operations. This information is not
retroactive, only newly restored images will enable
the restore button in Kiosk.
ogkiosk
Alejandro Sirgo Rica 2025-02-12 10:03:11 +01:00
parent 3ae6298f2c
commit 3350c30353
3 changed files with 77 additions and 3 deletions

View File

@ -10,17 +10,22 @@ kiosk_config = {}
def update_kiosk_config(refresh_payload):
from src.utils.probe import OSFamily
from src.utils.cache import CacheDataFile
data_file = CacheDataFile()
os_list = []
for part_setup in refresh_payload['partition_setup']:
if part_setup['os_family'] == OSFamily.UNKNOWN.name:
del part_setup['os_family']
continue
partnum = int(part_setup['partition'])
disknum = int(part_setup['disk'])
os_data = {
'name': part_setup['os'],
'partition': int(part_setup['partition']),
'disk': int(part_setup['disk']),
'partition': partnum,
'disk': disknum,
'os': part_setup['os_family'],
'image': ''
'image': data_file.get_image_name(disknum, partnum)
}
del part_setup['os_family']
os_list.append(os_data)

View File

@ -619,6 +619,10 @@ class OgLiveOperations:
extend_filesystem(disk, partition)
data_file = CacheDataFile()
data_file.add_entry(name + '.img', disk, partition)
data_file.save()
if disk == 1:
configure_os(disk, partition)
@ -800,6 +804,11 @@ class OgLiveOperations:
logging.info(f'Removing checksum file {csum_path} from cache')
os.remove(csum_path)
data_file = CacheDataFile()
for image_name in images:
data_file.remove_image(image_name)
data_file.save()
result = {'cache': self._get_cache_contents()}
self._restartBrowser(self._url)

View File

@ -8,6 +8,7 @@
import logging
import shutil
import json
import os
from src.utils.fs import mount_mkdir, umount
@ -18,6 +19,7 @@ OG_IMAGE_PATH = '/opt/opengnsys/images/'
OG_CACHE_PATH = '/opt/opengnsys/cache'
OG_CACHE_IMAGE_PATH = OG_CACHE_PATH + OG_IMAGE_PATH
OGCLIENT_LOG_CACHE='/opt/opengnsys/log/{ip}.cache.txt'
CACHE_FILE_NAME = 'cache.json'
def mount_cache():
cache_dev = get_cache_dev_path()
@ -114,3 +116,61 @@ def update_live_cache():
shutil.copyfile(server_file + sum_extension, client_file + sum_extension)
else:
logging.info(f'{client_file} is already up to date')
class CacheDataFile:
    """Manage the cache.json metadata file stored in the cache partition.

    The file tracks which image was restored to each disk/partition pair:

        {'images': [{'name': 'xxx.img', 'disk': 1, 'partition': 2}, ...]}

    On construction the cache partition is mounted; if mounting fails the
    instance becomes a no-op (file_path is None and load/save do nothing),
    so callers never have to special-case a missing cache.
    """

    def __init__(self):
        self.file_path = os.path.join(OG_CACHE_IMAGE_PATH, CACHE_FILE_NAME)
        # In-memory state; always a dict with an 'images' list.
        self.data = {'images': []}
        cache_mnt = mount_cache()
        if not cache_mnt:
            # No cache partition available: disable persistence entirely.
            self.file_path = None
            return
        if os.path.exists(self.file_path):
            self.load()
        else:
            # Create the file immediately so later readers find valid JSON.
            self.save()

    def load(self):
        """Load cache metadata from disk, resetting to empty on any error."""
        if not self.file_path:
            return
        try:
            with open(self.file_path, 'r', encoding='utf-8') as f:
                self.data = json.load(f)
        except (json.JSONDecodeError, OSError):
            # Corrupt or unreadable file: start over with empty metadata.
            self.data = {'images': []}

    def add_entry(self, img_name, disknum, partnum):
        """Record that img_name was restored to (disknum, partnum).

        Any previous entry for the *same* disk AND partition is replaced.
        Entries for other partitions (even on the same disk) are kept.
        """
        new_entry = {'name': img_name, 'disk': disknum, 'partition': partnum}
        # Keep every entry except an exact disk/partition match.
        # BUGFIX: the original used 'and', which also dropped entries that
        # merely shared the disk or the partition number with the new entry.
        self.data['images'] = [
            img for img in self.data['images']
            if img['disk'] != disknum or img['partition'] != partnum
        ]
        self.data['images'].append(new_entry)

    def remove_image(self, img_name):
        """Drop every entry whose image name matches img_name."""
        self.data['images'] = [
            img for img in self.data['images']
            if img['name'] != img_name
        ]

    def get_image_name(self, disknum, partnum):
        """Return the image name restored to (disknum, partnum), or ''."""
        for img in self.data['images']:
            if img['disk'] == disknum and img['partition'] == partnum:
                return img['name']
        return ''

    def save(self):
        """Persist current metadata as JSON; no-op when cache is unmounted."""
        if not self.file_path:
            return
        with open(self.file_path, 'w', encoding='utf-8') as f:
            json.dump(self.data, f, indent=2)