Compare commits


No commits in common. "96877fbf07d88f2b10850326d2053b995ee7c701" and "ebcf90e8eae977dbd8170975613c9690f4e88bc9" have entirely different histories.

6 changed files with 532 additions and 1382 deletions

api/gitapi.py 100755 → 100644

@@ -1,103 +1,32 @@
#!/usr/bin/env python3
"""
This module provides a Flask-based API for managing Git repositories in the OpenGnsys system.
It includes endpoints for creating, deleting, synchronizing, backing up, and performing garbage
collection on Git repositories. The API also provides endpoints for retrieving repository
information such as the list of repositories and branches, as well as checking the status of
asynchronous tasks.
Classes:
None
Functions:
do_repo_backup(repo, params)
do_repo_sync(repo, params)
do_repo_gc(repo)
home()
get_repositories()
create_repo(repo)
sync_repo(repo)
backup_repository(repo)
gc_repo(repo)
tasks_status(task_id)
delete_repo(repo)
get_repository_branches(repo)
health_check()
Constants:
REPOSITORIES_BASE_PATH (str): The base path where Git repositories are stored.
Global Variables:
app (Flask): The Flask application instance.
executor (Executor): The Flask-Executor instance for managing asynchronous tasks.
tasks (dict): A dictionary to store the status of asynchronous tasks.
"""
# pylint: disable=locally-disabled, line-too-long
from flask import Flask, jsonify
import os.path
import os
import shutil
import uuid
import git
import time
import shutil
import subprocess
import uuid
from opengnsys_git_installer import OpengnsysGitInstaller
from flask import Flask, request, jsonify # stream_with_context, Response,
from flask import Flask, request
from flask_executor import Executor
from flask_restx import Api, Resource, fields
#from flasgger import Swagger
import subprocess
from flask import stream_with_context, Response
import paramiko
REPOSITORIES_BASE_PATH = "/opt/opengnsys/images"
start_time = time.time()
tasks = {}
repositories_base_path = "/opt/opengnsys/images"
# Create an instance of the Flask class
app = Flask(__name__)
api = Api(app,
version='0.50',
title = "OpenGnsys Git API",
description = "API for managing disk images stored in Git",
doc = "/swagger/")
git_ns = api.namespace(name = "oggit", description = "Git operations", path = "/oggit/v1")
executor = Executor(app)
tasks = {}
def do_repo_backup(repo, params):
"""
Creates a backup of the specified Git repository and uploads it to a remote server via SFTP.
Args:
repo (str): The name of the repository to back up.
params (dict): A dictionary containing the following keys:
- ssh_server (str): The SSH server address.
- ssh_port (int): The SSH server port.
- ssh_user (str): The SSH username.
- filename (str): The remote filename where the backup will be stored.
Returns:
bool: True if the backup was successful.
"""
gitrepo = git.Repo(f"{REPOSITORIES_BASE_PATH}/{repo}.git")
gitrepo = git.Repo(f"{repositories_base_path}/{repo}.git")
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@@ -113,380 +42,320 @@ def do_repo_backup(repo, params):
return True
def do_repo_sync(repo, params):
"""
Synchronizes a local Git repository with a remote repository.
Args:
repo (str): The name of the local repository to synchronize.
params (dict): A dictionary containing the remote repository URL with the key "remote_repository".
Returns:
list: A list of dictionaries, each containing:
- "local_ref" (str): The name of the local reference.
- "remote_ref" (str): The name of the remote reference.
- "summary" (str): A summary of the push operation for the reference.
"""
gitrepo = git.Repo(f"{REPOSITORIES_BASE_PATH}/{repo}.git")
gitrepo = git.Repo(f"{repositories_base_path}/{repo}.git")
# Recreate the remote every time, it might change
if "backup" in gitrepo.remotes:
gitrepo.delete_remote("backup")
backup_repo = gitrepo.create_remote("backup", params["remote_repository"])
pushed_references = backup_repo.push("*:*")
pushrets = backup_repo.push("*:*")
results = []
# This gets returned to the API
for ref in pushed_references:
results = results + [ {"local_ref" : ref.local_ref.name, "remote_ref" : ref.remote_ref.name, "summary" : ref.summary }]
for ret in pushrets:
results = results + [ {"local_ref" : ret.local_ref.name, "remote_ref" : ret.remote_ref.name, "summary" : ret.summary }]
return results
def do_repo_gc(repo):
"""
Perform garbage collection on the specified Git repository.
Args:
repo (str): The name of the repository to perform garbage collection on.
Returns:
bool: True if the garbage collection command was executed successfully.
"""
gitrepo = git.Repo(f"{REPOSITORIES_BASE_PATH}/{repo}.git")
gitrepo = git.Repo(f"{repositories_base_path}/{repo}.git")
gitrepo.git.gc()
return True
# Define a route for the root URL
@api.route('/')
class GitLib(Resource):
@app.route('/')
def home():
"""
Home route that returns a JSON response with a welcome message for the OpenGnsys Git API.
@api.doc('home')
def get(self):
"""
Home route that returns a JSON response with a welcome message for the OpenGnsys Git API.
Returns:
Response: A Flask JSON response containing a welcome message.
"""
return jsonify({
"message": "OpenGnsys Git API"
})
Returns:
Response: A Flask JSON response containing a welcome message.
"""
return {
"message": "OpenGnsys Git API"
}
@app.route('/repositories')
def get_repositories():
"""
Retrieve a list of Git repositories.
@git_ns.route('/oggit/v1/repositories')
class GitRepositories(Resource):
def get(self):
"""
Retrieve a list of Git repositories.
This endpoint scans the OpenGnsys image path for directories that
appear to be Git repositories (i.e., they contain a "HEAD" file).
It returns a JSON response containing the names of these repositories.
This endpoint scans the OpenGnsys image path for directories that
appear to be Git repositories (i.e., they contain a "HEAD" file).
It returns a JSON response containing the names of these repositories.
Returns:
Response: A JSON response with a list of repository names or an
error message if the repository storage is not found.
- 200 OK: When the repositories are successfully retrieved.
- 500 Internal Server Error: When the repository storage is not found.
Returns:
Response: A JSON response with a list of repository names or an
error message if the repository storage is not found.
- 200 OK: When the repositories are successfully retrieved.
- 500 Internal Server Error: When the repository storage is not found.
Example JSON response:
{
"repositories": ["repo1", "repo2"]
}
"""
Example JSON response:
{
"repositories": ["repo1", "repo2"]
}
"""
if not os.path.isdir(repositories_base_path):
return jsonify({"error": "Repository storage not found, git functionality may not be installed."}), 500
if not os.path.isdir(REPOSITORIES_BASE_PATH):
return jsonify({"error": "Repository storage not found, git functionality may not be installed."}), 500
repos = []
for entry in os.scandir(repositories_base_path):
if entry.is_dir(follow_symlinks=False) and os.path.isfile(os.path.join(entry.path, "HEAD")):
name = entry.name
if name.endswith(".git"):
name = name[:-4]
repos = []
for entry in os.scandir(REPOSITORIES_BASE_PATH):
if entry.is_dir(follow_symlinks=False) and os.path.isfile(os.path.join(entry.path, "HEAD")):
name = entry.name
if name.endswith(".git"):
name = name[:-4]
repos = repos + [name]
repos = repos + [name]
return jsonify({
"repositories": repos
})
return jsonify({
"repositories": repos
})
@app.route('/repositories/<repo>', methods=['PUT'])
def create_repo(repo):
"""
Create a new Git repository.
def post(self):
"""
Create a new Git repository.
This endpoint creates a new Git repository with the specified name.
If the repository already exists, it returns a status message indicating so.
This endpoint creates a new Git repository with the specified name.
If the repository already exists, it returns a status message indicating so.
Args:
repo (str): The name of the repository to be created.
Args:
repo (str): The name of the repository to be created.
Returns:
Response: A JSON response with a status message and HTTP status code.
- 200: If the repository already exists.
- 201: If the repository is successfully created.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if os.path.isdir(repo_path):
return jsonify({"status": "Repository already exists"}), 200
Returns:
Response: A JSON response with a status message and HTTP status code.
- 200: If the repository already exists.
- 201: If the repository is successfully created.
"""
data = request.json
if data is None:
return jsonify({"error" : "Parameters missing"}), 400
installer = OpengnsysGitInstaller()
installer._init_git_repo(repo + ".git")
repo = data["name"]
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if os.path.isdir(repo_path):
return jsonify({"status": "Repository already exists"}), 200
return jsonify({"status": "Repository created"}), 201
installer = OpengnsysGitInstaller()
installer.add_forgejo_repo(repo)
@app.route('/repositories/<repo>/sync', methods=['POST'])
def sync_repo(repo):
"""
Synchronize a repository with a remote repository.
#installer.init_git_repo(repo + ".git")
This endpoint triggers the synchronization process for a specified repository.
It expects a JSON payload with the remote repository details.
Args:
repo (str): The name of the repository to be synchronized.
return jsonify({"status": "Repository created"}), 201
Returns:
Response: A JSON response indicating the status of the synchronization process.
- 200: If the synchronization process has started successfully.
- 400: If the request payload is missing or invalid.
- 404: If the specified repository is not found.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
@git_ns.route('/oggit/v1/repositories/<repo>/sync')
class GitRepoSync(Resource):
def post(self, repo):
"""
Synchronize a repository with a remote repository.
data = request.json
This endpoint triggers the synchronization process for a specified repository.
It expects a JSON payload with the remote repository details.
if data is None:
return jsonify({"error" : "Parameters missing"}), 400
Args:
repo (str): The name of the repository to be synchronized.
dest_repo = data["remote_repository"]
Returns:
Response: A JSON response indicating the status of the synchronization process.
- 200: If the synchronization process has started successfully.
- 400: If the request payload is missing or invalid.
- 404: If the specified repository is not found.
"""
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
future = executor.submit(do_repo_sync, repo, data)
task_id = str(uuid.uuid4())
tasks[task_id] = future
return jsonify({"status": "started", "task_id" : task_id}), 200
data = request.json
if data is None:
return jsonify({"error" : "Parameters missing"}), 400
@app.route('/repositories/<repo>/backup', methods=['POST'])
def backup_repo(repo):
"""
Backup a specified repository.
future = executor.submit(do_repo_sync, repo, data)
task_id = str(uuid.uuid4())
tasks[task_id] = future
return jsonify({"status": "started", "task_id" : task_id}), 200
Endpoint: POST /repositories/<repo>/backup
Args:
repo (str): The name of the repository to back up.
Request Body (JSON):
ssh_port (int, optional): The SSH port to use for the backup. Defaults to 22.
@git_ns.route('/oggit/v1/repositories/<repo>/backup')
class GitRepoBackup(Resource):
def backup_repository(self, repo):
"""
Backup a specified repository.
Returns:
Response: A JSON response indicating the status of the backup operation.
- If the repository is not found, returns a 404 error with a message.
- If the request body is missing, returns a 400 error with a message.
- If the backup process starts successfully, returns a 200 status with the task ID.
Endpoint: POST /repositories/<repo>/backup
Notes:
- The repository path is constructed by appending ".git" to the repository name.
- The backup operation is performed asynchronously using a thread pool executor.
- The task ID of the backup operation is generated using UUID and stored in a global tasks dictionary.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
Args:
repo (str): The name of the repository to back up.
Request Body (JSON):
ssh_port (int, optional): The SSH port to use for the backup. Defaults to 22.
data = request.json
if data is None:
return jsonify({"error" : "Parameters missing"}), 400
Returns:
Response: A JSON response indicating the status of the backup operation.
- If the repository is not found, returns a 404 error with a message.
- If the request body is missing, returns a 400 error with a message.
- If the backup process starts successfully, returns a 200 status with the task ID.
Notes:
- The repository path is constructed by appending ".git" to the repository name.
- The backup operation is performed asynchronously using a thread pool executor.
- The task ID of the backup operation is generated using UUID and stored in a global tasks dictionary.
"""
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
if not "ssh_port" in data:
data["ssh_port"] = 22
data = request.json
if data is None:
return jsonify({"error" : "Parameters missing"}), 400
future = executor.submit(do_repo_backup, repo, data)
task_id = str(uuid.uuid4())
tasks[task_id] = future
return jsonify({"status": "started", "task_id" : task_id}), 200
if not "ssh_port" in data:
data["ssh_port"] = 22
@app.route('/repositories/<repo>/gc', methods=['POST'])
def gc_repo(repo):
"""
Initiates a garbage collection (GC) process for a specified Git repository.
This endpoint triggers an asynchronous GC task for the given repository.
The task is submitted to an executor, and a unique task ID is generated
and returned to the client.
future = executor.submit(do_repo_backup, repo, data)
task_id = str(uuid.uuid4())
tasks[task_id] = future
Args:
repo (str): The name of the repository to perform GC on.
return jsonify({"status": "started", "task_id" : task_id}), 200
Returns:
Response: A JSON response containing the status of the request and
a unique task ID if the repository is found, or an error
message if the repository is not found.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
@git_ns.route('/oggit/v1/repositories/<repo>/compact', methods=['POST'])
class GitRepoCompact(Resource):
def post(self, repo):
"""
Initiates a garbage collection (GC) process for a specified Git repository.
future = executor.submit(do_repo_gc, repo)
task_id = str(uuid.uuid4())
tasks[task_id] = future
This endpoint triggers an asynchronous GC task for the given repository.
The task is submitted to an executor, and a unique task ID is generated
and returned to the client.
return jsonify({"status": "started", "task_id" : task_id}), 200
Args:
repo (str): The name of the repository to perform GC on.
Returns:
Response: A JSON response containing the status of the request and
a unique task ID if the repository is found, or an error
message if the repository is not found.
"""
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
@app.route('/tasks/<task_id>/status')
def tasks_status(task_id):
"""
Endpoint to check the status of a specific task.
future = executor.submit(do_repo_gc, repo)
task_id = str(uuid.uuid4())
tasks[task_id] = future
Args:
task_id (str): The unique identifier of the task.
return jsonify({"status": "started", "task_id" : task_id}), 200
Returns:
Response: A JSON response containing the status of the task.
- If the task is not found, returns a 404 error with an error message.
- If the task is completed, returns a 200 status with the result.
- If the task is still in progress, returns a 202 status indicating the task is in progress.
"""
if not task_id in tasks:
return jsonify({"error": "Task not found"}), 404
future = tasks[task_id]
@git_ns.route('/oggit/v1/tasks/<task_id>/status')
class GitTaskStatus(Resource):
def get(self, task_id):
"""
Endpoint to check the status of a specific task.
if future.done():
result = future.result()
return jsonify({"status" : "completed", "result" : result}), 200
else:
return jsonify({"status" : "in progress"}), 202
Args:
task_id (str): The unique identifier of the task.
Returns:
Response: A JSON response containing the status of the task.
- If the task is not found, returns a 404 error with an error message.
- If the task is completed, returns a 200 status with the result.
- If the task is still in progress, returns a 202 status indicating the task is in progress.
"""
if not task_id in tasks:
return jsonify({"error": "Task not found"}), 404
future = tasks[task_id]
@app.route('/repositories/<repo>', methods=['DELETE'])
def delete_repo(repo):
"""
Deletes a Git repository.
if future.done():
result = future.result()
return jsonify({"status" : "completed", "result" : result}), 200
else:
return jsonify({"status" : "in progress"}), 202
This endpoint deletes a Git repository specified by the `repo` parameter.
If the repository does not exist, it returns a 404 error with a message
indicating that the repository was not found. If the repository is successfully
deleted, it returns a 200 status with a message indicating that the repository
was deleted.
Args:
repo (str): The name of the repository to delete.
Returns:
Response: A JSON response with a status message and the appropriate HTTP status code.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
@git_ns.route('/oggit/v1/repositories/<repo>', methods=['DELETE'])
class GitRepo(Resource):
def delete(self, repo):
"""
Deletes a Git repository.
This endpoint deletes a Git repository specified by the `repo` parameter.
If the repository does not exist, it returns a 404 error with a message
indicating that the repository was not found. If the repository is successfully
deleted, it returns a 200 status with a message indicating that the repository
was deleted.
shutil.rmtree(repo_path)
return jsonify({"status": "Repository deleted"}), 200
Args:
repo (str): The name of the repository to delete.
Returns:
Response: A JSON response with a status message and the appropriate HTTP status code.
"""
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
shutil.rmtree(repo_path)
return jsonify({"status": "Repository deleted"}), 200
@app.route('/repositories/<repo>/branches')
def get_repository_branches(repo):
"""
Retrieve the list of branches for a given repository.
Args:
repo (str): The name of the repository.
Returns:
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
- 200: A JSON object with a "branches" key containing a list of branch names.
- 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
"""
repo_path = os.path.join(repositories_base_path, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
gitRepo = git.Repo(repo_path)
@git_ns.route('/oggit/v1/repositories/<repo>/branches')
class GitRepoBranches(Resource):
def get(self, repo):
"""
Retrieve the list of branches for a given repository.
branches = []
for branch in gitRepo.branches:
branches = branches + [branch.name]
Args:
repo (str): The name of the repository.
Returns:
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
- 200: A JSON object with a "branches" key containing a list of branch names.
- 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
"""
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
if not os.path.isdir(repo_path):
return jsonify({"error": "Repository not found"}), 404
return jsonify({
"branches": branches
})
git_repo = git.Repo(repo_path)
branches = []
for branch in git_repo.branches:
branches = branches + [branch.name]
return jsonify({
"branches": branches
})
@app.route('/health')
def health_check():
"""
Health check endpoint.
This endpoint returns a JSON response indicating the health status of the application.
Returns:
Response: A JSON response with a status key set to "OK". Currently it always returns
a successful value, but this endpoint can still be used to check that the API is
active and functional.
@git_ns.route('/health')
class GitHealth(Resource):
def get(self):
"""
Health check endpoint.
This endpoint returns a JSON response indicating the health status of the application.
Returns:
Response: A JSON response with a status key set to "OK". Currently it always returns
a successful value, but this endpoint can still be used to check that the API is
active and functional.
"""
return {
"status": "OK"
}
@git_ns.route('/status')
class GitStatus(Resource):
def get(self):
"""
Status check endpoint.
This endpoint returns a JSON response indicating the status of the application.
Returns:
Response: A JSON response with status information
"""
return {
"uptime" : time.time() - start_time,
"active_tasks" : len(tasks)
}
api.add_namespace(git_ns)
"""
return jsonify({
"status": "OK"
})
# Run the Flask app
if __name__ == '__main__':
print(f"Map: {app.url_map}")
app.run(debug=True, host='0.0.0.0')
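
The endpoints above all follow the same asynchronous pattern: sync, backup and compaction requests return a task_id immediately, and the client polls the task status endpoint until the executor reports the task as finished. A minimal client sketch of that flow (the base URL, port and repository name are assumptions; adjust them to the route prefix actually deployed):

import time
import requests

BASE_URL = "http://localhost:5000/oggit/v1"  # assumed host, port and prefix

# Ask the API to synchronize the "linux" repository with a remote (example URL)
response = requests.post(f"{BASE_URL}/repositories/linux/sync",
                         json={"remote_repository": "git@backup.example.com:linux.git"},
                         timeout=30)
task_id = response.json()["task_id"]

# Poll the status endpoint until the background task leaves the "in progress" state
while True:
    status = requests.get(f"{BASE_URL}/tasks/{task_id}/status", timeout=30).json()
    if status["status"] != "in progress":
        break
    time.sleep(1)

print(status)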


@@ -1,34 +0,0 @@
aniso8601==9.0.1
attrs==24.2.0
bcrypt==4.2.0
blinker==1.8.2
cffi==1.17.1
click==8.1.7
cryptography==43.0.1
dataclasses==0.6
flasgger==0.9.7.1
Flask==3.0.3
Flask-Executor==1.0.0
flask-restx==1.3.0
gitdb==4.0.11
GitPython==3.1.43
importlib_resources==6.4.5
itsdangerous==2.2.0
Jinja2==3.1.4
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
libarchive-c==5.1
MarkupSafe==3.0.1
mistune==3.0.2
packaging==24.1
paramiko==3.5.0
pycparser==2.22
PyNaCl==1.5.0
pytz==2024.2
PyYAML==6.0.2
referencing==0.35.1
rpds-py==0.20.0
six==1.16.0
smmap==5.0.1
termcolor==2.5.0
Werkzeug==3.0.4


@@ -1,106 +0,0 @@
#!/usr/bin/env python3
import hivex
import argparse
from hivex import Hivex
from hivex.hive_types import *
# Docs:
#
# https://www.geoffchappell.com/notes/windows/boot/bcd/objects.htm
# https://learn.microsoft.com/en-us/previous-versions/windows/desktop/bcd/bcdbootmgrelementtypes
#print(f"Root: {root}")
def dump_all(root, depth = 0):
padding = "\t" * depth
children = bcd.node_children(root)
if len(children) > 0:
for child in children:
name = bcd.node_name(child)
print(f"{padding}{name}")
dump_all(child, depth + 1)
# print(f"Child: {child}")
#print(f"Values: {num_vals}")
return
values = bcd.node_values(root)
#print(f"Value list: {values}")
for v in values:
# print(f"\tValue: {v}")
name = bcd.value_key(v)
(type, length) = bcd.value_type(v)
tname = ""
value = ""
if type == REG_SZ:
tname = "SZ"
value = bcd.value_string(v)
elif type == REG_DWORD:
tname = "DWORD"
dval = bcd.value_dword(v)
value = hex(dval) + " (" + str(bcd.value_dword(v)) + ")"
elif type == REG_BINARY:
tname = "BIN"
(length, value) = bcd.value_value(v)
value = value.hex()
elif type == REG_DWORD_BIG_ENDIAN:
tname = "DWORD_BE"
elif type == REG_EXPAND_SZ:
tname = "EXPAND SZ"
elif type == REG_FULL_RESOURCE_DESCRIPTOR:
tname = "RES DESC"
elif type == REG_LINK:
tname = "LINK"
elif type == REG_MULTI_SZ:
tname = "MULTISZ"
(length, value) = bcd.value_value(v)
value = value.decode('utf-16le')
value = value.replace("\0", ";")
#value = ";".join("\0".split(value))
elif type == REG_NONE:
tname = "NONE"
elif type == REG_QWORD:
tname = "QWORD"
elif type == REG_RESOURCE_LIST:
tname = "RES LIST"
elif type == REG_RESOURCE_REQUIREMENTS_LIST:
tname = "REQ LIST"
else:
tname = str(type)
value = "???"
#value = bcd.value_string(v)
print(f"{padding}{name: <16}: [{tname: <10}]; ({length: < 4}) {value}")
parser = argparse.ArgumentParser(
prog="Windows BCD parser",
description="Parses the BCD",
)
parser.add_argument("--dump", type=str, metavar='BCD file', help="Dumps the specified database")
args = parser.parse_args()
if args.dump:
# "/home/vadim/opengnsys/winboot/boot-copy/EFI/Microsoft/Boot/BCD"
bcd = Hivex(args.dump)
root = bcd.root()
dump_all(root)
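
Assuming the script above is saved as, say, bcd_dump.py (the name is illustrative), it can be pointed at a BCD hive copied from an EFI system partition with something like `python3 bcd_dump.py --dump EFI/Microsoft/Boot/BCD`, which walks the hive recursively and prints every element with its registry value type and a decoded value.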

File diff suppressed because it is too large


@@ -7,19 +7,8 @@ Python dependencies are installed using the venv module (https://docs.python.
# Quick installation
## Ubuntu 24.04
sudo apt install python3-git python3-libarchive-c python3-termcolor bsdextrautils
## Older distributions (< 24.04)
sudo apt install python3 python3-venv libarchive-dev
python3 -m venv venvog
. venvog/bin/activate
python3 -m pip install --upgrade pip
pip3 install -r requirements.txt
## Very old distributions (18.04, unsupported)
## Older distributions (18.04)
**Note:** On 18.04, `uname` is only found in `/bin`, which causes a harmless error in the log during repository creation:
@@ -39,6 +28,13 @@ Run with:
python3.8 ./opengnsys_git_installer.py
## Newer distributions (22.04)
sudo apt install python3 python3-venv libarchive-dev
python3 -m venv venvog
. venvog/bin/activate
python3 -m pip install --upgrade pip
pip3 install -r requirements.txt
## Add an SSH key if necessary
@@ -55,15 +51,6 @@ to add it.
# . venv/bin/activate
# ./opengnsys_git_installer.py
The installer downloads and installs Forgejo, a web interface for Git. The configuration is generated automatically.
Forgejo manages the repositories and SSH access, so it must be left running at all times. By default it is installed on port 3000.
The default user is `opengnsys` with password `opengnsys`.
# Documentation
Python documentation can be generated with a tool such as pdoc3 (there are several possible alternatives):


@@ -10,21 +10,9 @@ import subprocess
import sys
import pwd
import grp
from termcolor import cprint
from termcolor import colored, cprint
import git
import libarchive
import urllib.request
import pathlib
import socket
import time
import requests
#FORGEJO_VERSION="8.0.3"
FORGEJO_VERSION="9.0.0"
FORGEJO_URL=f"https://codeberg.org/forgejo/forgejo/releases/download/v{FORGEJO_VERSION}/forgejo-{FORGEJO_VERSION}-linux-amd64"
def show_error(*args):
@@ -39,7 +27,6 @@ def show_error(*args):
"""
cprint(*args, "red", attrs = ["bold"], file=sys.stderr)
class RequirementException(Exception):
"""Excepción que indica que nos falta algún requisito
@@ -113,17 +100,13 @@ class OpengnsysGitInstaller:
self.testmode = False
self.base_path = "/opt/opengnsys"
self.git_basedir = "base.git"
self.email = "OpenGnsys@opengnsys.com"
self.forgejo_user = "oggit"
self.forgejo_password = "opengnsys"
self.forgejo_organization = "opengnsys"
self.forgejo_port = 3000
self.set_ssh_user_group("oggit", "oggit")
self.ssh_user = "opengnsys"
self.ssh_group = "opengnsys"
self.ssh_homedir = pwd.getpwnam(self.ssh_user).pw_dir
self.ssh_uid = pwd.getpwnam(self.ssh_user).pw_uid
self.ssh_gid = grp.getgrnam(self.ssh_group).gr_gid
self.temp_dir = None
self.script_path = os.path.realpath(os.path.dirname(__file__))
# Possible names for SSH key
self.key_paths = ["scripts/ssl/id_rsa.pub", "scripts/ssl/id_ed25519.pub", "scripts/ssl/id_ecdsa.pub", "scripts/ssl/id_ed25519_sk.pub", "scripts/ssl/id_ecdsa_sk.pub"]
@@ -174,33 +157,7 @@ class OpengnsysGitInstaller:
if self.temp_dir:
shutil.rmtree(self.temp_dir, ignore_errors=True)
def set_ssh_user_group(self, username, groupname):
self.ssh_group = groupname
self.ssh_user = username
try:
self.ssh_gid = grp.getgrnam(self.ssh_group).gr_gid
self.__logger.info("Group %s exists with gid %i", self.ssh_group, self.ssh_gid)
except KeyError:
self.__logger.info("Need to create group %s", self.ssh_group)
subprocess.run(["/usr/sbin/groupadd", "--system", self.ssh_group], check=True)
self.ssh_gid = grp.getgrnam(groupname).gr_gid
try:
self.ssh_uid = pwd.getpwnam(self.ssh_user).pw_uid
self.__logger.info("User %s exists with gid %i", self.ssh_user, self.ssh_uid)
except KeyError:
self.__logger.info("Need to create user %s", self.ssh_user)
subprocess.run(["/usr/sbin/useradd", "--gid", str(self.ssh_gid), "-m", "--system", self.ssh_user], check=True)
self.ssh_uid = pwd.getpwnam(username).pw_uid
self.ssh_homedir = pwd.getpwnam(username).pw_dir
def init_git_repo(self, reponame):
def _init_git_repo(self, reponame):
"""Inicializa un repositorio Git"""
# Creamos repositorio
ogdir_images = os.path.join(self.base_path, "images")
@@ -223,7 +180,7 @@ class OpengnsysGitInstaller:
self.__logger.info("Configurando repositorio de GIT")
repo.config_writer().set_value("user", "name", "OpenGnsys").release()
repo.config_writer().set_value("user", "email", self.email).release()
repo.config_writer().set_value("user", "email", "OpenGnsys@opengnsys.com").release()
self._recursive_chown(repo_path, ouid=self.ssh_uid, ogid=self.ssh_gid)
@@ -252,71 +209,6 @@ class OpengnsysGitInstaller:
for filename in filenames:
os.chown(os.path.join(dirpath, filename), uid=ouid, gid=ogid)
def _wait_for_port(self, host, port):
self.__logger.info("Waiting for %s:%i to be up", host, port)
timeout = 60
start_time = time.time()
ready = False
while not ready and (time.time() - start_time) < 60:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((host, port))
ready = True
s.close()
except TimeoutError:
self.__logger.debug("Timed out, no connection yet.")
except OSError as oserr:
self.__logger.debug("%s, no connection yet. %.1f seconds left.", oserr.strerror, timeout - (time.time() - start_time))
time.sleep(0.1)
if ready:
self.__logger.info("Connection established.")
else:
self.__logger.error("Timed out waiting for connection!")
raise TimeoutError("Timed out waiting for connection!")
def _extract_ssh_key(self):
public_key=""
INITRD = "oginitrd.img"
tftp_dir = os.path.join(self.base_path, "tftpboot")
default_num = self.oglive.get_default()
default_client = self.oglive.get_clients()[default_num]
client_initrd_path = os.path.join(tftp_dir, default_client, INITRD)
#self.temp_dir = self._get_tempdir()
if self.usesshkey:
with open(self.usesshkey, 'r') as f:
public_key = f.read().strip()
else:
if os.path.isfile(client_initrd_path):
#os.makedirs(temp_dir, exist_ok=True)
#os.chdir(self.temp_dir.name)
self.__logger.debug("Descomprimiendo %s", client_initrd_path)
public_key = None
with libarchive.file_reader(client_initrd_path) as initrd:
for file in initrd:
#self.__logger.debug("Archivo: %s", file)
if file.pathname in self.key_paths_dict:
data = bytearray()
for block in file.get_blocks():
data = data + block
public_key = data.decode('utf-8').strip()
break
else:
print(f"No se encuentra la imagen de initrd {client_initrd_path}")
exit(2)
return public_key
def install(self):
"""Instalar
@@ -377,7 +269,31 @@ class OpengnsysGitInstaller:
if not self.ignoresshkey:
public_key = self._extract_ssh_key()
public_key=""
if self.usesshkey:
with open(self.usesshkey, 'r') as f:
public_key = f.read().strip()
else:
if os.path.isfile(client_initrd_path):
#os.makedirs(temp_dir, exist_ok=True)
os.chdir(self.temp_dir.name)
self.__logger.debug("Descomprimiendo %s", client_initrd_path)
public_key = None
with libarchive.file_reader(client_initrd_path) as initrd:
for file in initrd:
self.__logger.debug("Archivo: %s", file)
if file.pathname in self.key_paths_dict:
data = bytearray()
for block in file.get_blocks():
data = data + block
public_key = data.decode('utf-8').strip()
break
else:
print(f"No se encuentra la imagen de initrd {client_initrd_path}")
exit(2)
# If the public key does not exist, exit with an error
if not public_key:
@@ -414,225 +330,13 @@ class OpengnsysGitInstaller:
os.system(f"usermod -s {SHELL} opengnsys")
# Create the repositories
#self.init_git_repo('windows.git')
#self.init_git_repo('linux.git')
#self.init_git_repo('mac.git')
self._init_git_repo('windows.git')
self._init_git_repo('linux.git')
self._init_git_repo('mac.git')
# Give the opengnsys user permissions
#for DIR in ["base.git", "linux.git", "windows.git"]: #, "LinAcl", "WinAcl"]:
# self._recursive_chown(os.path.join(ogdir_images, DIR), ouid=self.ssh_uid, ogid=self.ssh_gid)
def _install_template(self, template, destination, keysvalues):
self.__logger.info("Writing template %s into %s", template, destination)
data = ""
with open(template, "r", encoding="utf-8") as template_file:
data = template_file.read()
for key in keysvalues.keys():
data = data.replace("{" + key + "}", keysvalues[key])
with open(destination, "w+", encoding="utf-8") as out_file:
out_file.write(data)
def _runcmd(self, cmd):
self.__logger.debug("Running: %s", cmd)
ret = subprocess.run(cmd, check=True,capture_output=True, encoding='utf-8')
return ret.stdout.strip()
def install_forgejo(self):
self.__logger.info("Installing Forgejo")
bin_path = os.path.join(self.base_path, "bin", "forgejo")
conf_dir_path = os.path.join(self.base_path, "etc", "forgejo")
lfs_dir_path = os.path.join(self.base_path, "images", "git-lfs")
git_dir_path = os.path.join(self.base_path, "images", "git")
forgejo_work_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/work")
forgejo_db_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/db")
forgejo_data_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/data")
forgejo_db_path = os.path.join(forgejo_db_dir_path, "forgejo.db")
forgejo_log_dir_path = os.path.join(self.base_path, "log", "forgejo")
conf_path = os.path.join(conf_dir_path, "app.ini")
self.__logger.debug("Stopping opengnsys-forgejo service")
subprocess.run(["systemctl", "stop", "opengnsys-forgejo"], check=False)
if not os.path.exists(bin_path):
self.__logger.debug("Downloading from %s into %s", FORGEJO_URL, bin_path)
urllib.request.urlretrieve(FORGEJO_URL, bin_path)
os.chmod(bin_path, 0o755)
if os.path.exists(forgejo_db_path):
self.__logger.debug("Removing old configuration")
os.unlink(forgejo_db_path)
else:
self.__logger.debug("Old configuration not present, ok.")
self.__logger.debug("Wiping old data")
for dir in [conf_dir_path, git_dir_path, lfs_dir_path, forgejo_work_dir_path, forgejo_data_dir_path, forgejo_db_dir_path]:
if os.path.exists(dir):
self.__logger.debug("Removing %s", dir)
shutil.rmtree(dir)
self.__logger.debug("Creating directories")
pathlib.Path(conf_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(git_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(lfs_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(forgejo_work_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(forgejo_data_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(forgejo_db_dir_path).mkdir(parents=True, exist_ok=True)
pathlib.Path(forgejo_log_dir_path).mkdir(parents=True, exist_ok=True)
os.chown(lfs_dir_path, self.ssh_uid, self.ssh_gid)
os.chown(git_dir_path, self.ssh_uid, self.ssh_gid)
os.chown(forgejo_data_dir_path, self.ssh_uid, self.ssh_gid)
os.chown(forgejo_work_dir_path, self.ssh_uid, self.ssh_gid)
os.chown(forgejo_db_dir_path, self.ssh_uid, self.ssh_gid)
os.chown(forgejo_log_dir_path, self.ssh_uid, self.ssh_gid)
data = {
"forgejo_user" : self.ssh_user,
"forgejo_group" : self.ssh_group,
"forgejo_port" : str(self.forgejo_port),
"forgejo_bin" : bin_path,
"forgejo_app_ini" : conf_path,
"forgejo_work_path" : forgejo_work_dir_path,
"forgejo_data_path" : forgejo_data_dir_path,
"forgejo_db_path" : forgejo_db_path,
"forgejo_repository_root" : git_dir_path,
"forgejo_lfs_path" : lfs_dir_path,
"forgejo_log_path" : forgejo_log_dir_path,
"forgejo_hostname" : self._runcmd("hostname"),
"forgejo_lfs_jwt_secret" : self._runcmd([bin_path,"generate", "secret", "LFS_JWT_SECRET"]),
"forgejo_jwt_secret" : self._runcmd([bin_path,"generate", "secret", "JWT_SECRET"]),
"forgejo_internal_token" : self._runcmd([bin_path,"generate", "secret", "INTERNAL_TOKEN"]),
"forgejo_secret_key" : self._runcmd([bin_path,"generate", "secret", "SECRET_KEY"])
}
self._install_template(os.path.join(self.script_path, "forgejo-app.ini"), conf_path, data)
self._install_template(os.path.join(self.script_path, "forgejo.service"), "/etc/systemd/system/opengnsys-forgejo.service", data)
self.__logger.debug("Reloading systemd and starting service")
subprocess.run(["systemctl", "daemon-reload"], check=True)
subprocess.run(["systemctl", "enable", "opengnsys-forgejo"], check=True)
subprocess.run(["systemctl", "restart", "opengnsys-forgejo"], check=True)
self.__logger.info("Waiting for forgejo to start")
self._wait_for_port("localhost", self.forgejo_port)
self.__logger.info("Configuring forgejo")
def run_forge_cmd(args):
cmd = [bin_path, "--config", conf_path] + args
self.__logger.debug("Running command: %s", cmd)
ret = subprocess.run(cmd, check=False, capture_output=True, encoding='utf-8', user=self.ssh_user)
if ret.returncode == 0:
return ret.stdout.strip()
else:
self.__logger.error("Failed to run command: %s, return code %i", cmd, ret.returncode)
self.__logger.error("stdout: %s", ret.stdout)
self.__logger.error("stderr: %s", ret.stderr)
raise RuntimeError("Failed to run necessary command")
run_forge_cmd(["admin", "doctor", "check"])
run_forge_cmd(["admin", "user", "create", "--username", self.forgejo_user, "--password", self.forgejo_password, "--email", self.email])
token = run_forge_cmd(["admin", "user", "generate-access-token", "--username", self.forgejo_user, "-t", "gitapi", "--scopes", "all", "--raw"])
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "w+", encoding='utf-8') as token_file:
token_file.write(token)
ssh_key = self._extract_ssh_key()
self.add_forgejo_sshkey(ssh_key, "Default key")
def add_forgejo_repo(self, repository_name, description = ""):
token = ""
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "r", encoding='utf-8') as token_file:
token = token_file.read().strip()
self.__logger.info("Adding repository %s for Forgejo", repository_name)
r = requests.post(
f"http://localhost:{self.forgejo_port}/api/v1/user/repos",
json={
"auto_init" : False,
"default_branch" : "main",
"description" : description,
"name" : repository_name,
"private" : False
}, headers={
'Authorization' : f"token {token}"
},
timeout = 60
)
self.__logger.info("Request status was %i", r.status_code)
def add_forgejo_sshkey(self, pubkey, description = ""):
token = ""
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "r", encoding='utf-8') as token_file:
token = token_file.read().strip()
self.__logger.info("Adding SSH key to Forgejo: %s", pubkey)
r = requests.post(
f"http://localhost:{self.forgejo_port}/api/v1/user/keys",
json={
"key" : pubkey,
"read_only" : False,
"title" : description
}, headers={
'Authorization' : f"token {token}"
},
timeout = 60
)
self.__logger.info("Request status was %i", r.status_code)
def add_forgejo_organization(self, pubkey, description = ""):
token = ""
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "r", encoding='utf-8') as token_file:
token = token_file.read().strip()
self.__logger.info("Adding SSH key to Forgejo: %s", pubkey)
r = requests.post(
f"http://localhost:{self.forgejo_port}/api/v1/user/keys",
json={
"key" : pubkey,
"read_only" : False,
"title" : description
}, headers={
'Authorization' : f"token {token}"
},
timeout = 60
)
self.__logger.info("Request status was %i", r.status_code)
for DIR in ["base.git", "linux.git", "windows.git"]: #, "LinAcl", "WinAcl"]:
self._recursive_chown(os.path.join(ogdir_images, DIR), ouid=self.ssh_uid, ogid=self.ssh_gid)
@@ -646,13 +350,9 @@ if __name__ == '__main__':
prog="OpenGnsys Installer",
description="Script para la instalación del repositorio git",
)
parser.add_argument('--forgejo-only', action='store_true', help="Solo instalar forgejo")
parser.add_argument('--forgejo-addrepos', action='store_true', help="Solo agregar repositorios forgejo")
parser.add_argument('--testmode', action='store_true', help="Modo de prueba")
parser.add_argument('--ignoresshkey', action='store_true', help="Ignorar clave de SSH")
parser.add_argument('--usesshkey', type=str, help="Usar clave SSH especificada")
parser.add_argument('--test-createuser', action='store_true')
args = parser.parse_args()
@@ -664,20 +364,7 @@ if __name__ == '__main__':
logger.debug("Inicio de instalación")
try:
if args.forgejo_only:
installer.install_forgejo()
elif args.forgejo_addrepos:
installer.add_forgejo_repo("linux")
elif args.test_createuser:
installer.set_ssh_user_group("oggit2", "oggit2")
else:
installer.install()
installer.install_forgejo()
installer.add_forgejo_repo("windows", "Windows")
installer.add_forgejo_repo("linux", "Linux")
installer.add_forgejo_repo("mac", "Mac")
installer.install()
except RequirementException as req:
show_error(f"Requisito para la instalación no satisfecho: {req.message}")
exit(1)
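
As a rough cross-check of a finished installation, the Forgejo registration performed by add_forgejo_repo() can be reproduced by hand: read the access token the installer writes to etc/ogGitApiToken.cfg and POST to the local Forgejo API. A sketch reusing the defaults from the code above (the repository name and description are placeholders):

import requests

FORGEJO_PORT = 3000
TOKEN_PATH = "/opt/opengnsys/etc/ogGitApiToken.cfg"  # written by install_forgejo()

with open(TOKEN_PATH, encoding="utf-8") as token_file:
    token = token_file.read().strip()

# Same request body that add_forgejo_repo() sends
response = requests.post(
    f"http://localhost:{FORGEJO_PORT}/api/v1/user/repos",
    json={"auto_init": False, "default_branch": "main",
          "description": "Scratch repository", "name": "scratch", "private": False},
    headers={"Authorization": f"token {token}"},
    timeout=60,
)
print(response.status_code)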