Compare commits

...

4 Commits

5 changed files with 640 additions and 249 deletions

api/gitapi.py 100644 → 100755

@@ -52,22 +52,34 @@ import os
import shutil
import uuid
import git
import time
from opengnsys_git_installer import OpengnsysGitInstaller
from flask import Flask, request, jsonify # stream_with_context, Response,
from flask_executor import Executor
from flask_restx import Api, Resource, fields
#from flasgger import Swagger
import paramiko

REPOSITORIES_BASE_PATH = "/opt/opengnsys/images"

start_time = time.time()

# Create an instance of the Flask class
app = Flask(__name__)
api = Api(app,
          version='0.50',
          title = "OpenGnsys Git API",
          description = "API for managing disk images stored in Git",
          doc = "/swagger/")

git_ns = api.namespace(name = "oggit", description = "Git operations", path = "/oggit/v1")

executor = Executor(app)
tasks = {}
@@ -147,291 +159,334 @@ def do_repo_gc(repo):
# Define a route for the root URL
@api.route('/')
class GitLib(Resource):

    @api.doc('home')
    def get(self):
        """
        Home route that returns a JSON response with a welcome message for the OpenGnsys Git API.

        Returns:
            Response: A Flask JSON response containing a welcome message.
        """
        return {
            "message": "OpenGnsys Git API"
        }
@git_ns.route('/oggit/v1/repositories')
class GitRepositories(Resource):
    def get(self):
        """
        Retrieve a list of Git repositories.

        This endpoint scans the OpenGnsys image path for directories that
        appear to be Git repositories (i.e., they contain a "HEAD" file).
        It returns a JSON response containing the names of these repositories.

        Returns:
            Response: A JSON response with a list of repository names or an
            error message if the repository storage is not found.
            - 200 OK: When the repositories are successfully retrieved.
            - 500 Internal Server Error: When the repository storage is not found.

        Example JSON response:
        {
            "repositories": ["repo1", "repo2"]
        }
        """
        if not os.path.isdir(REPOSITORIES_BASE_PATH):
            return jsonify({"error": "Repository storage not found, git functionality may not be installed."}), 500

        repos = []
        for entry in os.scandir(REPOSITORIES_BASE_PATH):
            if entry.is_dir(follow_symlinks=False) and os.path.isfile(os.path.join(entry.path, "HEAD")):
                name = entry.name
                if name.endswith(".git"):
                    name = name[:-4]

                repos = repos + [name]

        return jsonify({
            "repositories": repos
        })
    def post(self):
        """
        Create a new Git repository.

        This endpoint creates a new Git repository with the specified name.
        If the repository already exists, it returns a status message indicating so.

        Args:
            repo (str): The name of the repository to be created.

        Returns:
            Response: A JSON response with a status message and HTTP status code.
            - 200: If the repository already exists.
            - 201: If the repository is successfully created.
        """
        data = request.json
        if data is None:
            return jsonify({"error" : "Parameters missing"}), 400

        repo = data["name"]
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if os.path.isdir(repo_path):
            return jsonify({"status": "Repository already exists"}), 200

        installer = OpengnsysGitInstaller()
        installer.add_forgejo_repo(repo)

        #installer.init_git_repo(repo + ".git")

        return jsonify({"status": "Repository created"}), 201
@git_ns.route('/oggit/v1/repositories/<repo>/sync')
class GitRepoSync(Resource):
    def post(self, repo):
        """
        Synchronize a repository with a remote repository.

        This endpoint triggers the synchronization process for a specified repository.
        It expects a JSON payload with the remote repository details.

        Args:
            repo (str): The name of the repository to be synchronized.

        Returns:
            Response: A JSON response indicating the status of the synchronization process.
            - 200: If the synchronization process has started successfully.
            - 400: If the request payload is missing or invalid.
            - 404: If the specified repository is not found.
        """
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if not os.path.isdir(repo_path):
            return jsonify({"error": "Repository not found"}), 404

        data = request.json
        if data is None:
            return jsonify({"error" : "Parameters missing"}), 400

        future = executor.submit(do_repo_sync, repo, data)
        task_id = str(uuid.uuid4())
        tasks[task_id] = future
        return jsonify({"status": "started", "task_id" : task_id}), 200
@git_ns.route('/oggit/v1/repositories/<repo>/backup')
class GitRepoBackup(Resource):
    def backup_repository(self, repo):
        """
        Backup a specified repository.

        Endpoint: POST /repositories/<repo>/backup

        Args:
            repo (str): The name of the repository to back up.

        Request Body (JSON):
            ssh_port (int, optional): The SSH port to use for the backup. Defaults to 22.

        Returns:
            Response: A JSON response indicating the status of the backup operation.
            - If the repository is not found, returns a 404 error with a message.
            - If the request body is missing, returns a 400 error with a message.
            - If the backup process starts successfully, returns a 200 status with the task ID.

        Notes:
            - The repository path is constructed by appending ".git" to the repository name.
            - The backup operation is performed asynchronously using a thread pool executor.
            - The task ID of the backup operation is generated using UUID and stored in a global tasks dictionary.
        """
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if not os.path.isdir(repo_path):
            return jsonify({"error": "Repository not found"}), 404

        data = request.json
        if data is None:
            return jsonify({"error" : "Parameters missing"}), 400

        if not "ssh_port" in data:
            data["ssh_port"] = 22

        future = executor.submit(do_repo_backup, repo, data)
        task_id = str(uuid.uuid4())
        tasks[task_id] = future

        return jsonify({"status": "started", "task_id" : task_id}), 200
@git_ns.route('/oggit/v1/repositories/<repo>/compact', methods=['POST'])
class GitRepoCompact(Resource):
    def post(self, repo):
        """
        Initiates a garbage collection (GC) process for a specified Git repository.

        This endpoint triggers an asynchronous GC task for the given repository.
        The task is submitted to an executor, and a unique task ID is generated
        and returned to the client.

        Args:
            repo (str): The name of the repository to perform GC on.

        Returns:
            Response: A JSON response containing the status of the request and
            a unique task ID if the repository is found, or an error
            message if the repository is not found.
        """
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if not os.path.isdir(repo_path):
            return jsonify({"error": "Repository not found"}), 404

        future = executor.submit(do_repo_gc, repo)
        task_id = str(uuid.uuid4())
        tasks[task_id] = future
        return jsonify({"status": "started", "task_id" : task_id}), 200
@git_ns.route('/oggit/v1/tasks/<task_id>/status')
class GitTaskStatus(Resource):
    def get(self, task_id):
        """
        Endpoint to check the status of a specific task.

        Args:
            task_id (str): The unique identifier of the task.

        Returns:
            Response: A JSON response containing the status of the task.
            - If the task is not found, returns a 404 error with an error message.
            - If the task is completed, returns a 200 status with the result.
            - If the task is still in progress, returns a 202 status indicating the task is in progress.
        """
        if not task_id in tasks:
            return jsonify({"error": "Task not found"}), 404

        future = tasks[task_id]

        if future.done():
            result = future.result()
            return jsonify({"status" : "completed", "result" : result}), 200
        else:
            return jsonify({"status" : "in progress"}), 202
@git_ns.route('/oggit/v1/repositories/<repo>', methods=['DELETE'])
class GitRepo(Resource):
    def delete(self, repo):
        """
        Deletes a Git repository.

        This endpoint deletes a Git repository specified by the `repo` parameter.
        If the repository does not exist, it returns a 404 error with a message
        indicating that the repository was not found. If the repository is successfully
        deleted, it returns a 200 status with a message indicating that the repository
        was deleted.

        Args:
            repo (str): The name of the repository to delete.

        Returns:
            Response: A JSON response with a status message and the appropriate HTTP status code.
        """
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if not os.path.isdir(repo_path):
            return jsonify({"error": "Repository not found"}), 404

        shutil.rmtree(repo_path)
        return jsonify({"status": "Repository deleted"}), 200
@git_ns.route('/oggit/v1/repositories/<repo>/branches')
class GitRepoBranches(Resource):
    def get(self, repo):
        """
        Retrieve the list of branches for a given repository.

        Args:
            repo (str): The name of the repository.

        Returns:
            Response: A JSON response containing a list of branch names or an error message if the repository is not found.
            - 200: A JSON object with a "branches" key containing a list of branch names.
            - 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
        """
        repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
        if not os.path.isdir(repo_path):
            return jsonify({"error": "Repository not found"}), 404

        git_repo = git.Repo(repo_path)

        branches = []
        for branch in git_repo.branches:
            branches = branches + [branch.name]

        return jsonify({
            "branches": branches
        })
@git_ns.route('/health')
class GitHealth(Resource):
    def get(self):
        """
        Health check endpoint.

        This endpoint returns a JSON response indicating the health status of the application.

        Returns:
            Response: A JSON response with a status key set to "OK". Currently it always returns
            a successful value, but this endpoint can still be used to check that the API is
            active and functional.
        """
        return {
            "status": "OK"
        }

@git_ns.route('/status')
class GitStatus(Resource):
    def get(self):
        """
        Status check endpoint.

        This endpoint returns a JSON response indicating the status of the application.

        Returns:
            Response: A JSON response with status information
        """
        return {
            "uptime" : time.time() - start_time,
            "active_tasks" : len(tasks)
        }

api.add_namespace(git_ns)
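The two read-only endpoints above lend themselves to a simple liveness probe; a sketch, again assuming the base URL used in the earlier examples:

import requests

BASE = "http://localhost:5000/oggit/v1"   # assumption

print(requests.get(f"{BASE}/health", timeout=5).json())    # expected: {"status": "OK"}

status = requests.get(f"{BASE}/status", timeout=5).json()
print("uptime (s):", status["uptime"], "active tasks:", status["active_tasks"])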
# Run the Flask app
if __name__ == '__main__':
print(f"Map: {app.url_map}")
app.run(debug=True, host='0.0.0.0')


@@ -0,0 +1,34 @@
aniso8601==9.0.1
attrs==24.2.0
bcrypt==4.2.0
blinker==1.8.2
cffi==1.17.1
click==8.1.7
cryptography==43.0.1
dataclasses==0.6
flasgger==0.9.7.1
Flask==3.0.3
Flask-Executor==1.0.0
flask-restx==1.3.0
gitdb==4.0.11
GitPython==3.1.43
importlib_resources==6.4.5
itsdangerous==2.2.0
Jinja2==3.1.4
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
libarchive-c==5.1
MarkupSafe==3.0.1
mistune==3.0.2
packaging==24.1
paramiko==3.5.0
pycparser==2.22
PyNaCl==1.5.0
pytz==2024.2
PyYAML==6.0.2
referencing==0.35.1
rpds-py==0.20.0
six==1.16.0
smmap==5.0.1
termcolor==2.5.0
Werkzeug==3.0.4


@@ -1627,6 +1627,7 @@ class OpengnsysGitLibrary:
        """
        Restore the repository to the state it had before the non-committed modifications
        """
        self.logger.info("Undoing any user changes to the filesystem")
        repo = git.Repo(path)
        repo.head.reset(index=True, working_tree=True)
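For reference, the added log line sits in front of the existing GitPython reset, which discards both staged and working-tree changes. A standalone sketch of the same call (the path is hypothetical):

import git

repo = git.Repo("/tmp/example-checkout")          # hypothetical working copy
repo.head.reset(index=True, working_tree=True)    # drop uncommitted index and file changes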


@@ -51,6 +51,15 @@ para agregarla.
# . venv/bin/activate
# ./opengnsys_git_installer.py

The installer downloads and installs Forgejo, a web front-end for Git. The configuration is generated automatically.

Forgejo manages the repositories and SSH access, so it must be kept running at all times. By default it is installed on port 3000.

The default user is `opengnsys` with password `opengnsys`.

# Documentation

Python documentation can be generated with a utility such as pdoc3 (there are several possible alternatives):
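For example, an invocation along these lines (illustrative only, not part of the change):

# pdoc3 --html --output-dir docs opengnsys_git_installer.py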


@@ -10,9 +10,21 @@ import subprocess
import sys
import pwd
import grp
from termcolor import cprint
import git
import libarchive
import urllib.request
import pathlib
import socket
import time
import requests

#FORGEJO_VERSION="8.0.3"
FORGEJO_VERSION="9.0.0"
FORGEJO_URL=f"https://codeberg.org/forgejo/forgejo/releases/download/v{FORGEJO_VERSION}/forgejo-{FORGEJO_VERSION}-linux-amd64"
def show_error(*args):

@@ -27,6 +39,7 @@ def show_error(*args):
    """
    cprint(*args, "red", attrs = ["bold"], file=sys.stderr)

class RequirementException(Exception):
    """Excepción que indica que nos falta algún requisito
@@ -100,13 +113,16 @@ class OpengnsysGitInstaller:
        self.testmode = False
        self.base_path = "/opt/opengnsys"
        self.git_basedir = "base.git"
        self.email = "OpenGnsys@opengnsys.com"

        self.forgejo_user = "opengnsys"
        self.forgejo_password = "opengnsys"
        self.forgejo_port = 3000

        self.set_ssh_user_group("oggit", "oggit")

        self.temp_dir = None
        self.script_path = os.path.realpath(os.path.dirname(__file__))

        # Possible names for SSH key
        self.key_paths = ["scripts/ssl/id_rsa.pub", "scripts/ssl/id_ed25519.pub", "scripts/ssl/id_ecdsa.pub", "scripts/ssl/id_ed25519_sk.pub", "scripts/ssl/id_ecdsa_sk.pub"]
@@ -157,6 +173,32 @@ class OpengnsysGitInstaller:
        if self.temp_dir:
            shutil.rmtree(self.temp_dir, ignore_errors=True)

    def set_ssh_user_group(self, username, groupname):
        self.ssh_group = groupname
        self.ssh_user = username

        try:
            self.ssh_gid = grp.getgrnam(self.ssh_group).gr_gid
            self.__logger.info("Group %s exists with gid %i", self.ssh_group, self.ssh_gid)
        except KeyError:
            self.__logger.info("Need to create group %s", self.ssh_group)
            subprocess.run(["/usr/sbin/groupadd", "--system", self.ssh_group], check=True)
            self.ssh_gid = grp.getgrnam(groupname).gr_gid

        try:
            self.ssh_uid = pwd.getpwnam(self.ssh_user).pw_uid
            self.__logger.info("User %s exists with gid %i", self.ssh_user, self.ssh_uid)
        except KeyError:
            self.__logger.info("Need to create user %s", self.ssh_user)
            subprocess.run(["/usr/sbin/useradd", "--gid", str(self.ssh_gid), "-m", "--system", self.ssh_user], check=True)
            self.ssh_uid = pwd.getpwnam(username).pw_uid

        self.ssh_homedir = pwd.getpwnam(username).pw_dir

    def init_git_repo(self, reponame):
        """Inicializa un repositorio Git"""

        # Creamos repositorio
@@ -180,7 +222,7 @@ class OpengnsysGitInstaller:
        self.__logger.info("Configurando repositorio de GIT")
        repo.config_writer().set_value("user", "name", "OpenGnsys").release()
        repo.config_writer().set_value("user", "email", self.email).release()

        self._recursive_chown(repo_path, ouid=self.ssh_uid, ogid=self.ssh_gid)
@@ -209,6 +251,71 @@ class OpengnsysGitInstaller:
            for filename in filenames:
                os.chown(os.path.join(dirpath, filename), uid=ouid, gid=ogid)

    def _wait_for_port(self, host, port):
        self.__logger.info("Waiting for %s:%i to be up", host, port)

        timeout = 60
        start_time = time.time()

        ready = False
        while not ready and (time.time() - start_time) < 60:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                s.connect((host, port))
                ready = True
                s.close()
            except TimeoutError:
                self.__logger.debug("Timed out, no connection yet.")
            except OSError as oserr:
                self.__logger.debug("%s, no connection yet. %.1f seconds left.", oserr.strerror, timeout - (time.time() - start_time))
            time.sleep(0.1)

        if ready:
            self.__logger.info("Connection established.")
        else:
            self.__logger.error("Timed out waiting for connection!")
            raise TimeoutError("Timed out waiting for connection!")

    def _extract_ssh_key(self):
        public_key=""
        INITRD = "oginitrd.img"

        tftp_dir = os.path.join(self.base_path, "tftpboot")
        default_num = self.oglive.get_default()
        default_client = self.oglive.get_clients()[default_num]
        client_initrd_path = os.path.join(tftp_dir, default_client, INITRD)

        #self.temp_dir = self._get_tempdir()

        if self.usesshkey:
            with open(self.usesshkey, 'r') as f:
                public_key = f.read().strip()
        else:
            if os.path.isfile(client_initrd_path):
                #os.makedirs(temp_dir, exist_ok=True)
                #os.chdir(self.temp_dir.name)
                self.__logger.debug("Descomprimiendo %s", client_initrd_path)
                public_key = None
                with libarchive.file_reader(client_initrd_path) as initrd:
                    for file in initrd:
                        #self.__logger.debug("Archivo: %s", file)
                        if file.pathname in self.key_paths_dict:
                            data = bytearray()
                            for block in file.get_blocks():
                                data = data + block
                            public_key = data.decode('utf-8').strip()
                            break
            else:
                print(f"No se encuentra la imagen de initrd {client_initrd_path}")
                exit(2)

        return public_key

    def install(self):
        """Instalar
@@ -269,31 +376,7 @@ class OpengnsysGitInstaller:
        if not self.ignoresshkey:
            public_key=""

            public_key = self._extract_ssh_key()

            # Si la clave publica no existe me salgo con error
            if not public_key:
@@ -330,13 +413,205 @@ class OpengnsysGitInstaller:
        os.system(f"usermod -s {SHELL} opengnsys")

        # Creamos repositorios
        #self.init_git_repo('windows.git')
        #self.init_git_repo('linux.git')
        #self.init_git_repo('mac.git')

        # Damos permiso al usuario opengnsys
        #for DIR in ["base.git", "linux.git", "windows.git"]: #, "LinAcl", "WinAcl"]:
        #    self._recursive_chown(os.path.join(ogdir_images, DIR), ouid=self.ssh_uid, ogid=self.ssh_gid)
    def _install_template(self, template, destination, keysvalues):
        self.__logger.info("Writing template %s into %s", template, destination)

        data = ""
        with open(template, "r", encoding="utf-8") as template_file:
            data = template_file.read()

        for key in keysvalues.keys():
            data = data.replace("{" + key + "}", keysvalues[key])

        with open(destination, "w+", encoding="utf-8") as out_file:
            out_file.write(data)

    def _runcmd(self, cmd):
        self.__logger.debug("Running: %s", cmd)

        ret = subprocess.run(cmd, check=True, capture_output=True, encoding='utf-8')
        return ret.stdout.strip()
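A minimal sketch of the placeholder convention _install_template relies on: every literal {key} in the template text is replaced by the corresponding dictionary value (the template text and values here are illustrative, not the real forgejo-app.ini):

template_text = "HTTP_PORT = {forgejo_port}\nWORK_PATH = {forgejo_work_path}\n"
values = {"forgejo_port": "3000", "forgejo_work_path": "/opt/opengnsys/var/lib/forgejo/work"}

for key in values.keys():
    template_text = template_text.replace("{" + key + "}", values[key])

print(template_text)   # HTTP_PORT = 3000, WORK_PATH = /opt/opengnsys/var/lib/forgejo/work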
    def install_forgejo(self):
        self.__logger.info("Installing Forgejo")

        bin_path = os.path.join(self.base_path, "bin", "forgejo")
        conf_dir_path = os.path.join(self.base_path, "etc", "forgejo")

        lfs_dir_path = os.path.join(self.base_path, "images", "git-lfs")
        git_dir_path = os.path.join(self.base_path, "images", "git")

        forgejo_work_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/work")
        forgejo_db_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/db")
        forgejo_data_dir_path = os.path.join(self.base_path, "var", "lib", "forgejo/data")

        forgejo_db_path = os.path.join(forgejo_db_dir_path, "forgejo.db")
        forgejo_log_dir_path = os.path.join(self.base_path, "log", "forgejo")

        conf_path = os.path.join(conf_dir_path, "app.ini")

        self.__logger.debug("Stopping opengnsys-forgejo service")
        subprocess.run(["systemctl", "stop", "opengnsys-forgejo"], check=False)

        if not os.path.exists(bin_path):
            self.__logger.debug("Downloading from %s into %s", FORGEJO_URL, bin_path)
            urllib.request.urlretrieve(FORGEJO_URL, bin_path)
            os.chmod(bin_path, 0o755)

        if os.path.exists(forgejo_db_path):
            self.__logger.debug("Removing old configuration")
            os.unlink(forgejo_db_path)
        else:
            self.__logger.debug("Old configuration not present, ok.")

        self.__logger.debug("Wiping old data")
        for dir in [conf_dir_path, git_dir_path, lfs_dir_path, forgejo_work_dir_path, forgejo_data_dir_path, forgejo_db_dir_path]:
            if os.path.exists(dir):
                self.__logger.debug("Removing %s", dir)
                shutil.rmtree(dir)

        self.__logger.debug("Creating directories")
        pathlib.Path(conf_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(git_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(lfs_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(forgejo_work_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(forgejo_data_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(forgejo_db_dir_path).mkdir(parents=True, exist_ok=True)
        pathlib.Path(forgejo_log_dir_path).mkdir(parents=True, exist_ok=True)

        os.chown(lfs_dir_path, self.ssh_uid, self.ssh_gid)
        os.chown(git_dir_path, self.ssh_uid, self.ssh_gid)
        os.chown(forgejo_data_dir_path, self.ssh_uid, self.ssh_gid)
        os.chown(forgejo_work_dir_path, self.ssh_uid, self.ssh_gid)
        os.chown(forgejo_db_dir_path, self.ssh_uid, self.ssh_gid)
        os.chown(forgejo_log_dir_path, self.ssh_uid, self.ssh_gid)

        data = {
            "forgejo_user" : self.ssh_user,
            "forgejo_group" : self.ssh_group,
            "forgejo_port" : str(self.forgejo_port),
            "forgejo_bin" : bin_path,
            "forgejo_app_ini" : conf_path,
            "forgejo_work_path" : forgejo_work_dir_path,
            "forgejo_data_path" : forgejo_data_dir_path,
            "forgejo_db_path" : forgejo_db_path,
            "forgejo_repository_root" : git_dir_path,
            "forgejo_lfs_path" : lfs_dir_path,
            "forgejo_log_path" : forgejo_log_dir_path,
            "forgejo_hostname" : self._runcmd("hostname"),
            "forgejo_lfs_jwt_secret" : self._runcmd([bin_path, "generate", "secret", "LFS_JWT_SECRET"]),
            "forgejo_jwt_secret" : self._runcmd([bin_path, "generate", "secret", "JWT_SECRET"]),
            "forgejo_internal_token" : self._runcmd([bin_path, "generate", "secret", "INTERNAL_TOKEN"]),
            "forgejo_secret_key" : self._runcmd([bin_path, "generate", "secret", "SECRET_KEY"])
        }

        self._install_template(os.path.join(self.script_path, "forgejo-app.ini"), conf_path, data)
        self._install_template(os.path.join(self.script_path, "forgejo.service"), "/etc/systemd/system/opengnsys-forgejo.service", data)

        self.__logger.debug("Reloading systemd and starting service")
        subprocess.run(["systemctl", "daemon-reload"], check=True)
        subprocess.run(["systemctl", "enable", "opengnsys-forgejo"], check=True)
        subprocess.run(["systemctl", "restart", "opengnsys-forgejo"], check=True)

        self.__logger.info("Waiting for forgejo to start")
        self._wait_for_port("localhost", self.forgejo_port)

        self.__logger.info("Configuring forgejo")

        def run_forge_cmd(args):
            cmd = [bin_path, "--config", conf_path] + args
            self.__logger.debug("Running command: %s", cmd)

            ret = subprocess.run(cmd, check=False, capture_output=True, encoding='utf-8', user=self.ssh_user)
            if ret.returncode == 0:
                return ret.stdout.strip()
            else:
                self.__logger.error("Failed to run command: %s, return code %i", cmd, ret.returncode)
                self.__logger.error("stdout: %s", ret.stdout)
                self.__logger.error("stderr: %s", ret.stderr)
                raise RuntimeError("Failed to run necessary command")

        run_forge_cmd(["admin", "doctor", "check"])
        run_forge_cmd(["admin", "user", "create", "--username", self.forgejo_user, "--password", self.forgejo_password, "--email", self.email])

        token = run_forge_cmd(["admin", "user", "generate-access-token", "--username", self.forgejo_user, "-t", "gitapi", "--scopes", "all", "--raw"])

        with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "w+", encoding='utf-8') as token_file:
            token_file.write(token)

        ssh_key = self._extract_ssh_key()
        self.add_forgejo_sshkey(ssh_key, "Default key")
    def add_forgejo_repo(self, repository_name, description = ""):
        token = ""
        with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "r", encoding='utf-8') as token_file:
            token = token_file.read().strip()

        self.__logger.info("Adding repository %s for Forgejo", repository_name)

        r = requests.post(
            f"http://localhost:{self.forgejo_port}/api/v1/user/repos",
            json={
                "auto_init" : False,
                "default_branch" : "main",
                "description" : description,
                "name" : repository_name,
                "private" : False
            }, headers={
                'Authorization' : f"token {token}"
            },
            timeout = 60
        )

        self.__logger.info("Request status was %i", r.status_code)

    def add_forgejo_sshkey(self, pubkey, description = ""):
        token = ""
        with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "r", encoding='utf-8') as token_file:
            token = token_file.read().strip()

        self.__logger.info("Adding SSH key to Forgejo: %s", pubkey)

        r = requests.post(
            f"http://localhost:{self.forgejo_port}/api/v1/user/keys",
            json={
                "key" : pubkey,
                "read_only" : False,
                "title" : description
            }, headers={
                'Authorization' : f"token {token}"
            },
            timeout = 60
        )

        self.__logger.info("Request status was %i", r.status_code)
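Both helpers above authenticate with the token that install_forgejo writes to ogGitApiToken.cfg; a sketch of reusing that token for a read-only query against the Forgejo API (the endpoint, port and token path are taken from the code above, the rest is an assumption):

import requests

with open("/opt/opengnsys/etc/ogGitApiToken.cfg", "r", encoding="utf-8") as token_file:
    token = token_file.read().strip()

# List the repositories owned by the API user.
r = requests.get(
    "http://localhost:3000/api/v1/user/repos",
    headers={"Authorization": f"token {token}"},
    timeout=60,
)
print([repo["name"] for repo in r.json()])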
@@ -350,9 +625,13 @@ if __name__ == '__main__':
        prog="OpenGnsys Installer",
        description="Script para la instalación del repositorio git",
    )

    parser.add_argument('--forgejo-only', action='store_true', help="Solo instalar forgejo")
    parser.add_argument('--forgejo-addrepos', action='store_true', help="Solo agregar repositorios forgejo")
    parser.add_argument('--testmode', action='store_true', help="Modo de prueba")
    parser.add_argument('--ignoresshkey', action='store_true', help="Ignorar clave de SSH")
    parser.add_argument('--usesshkey', type=str, help="Usar clave SSH especificada")
    parser.add_argument('--test-createuser', action='store_true')

    args = parser.parse_args()
@@ -364,7 +643,20 @@ if __name__ == '__main__':
    logger.debug("Inicio de instalación")

    try:
        if args.forgejo_only:
            installer.install_forgejo()
        elif args.forgejo_addrepos:
            installer.add_forgejo_repo("linux")
        elif args.test_createuser:
            installer.set_ssh_user_group("oggit2", "oggit2")
        else:
            installer.install()
            installer.install_forgejo()
            installer.add_forgejo_repo("windows", "Windows")
            installer.add_forgejo_repo("linux", "Linux")
            installer.add_forgejo_repo("mac", "Mac")
    except RequirementException as req:
        show_error(f"Requisito para la instalación no satisfecho: {req.message}")
        exit(1)